Estimated wait times:
  Deepseek Chat: no wait
  GPT-4o Mini / 3.5 Turbo: no wait
  GPT-4: no wait
  GPT-4 32k: no wait
  GPT-4 Turbo: no wait
  GPT-4o: no wait
  Claude (Sonnet): no wait
  Claude (Opus): no wait
  Gemini Flash: no wait
  Gemini Pro: no wait
  AWS Claude (Sonnet): no wait
  Azure GPT-4: no wait
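The summary above mirrors the per-model "estimatedQueueTime" fields in the status JSON below. A minimal sketch of pulling those fields out, assuming the JSON has been saved locally as status.json (the filename is illustrative):

import json

# Load the service-info JSON shown below (saving it to status.json is an assumption).
with open("status.json") as f:
    status = json.load(f)

# Every top-level object that carries an "estimatedQueueTime" is a model family;
# "endpoints" and "config" are skipped automatically.
for name, stats in status.items():
    if isinstance(stats, dict) and "estimatedQueueTime" in stats:
        print(f"{name}: {stats['estimatedQueueTime']}")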
{
  "uptime": 2111728,
  "endpoints": {
    "openai": "https://whore-proxy.ru/proxy/openai",
    "anthropic": "https://whore-proxy.ru/proxy/anthropic",
    "google-ai": "https://whore-proxy.ru/proxy/google-ai",
    "aws-claude": "https://whore-proxy.ru/proxy/aws/claude",
    "aws-mistral": "https://whore-proxy.ru/proxy/aws/mistral",
    "azure": "https://whore-proxy.ru/proxy/azure/openai",
    "deepseek": "https://whore-proxy.ru/proxy/deepseek"
  },
  "proompts": 61659,
  "tookens": "543.18m",
  "proomptersNow": 1,
  "openaiKeys": 910,
  "openaiOrgs": 303,
  "anthropicKeys": 2,
  "google-aiKeys": 7,
  "awsKeys": 1,
  "azureKeys": 4,
  "deepseekKeys": 1,
  "turbo": { "usage": "813.8k tokens", "activeKeys": 39, "revokedKeys": 623, "overQuotaKeys": 245, "trialKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "gpt4-turbo": { "usage": "84.37m tokens", "activeKeys": 30, "overQuotaKeys": 190, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "gpt4": { "usage": "60.11m tokens", "activeKeys": 30, "overQuotaKeys": 189, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "gpt4-32k": { "usage": "3.02m tokens", "activeKeys": 13, "overQuotaKeys": 36, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "gpt4o": { "usage": "302.04m tokens", "activeKeys": 35, "overQuotaKeys": 245, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "claude": { "usage": "0 tokens", "activeKeys": 0, "revokedKeys": 2, "overQuotaKeys": 0, "trialKeys": 0, "prefilledKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "claude-opus": { "usage": "0 tokens", "activeKeys": 0, "revokedKeys": 2, "overQuotaKeys": 0, "trialKeys": 0, "prefilledKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "gemini-pro": { "usage": "0 tokens", "activeKeys": 0, "revokedKeys": 7, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "gemini-flash": { "usage": "0 tokens", "activeKeys": 0, "revokedKeys": 7, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "aws-claude": { "usage": "0 tokens", "activeKeys": 0, "revokedKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "azure-gpt4": { "usage": "0 tokens", "activeKeys": 0, "revokedKeys": 4, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "deepseek": { "usage": "92.82m tokens", "activeKeys": 0, "revokedKeys": 0, "overQuotaKeys": 1, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" },
  "config": {
    "gatekeeper": "user_token",
    "maxIpsAutoBan": "false",
    "captchaMode": "none",
    "powTokenHours": "24",
    "powTokenMaxIps": "2",
    "powDifficultyLevel": "low",
    "powChallengeTimeout": "30",
    "textModelRateLimit": "3",
    "imageModelRateLimit": "2",
    "maxContextTokensOpenAI": "0",
    "maxContextTokensAnthropic": "60000",
    "maxOutputTokensOpenAI": "4000",
    "maxOutputTokensAnthropic": "4000",
    "allowAwsLogging": "false",
    "promptLogging": "false",
    "tokenQuota": {
      "deepseek": "0", "turbo": "0", "gpt4": "0", "gpt4-32k": "0", "gpt4-turbo": "0", "gpt4o": "0", "o1": "0", "o1-mini": "0", "dall-e": "0",
      "claude": "0", "claude-opus": "0", "gemini-flash": "0", "gemini-pro": "0", "gemini-ultra": "0",
      "mistral-tiny": "0", "mistral-small": "0", "mistral-medium": "0", "mistral-large": "0",
      "aws-claude": "0", "aws-claude-opus": "0", "aws-mistral-tiny": "0", "aws-mistral-small": "0", "aws-mistral-medium": "0", "aws-mistral-large": "0",
      "gcp-claude": "0", "gcp-claude-opus": "0",
      "azure-turbo": "0", "azure-gpt4": "0", "azure-gpt4-32k": "0", "azure-gpt4-turbo": "0", "azure-gpt4o": "0", "azure-dall-e": "0", "azure-o1": "0", "azure-o1-mini": "0"
    },
    "allowOpenAIToolUsage": "false",
    "tokensPunishmentFactor": "0"
  },
  "build": "6de338c (main@penurin/oai-reverse-proxy)"
}