Shark's proxy
Current Service Stats
| Service | Active Keys | Wait time |
| --- | --- | --- |
| dall e | 11 | no wait |
| gpt image 1 | 0 | no wait |
| turbo | 13 | no wait |
| gpt4o | 12 | no wait |
| gpt4 | 11 | no wait |
| gpt5 | 12 | no wait |
| gpt4 32k | 0 | no wait |
| gpt4 turbo | 11 | no wait |
| azure gpt4o | 0 | no wait |
| azure gpt4 | 0 | no wait |
| azure gpt4 turbo | 0 | no wait |
| azure gpt4 32k | 0 | no wait |
| azure turbo | 0 | no wait |
| aws claude | 1 | no wait |
| claude | 0 | no wait |
| gemini pro | 7 | no wait |
| mistral medium | 1 | no wait |
| mistral small | 1 | no wait |
| mistral tiny | 1 | no wait |
{
"uptime": 6854862,
"endpoints": {
"openai": "https://oai-proxy.yuki.pet/proxy/openai",
"openai-image": "https://oai-proxy.yuki.pet/proxy/openai-image",
"anthropic": "https://oai-proxy.yuki.pet/proxy/anthropic",
"google-ai": "https://oai-proxy.yuki.pet/proxy/google-ai",
"mistral-ai": "https://oai-proxy.yuki.pet/proxy/mistral-ai",
"aws-claude": "https://oai-proxy.yuki.pet/proxy/aws/claude",
"aws-mistral": "https://oai-proxy.yuki.pet/proxy/aws/mistral"
},
"proompts": 6911,
"tookens": "382.42m ($20301.65)",
"proomptersNow": 0,
"openaiKeys": 45,
"openaiOrgs": 32,
"anthropicKeys": 39,
"google-aiKeys": 27,
"mistral-aiKeys": 1,
"awsKeys": 5,
"turbo": {
"usage": "0 tokens ($0.00)",
"activeKeys": 13,
"revokedKeys": 24,
"overQuotaKeys": 8,
"trialKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"gpt4": {
"usage": "87.53m tokens ($2626.00)",
"activeKeys": 11,
"overQuotaKeys": 5,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"gpt5": {
"usage": "294.57m tokens ($17674.01)",
"activeKeys": 12,
"overQuotaKeys": 8,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"dall-e": {
"usage": "0 tokens ($0.00)",
"activeKeys": 11,
"overQuotaKeys": 8,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"gpt4-turbo": {
"usage": "0 tokens ($0.00)",
"activeKeys": 11,
"overQuotaKeys": 5,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"gpt4o": {
"usage": "6.4k tokens ($0.03)",
"activeKeys": 12,
"overQuotaKeys": 8,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"o1": {
"usage": "0 tokens ($0.00)",
"activeKeys": 11,
"overQuotaKeys": 8,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"gpt-5.1-codex": {
"usage": "0 tokens ($0.00)",
"activeKeys": 11,
"overQuotaKeys": 8,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"claude": {
"usage": "0 tokens ($0.00)",
"activeKeys": 0,
"revokedKeys": 17,
"overQuotaKeys": 22,
"trialKeys": 0,
"prefilledKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"claude-opus": {
"usage": "0 tokens ($0.00)",
"activeKeys": 0,
"revokedKeys": 17,
"overQuotaKeys": 22,
"trialKeys": 0,
"prefilledKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"gemini-pro": {
"usage": "209.5k tokens ($0.00)",
"activeKeys": 7,
"revokedKeys": 20,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"mistral-tiny": {
"usage": "0 tokens ($0.00)",
"activeKeys": 1,
"revokedKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"mistral-small": {
"usage": "0 tokens ($0.00)",
"activeKeys": 1,
"revokedKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"mistral-medium": {
"usage": "0 tokens ($0.00)",
"activeKeys": 1,
"revokedKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"mistral-large": {
"usage": "0 tokens ($0.00)",
"activeKeys": 1,
"revokedKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"aws-claude": {
"usage": "0 tokens ($0.00)",
"activeKeys": 1,
"revokedKeys": 0,
"enabledVariants": "sonnet3",
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"aws-claude-opus": {
"usage": "107.3k tokens ($1.61)",
"activeKeys": 0,
"revokedKeys": 0,
"proomptersInQueue": 0,
"estimatedQueueTime": "no wait"
},
"config": {
"gatekeeper": "user_token",
"maxIpsAutoBan": "false",
"captchaMode": "none",
"powTokenHours": "24",
"powTokenMaxIps": "2",
"powDifficultyLevel": "low",
"powChallengeTimeout": "30",
"textModelRateLimit": "60",
"imageModelRateLimit": "4",
"maxContextTokensOpenAI": "9007199254740991",
"maxContextTokensAnthropic": "9007199254740991",
"maxOutputTokensOpenAI": "9007199254740991",
"maxOutputTokensAnthropic": "9007199254740991",
"allowAwsLogging": "true",
"promptLogging": "false",
"tokenQuota": {
"deepseek": "0",
"turbo": "0",
"gpt5": "0",
"gpt-5.1-codex": "0",
"gpt4": "0",
"gpt4-32k": "0",
"gpt4-turbo": "0",
"gpt4o": "0",
"o1": "0",
"o1-mini": "0",
"o3-mini": "0",
"gpt-4o-mini": "0",
"dall-e": "0",
"claude": "0",
"claude-opus": "0",
"gemini-flash": "0",
"gemini-pro": "0",
"gemini-ultra": "0",
"mistral-tiny": "0",
"mistral-small": "0",
"mistral-medium": "0",
"mistral-large": "0",
"aws-claude": "0",
"aws-claude-opus": "0",
"aws-mistral-tiny": "0",
"aws-mistral-small": "0",
"aws-mistral-medium": "0",
"aws-mistral-large": "0",
"gcp-claude": "0",
"gcp-claude-opus": "0",
"azure-turbo": "0",
"azure-gpt5": "0",
"azure-gpt4": "0",
"azure-gpt4-32k": "0",
"azure-gpt4-turbo": "0",
"azure-gpt4o": "0",
"azure-dall-e": "0",
"azure-o1": "0",
"azure-o1-mini": "0",
"azure-o3-mini": "0",
"azure-gpt-4o-mini": "0",
"azure-gpt-5.1-codex": "0"
},
"allowOpenAIToolUsage": "true",
"allowedVisionServices": "openai",
"tokensPunishmentFactor": "0"
},
"build": "c18ef80 (HEAD@psyonity/oai-reverse-proxy)"
}