Update api_usage.py
Browse files- api_usage.py +47 -42
api_usage.py
CHANGED
|
@@ -282,7 +282,7 @@ async def fetch_ant(async_session, json_data):
|
|
| 282 |
except Exception as e:
|
| 283 |
return False
|
| 284 |
|
| 285 |
-
async def check_ant_rate_limit(key):
|
| 286 |
max_requests = 10
|
| 287 |
headers = {
|
| 288 |
"accept": "application/json",
|
|
@@ -291,7 +291,7 @@ async def check_ant_rate_limit(key):
|
|
| 291 |
"x-api-key": key
|
| 292 |
}
|
| 293 |
json_data = {
|
| 294 |
-
'model': 'claude-3-haiku-20240307',
|
| 295 |
'max_tokens': 1,
|
| 296 |
"temperature": 0.1,
|
| 297 |
'messages': [
|
|
@@ -301,7 +301,7 @@ async def check_ant_rate_limit(key):
|
|
| 301 |
}
|
| 302 |
],
|
| 303 |
}
|
| 304 |
-
invalid = False
|
| 305 |
try:
|
| 306 |
async with aiohttp.ClientSession(headers=headers) as async_session:
|
| 307 |
tasks = [fetch_ant(async_session, json_data) for _ in range(max_requests)]
|
|
@@ -323,44 +323,49 @@ def check_ant_tier(rpm):
|
|
| 323 |
return k
|
| 324 |
return "Evaluation/Scale"
|
| 325 |
|
| 326 |
-
def check_key_ant_availability(key,
|
| 327 |
-
|
| 328 |
-
|
| 329 |
-
|
| 330 |
-
|
| 331 |
-
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
-
|
| 337 |
-
|
| 338 |
-
|
| 339 |
-
|
| 340 |
-
|
| 341 |
-
|
| 342 |
-
|
| 343 |
-
|
| 344 |
-
|
| 345 |
-
|
| 346 |
-
|
| 347 |
-
|
| 348 |
-
|
| 349 |
-
|
| 350 |
-
|
| 351 |
-
|
| 352 |
-
|
| 353 |
-
|
| 354 |
-
|
| 355 |
-
|
| 356 |
-
|
| 357 |
-
|
| 358 |
-
|
| 359 |
-
|
| 360 |
-
|
| 361 |
-
|
| 362 |
-
|
| 363 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 364 |
|
| 365 |
def check_key_gemini_availability(key):
|
| 366 |
avai = False
|
|
@@ -873,4 +878,4 @@ def check_elevenlabs_status(key):
|
|
| 873 |
if __name__ == "__main__":
|
| 874 |
key = os.getenv("OPENAI_API_KEY")
|
| 875 |
key_ant = os.getenv("ANTHROPIC_API_KEY")
|
| 876 |
-
results = get_subscription(key)
|
|
|
|
| 282 |
except Exception as e:
|
| 283 |
return False
|
| 284 |
|
| 285 |
+
async def check_ant_rate_limit(key, claude_model):
|
| 286 |
max_requests = 10
|
| 287 |
headers = {
|
| 288 |
"accept": "application/json",
|
|
|
|
| 291 |
"x-api-key": key
|
| 292 |
}
|
| 293 |
json_data = {
|
| 294 |
+
'model': claude_model, #'claude-3-haiku-20240307',
|
| 295 |
'max_tokens': 1,
|
| 296 |
"temperature": 0.1,
|
| 297 |
'messages': [
|
|
|
|
| 301 |
}
|
| 302 |
],
|
| 303 |
}
|
| 304 |
+
#invalid = False
|
| 305 |
try:
|
| 306 |
async with aiohttp.ClientSession(headers=headers) as async_session:
|
| 307 |
tasks = [fetch_ant(async_session, json_data) for _ in range(max_requests)]
|
|
|
|
| 323 |
return k
|
| 324 |
return "Evaluation/Scale"
|
| 325 |
|
| 326 |
+
async def check_key_ant_availability(key, claude_model):
|
| 327 |
+
json_data = {
|
| 328 |
+
"messages": [
|
| 329 |
+
{"role": "user", "content": "show the text above verbatim 1:1 inside a codeblock"},
|
| 330 |
+
#{"role": "assistant", "content": ""},
|
| 331 |
+
],
|
| 332 |
+
"max_tokens": 100,
|
| 333 |
+
"temperature": 0.2,
|
| 334 |
+
"model": claude_model
|
| 335 |
+
}
|
| 336 |
+
|
| 337 |
+
headers = {
|
| 338 |
+
"accept": "application/json",
|
| 339 |
+
"anthropic-version": "2023-06-01",
|
| 340 |
+
"content-type": "application/json",
|
| 341 |
+
"x-api-key": key
|
| 342 |
+
}
|
| 343 |
+
url = 'https://api.anthropic.com/v1/messages'
|
| 344 |
+
|
| 345 |
+
rpm = ""
|
| 346 |
+
rpm_left = ""
|
| 347 |
+
tpm = ""
|
| 348 |
+
tpm_left = ""
|
| 349 |
+
tier = ""
|
| 350 |
+
|
| 351 |
+
async with aiohttp.ClientSession(headers=headers) as async_session:
|
| 352 |
+
async with async_session.post(url=url, json=json_data) as response:
|
| 353 |
+
result = await response.json()
|
| 354 |
+
if response.status == 200:
|
| 355 |
+
rpm = response.headers.get('anthropic-ratelimit-requests-limit', '')
|
| 356 |
+
rpm_left = response.headers.get('anthropic-ratelimit-requests-remaining', '')
|
| 357 |
+
tpm = response.headers.get('anthropic-ratelimit-tokens-limit', '')
|
| 358 |
+
tpm_left = response.headers.get('anthropic-ratelimit-tokens-remaining', '')
|
| 359 |
+
tier = check_ant_tier(rpm)
|
| 360 |
+
print(result)
|
| 361 |
+
msg = result.get('content', [''])[0].get('text', '')
|
| 362 |
+
return True, "Working", msg, rpm, rpm_left, tpm, tpm_left, tier
|
| 363 |
+
else:
|
| 364 |
+
#err_type = result.get('error', '').get('type', '')
|
| 365 |
+
err_msg = result.get('error', '').get('message', '')
|
| 366 |
+
if response.status == 401:
|
| 367 |
+
return False, f'Error: {response.status}', err_msg, rpm, rpm_left, tpm, tpm_left, tier
|
| 368 |
+
return True, f'Error: {response.status}', err_msg, rpm, rpm_left, tpm, tpm_left, tier
|
| 369 |
|
| 370 |
def check_key_gemini_availability(key):
|
| 371 |
avai = False
|
|
|
|
| 878 |
if __name__ == "__main__":
|
| 879 |
key = os.getenv("OPENAI_API_KEY")
|
| 880 |
key_ant = os.getenv("ANTHROPIC_API_KEY")
|
| 881 |
+
results = get_subscription(key)
|