aient 1.1.73__py3-none-any.whl → 1.1.75__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
aient/core/models.py CHANGED
@@ -1,4 +1,3 @@
- import json
  from io import IOBase
  from pydantic import BaseModel, Field, model_validator, ConfigDict
  from typing import List, Dict, Optional, Union, Tuple, Literal, Any
aient/core/request.py CHANGED
@@ -2,6 +2,7 @@ import re
  import json
  import httpx
  import base64
+ import asyncio
  import urllib.parse
  from io import IOBase
  from typing import Tuple
@@ -336,11 +337,11 @@ def create_jwt(client_email, private_key):
      segments.append(base64.urlsafe_b64encode(signature).rstrip(b'='))
      return b'.'.join(segments).decode()

- def get_access_token(client_email, private_key):
-     jwt = create_jwt(client_email, private_key)
+ async def get_access_token(client_email, private_key):
+     jwt = await asyncio.to_thread(create_jwt, client_email, private_key)

-     with httpx.Client() as client:
-         response = client.post(
+     async with httpx.AsyncClient() as client:
+         response = await client.post(
              "https://oauth2.googleapis.com/token",
              data={
                  "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer",
@@ -356,7 +357,7 @@ async def get_vertex_gemini_payload(request, engine, provider, api_key=None):
          'Content-Type': 'application/json'
      }
      if provider.get("client_email") and provider.get("private_key"):
-         access_token = get_access_token(provider['client_email'], provider['private_key'])
+         access_token = await get_access_token(provider['client_email'], provider['private_key'])
          headers['Authorization'] = f"Bearer {access_token}"
      if provider.get("project_id"):
          project_id = provider.get("project_id")
@@ -596,7 +597,7 @@ async def get_vertex_claude_payload(request, engine, provider, api_key=None):
          'Content-Type': 'application/json',
      }
      if provider.get("client_email") and provider.get("private_key"):
-         access_token = get_access_token(provider['client_email'], provider['private_key'])
+         access_token = await get_access_token(provider['client_email'], provider['private_key'])
          headers['Authorization'] = f"Bearer {access_token}"
      if provider.get("project_id"):
          project_id = provider.get("project_id")
@@ -972,7 +973,9 @@ async def get_aws_payload(request, engine, provider, api_key=None):

      if provider.get("aws_access_key") and provider.get("aws_secret_key"):
          ACCEPT_HEADER = "application/vnd.amazon.bedrock.payload+json"  # specify acceptance of the Bedrock stream format
-         amz_date, payload_hash, authorization_header = get_signature(payload, original_model, provider.get("aws_access_key"), provider.get("aws_secret_key"), AWS_REGION, HOST, CONTENT_TYPE, ACCEPT_HEADER)
+         amz_date, payload_hash, authorization_header = await asyncio.to_thread(
+             get_signature, payload, original_model, provider.get("aws_access_key"), provider.get("aws_secret_key"), AWS_REGION, HOST, CONTENT_TYPE, ACCEPT_HEADER
+         )
          headers = {
              'Accept': ACCEPT_HEADER,
              'Content-Type': CONTENT_TYPE,
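
The request.py hunks above make credential handling non-blocking: get_access_token becomes a coroutine, the RSA-signed JWT from create_jwt and the AWS SigV4 signature from get_signature are computed in a worker thread via asyncio.to_thread, and the OAuth token exchange switches from httpx.Client to httpx.AsyncClient. A minimal sketch of that pattern, not the package's actual code (sign_assertion is a stand-in for the blocking signing step):

    import asyncio
    import httpx

    def sign_assertion(claims: str) -> str:
        # Stand-in for CPU-bound signing work (building and RSA-signing a JWT).
        # Doing this inline in a coroutine would stall the event loop.
        return claims[::-1]

    async def get_access_token(claims: str) -> dict:
        # Offload the blocking signer to a worker thread.
        assertion = await asyncio.to_thread(sign_assertion, claims)
        # Exchange the signed assertion for a token without blocking other tasks.
        async with httpx.AsyncClient() as client:
            response = await client.post(
                "https://oauth2.googleapis.com/token",
                data={
                    "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer",
                    "assertion": assertion,
                },
            )
            response.raise_for_status()
            return response.json()

Async request builders can then simply await the token, which is exactly what the get_vertex_gemini_payload and get_vertex_claude_payload hunks above do.
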
aient/core/response.py CHANGED
@@ -53,9 +53,9 @@ def gemini_json_poccess(response_str):

      return is_thinking, reasoning_content, content, image_base64, function_call_name, function_full_response, finishReason, blockReason, promptTokenCount, candidatesTokenCount, totalTokenCount

- async def fetch_gemini_response_stream(client, url, headers, payload, model):
+ async def fetch_gemini_response_stream(client, url, headers, payload, model, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_gemini_response_stream")
          if error_message:
              yield error_message
@@ -122,9 +122,9 @@ async def fetch_gemini_response_stream(client, url, headers, payload, model):

      yield "data: [DONE]" + end_of_line

- async def fetch_vertex_claude_response_stream(client, url, headers, payload, model):
+ async def fetch_vertex_claude_response_stream(client, url, headers, payload, model, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_vertex_claude_response_stream")
          if error_message:
              yield error_message
@@ -190,14 +190,14 @@ async def fetch_vertex_claude_response_stream(client, url, headers, payload, mod

      yield "data: [DONE]" + end_of_line

- async def fetch_gpt_response_stream(client, url, headers, payload):
+ async def fetch_gpt_response_stream(client, url, headers, payload, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
      random.seed(timestamp)
      random_str = ''.join(random.choices(string.ascii_letters + string.digits, k=29))
      is_thinking = False
      has_send_thinking = False
      ark_tag = False
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_gpt_response_stream")
          if error_message:
              yield error_message
@@ -306,12 +306,12 @@ async def fetch_gpt_response_stream(client, url, headers, payload):
          yield "data: " + json.dumps(line).strip() + end_of_line
      yield "data: [DONE]" + end_of_line

- async def fetch_azure_response_stream(client, url, headers, payload):
+ async def fetch_azure_response_stream(client, url, headers, payload, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
      is_thinking = False
      has_send_thinking = False
      ark_tag = False
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_azure_response_stream")
          if error_message:
              yield error_message
@@ -362,9 +362,9 @@ async def fetch_azure_response_stream(client, url, headers, payload):
          yield "data: " + json.dumps(line).strip() + end_of_line
      yield "data: [DONE]" + end_of_line

- async def fetch_cloudflare_response_stream(client, url, headers, payload, model):
+ async def fetch_cloudflare_response_stream(client, url, headers, payload, model, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_cloudflare_response_stream")
          if error_message:
              yield error_message
@@ -387,9 +387,9 @@ async def fetch_cloudflare_response_stream(client, url, headers, payload, model)
          yield sse_string
      yield "data: [DONE]" + end_of_line

- async def fetch_cohere_response_stream(client, url, headers, payload, model):
+ async def fetch_cohere_response_stream(client, url, headers, payload, model, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_cohere_response_stream")
          if error_message:
              yield error_message
@@ -410,9 +410,9 @@ async def fetch_cohere_response_stream(client, url, headers, payload, model):
          yield sse_string
      yield "data: [DONE]" + end_of_line

- async def fetch_claude_response_stream(client, url, headers, payload, model):
+ async def fetch_claude_response_stream(client, url, headers, payload, model, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_claude_response_stream")
          if error_message:
              yield error_message
@@ -463,9 +463,9 @@ async def fetch_claude_response_stream(client, url, headers, payload, model):

      yield "data: [DONE]" + end_of_line

- async def fetch_aws_response_stream(client, url, headers, payload, model):
+ async def fetch_aws_response_stream(client, url, headers, payload, model, timeout):
      timestamp = int(datetime.timestamp(datetime.now()))
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
+     async with client.stream('POST', url, headers=headers, json=payload, timeout=timeout) as response:
          error_message = await check_response(response, "fetch_aws_response_stream")
          if error_message:
              yield error_message
@@ -514,13 +514,13 @@ async def fetch_aws_response_stream(client, url, headers, payload, model):

      yield "data: [DONE]" + end_of_line

- async def fetch_response(client, url, headers, payload, engine, model):
+ async def fetch_response(client, url, headers, payload, engine, model, timeout=200):
      response = None
      if payload.get("file"):
          file = payload.pop("file")
-         response = await client.post(url, headers=headers, data=payload, files={"file": file})
+         response = await client.post(url, headers=headers, data=payload, files={"file": file}, timeout=timeout)
      else:
-         response = await client.post(url, headers=headers, json=payload)
+         response = await client.post(url, headers=headers, json=payload, timeout=timeout)
      error_message = await check_response(response, "fetch_response")
      if error_message:
          yield error_message
@@ -625,27 +625,27 @@ async def fetch_response(client, url, headers, payload, engine, model):
      response_json = response.json()
      yield response_json

- async def fetch_response_stream(client, url, headers, payload, engine, model):
+ async def fetch_response_stream(client, url, headers, payload, engine, model, timeout=200):
      if engine == "gemini" or engine == "vertex-gemini":
-         async for chunk in fetch_gemini_response_stream(client, url, headers, payload, model):
+         async for chunk in fetch_gemini_response_stream(client, url, headers, payload, model, timeout):
              yield chunk
      elif engine == "claude" or engine == "vertex-claude":
-         async for chunk in fetch_claude_response_stream(client, url, headers, payload, model):
+         async for chunk in fetch_claude_response_stream(client, url, headers, payload, model, timeout):
              yield chunk
      elif engine == "aws":
-         async for chunk in fetch_aws_response_stream(client, url, headers, payload, model):
+         async for chunk in fetch_aws_response_stream(client, url, headers, payload, model, timeout):
              yield chunk
      elif engine == "gpt" or engine == "openrouter" or engine == "azure-databricks":
-         async for chunk in fetch_gpt_response_stream(client, url, headers, payload):
+         async for chunk in fetch_gpt_response_stream(client, url, headers, payload, timeout):
              yield chunk
      elif engine == "azure":
-         async for chunk in fetch_azure_response_stream(client, url, headers, payload):
+         async for chunk in fetch_azure_response_stream(client, url, headers, payload, timeout):
              yield chunk
      elif engine == "cloudflare":
-         async for chunk in fetch_cloudflare_response_stream(client, url, headers, payload, model):
+         async for chunk in fetch_cloudflare_response_stream(client, url, headers, payload, model, timeout):
              yield chunk
      elif engine == "cohere":
-         async for chunk in fetch_cohere_response_stream(client, url, headers, payload, model):
+         async for chunk in fetch_cohere_response_stream(client, url, headers, payload, model, timeout):
              yield chunk
      else:
-         raise ValueError("Unknown response")
+         raise ValueError("Unknown response")
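
The response.py hunks above all follow one pattern: each streaming helper gains an explicit timeout parameter that is forwarded to the underlying httpx call, and the entry points fetch_response / fetch_response_stream default it to 200 seconds. In httpx, a per-request timeout= overrides the client-wide default, so long streaming responses are no longer capped by whatever timeout the shared client was built with. A minimal illustration of the idea; the URL and function names here are placeholders, not aient's own:

    import asyncio
    import httpx

    async def stream_lines(client: httpx.AsyncClient, url: str, payload: dict, timeout: float = 200):
        # The per-request timeout overrides the client-level default set below.
        async with client.stream("POST", url, json=payload, timeout=timeout) as response:
            response.raise_for_status()
            async for line in response.aiter_lines():
                yield line

    async def main():
        # Calls that omit timeout= still use this 30-second client default.
        async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client:
            # Placeholder endpoint; substitute a real streaming API to run this.
            async for line in stream_lines(client, "https://example.invalid/v1/stream", {"q": "hi"}):
                print(line)

    if __name__ == "__main__":
        asyncio.run(main())

Threading the timeout through the call chain keeps slow upstream streams alive for the generators while leaving every other request on the shared client unaffected.
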
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: aient
- Version: 1.1.73
+ Version: 1.1.75
  Summary: Aient: The Awakening of Agent.
  Requires-Python: >=3.11
  Description-Content-Type: text/markdown
@@ -1,9 +1,9 @@
  aient/__init__.py,sha256=SRfF7oDVlOOAi6nGKiJIUK6B_arqYLO9iSMp-2IZZps,21
  aient/core/__init__.py,sha256=NxjebTlku35S4Dzr16rdSqSTWUvvwEeACe8KvHJnjPg,34
  aient/core/log_config.py,sha256=kz2_yJv1p-o3lUQOwA3qh-LSc3wMHv13iCQclw44W9c,274
- aient/core/models.py,sha256=d4MISNezTSe0ls0-fjuToI2SoT-sk5fWqAJuKVinIlo,7502
- aient/core/request.py,sha256=4FFCwQ7h7b6bqtrA8qw-DPJVXZTj2i1CkYccFeEwUPw,76552
- aient/core/response.py,sha256=tYKWOeexYEhWK4napIAfYjCn2rQ1zpxRvknsBBWrv2M,33206
+ aient/core/models.py,sha256=KMlCRLjtq1wQHZTJGqnbWhPS2cHq6eLdnk7peKDrzR8,7490
+ aient/core/request.py,sha256=vfwi3ZGYp2hQzSJ6mPXJVgcV_uu5AJ_NAL84mLfF8WA,76674
+ aient/core/response.py,sha256=uCQEZY_68neqNbbdr-tL4yy7thqBuSNvV1z4Ezww8o8,33538
  aient/core/utils.py,sha256=D98d5Cy1h4ejKtuxS0EEDtL4YqpaZLB5tuXoVP0IBWQ,28462
  aient/core/test/test_base_api.py,sha256=pWnycRJbuPSXKKU9AQjWrMAX1wiLC_014Qc9hh5C2Pw,524
  aient/core/test/test_geminimask.py,sha256=HFX8jDbNg_FjjgPNxfYaR-0-roUrOO-ND-FVsuxSoiw,13254
@@ -30,8 +30,8 @@ aient/plugins/write_file.py,sha256=Jt8fOEwqhYiSWpCbwfAr1xoi_BmFnx3076GMhuL06uI,3
  aient/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  aient/utils/prompt.py,sha256=UcSzKkFE4-h_1b6NofI6xgk3GoleqALRKY8VBaXLjmI,11311
  aient/utils/scripts.py,sha256=VqtK4RFEx7KxkmcqG3lFDS1DxoNlFFGErEjopVcc8IE,40974
- aient-1.1.73.dist-info/licenses/LICENSE,sha256=XNdbcWldt0yaNXXWB_Bakoqnxb3OVhUft4MgMA_71ds,1051
- aient-1.1.73.dist-info/METADATA,sha256=tBDP7wGDeeRRD-UEww0-UxF5IgbmHhQV4O7qXuDVXOA,4842
- aient-1.1.73.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- aient-1.1.73.dist-info/top_level.txt,sha256=3oXzrP5sAVvyyqabpeq8A2_vfMtY554r4bVE-OHBrZk,6
- aient-1.1.73.dist-info/RECORD,,
+ aient-1.1.75.dist-info/licenses/LICENSE,sha256=XNdbcWldt0yaNXXWB_Bakoqnxb3OVhUft4MgMA_71ds,1051
+ aient-1.1.75.dist-info/METADATA,sha256=nv_h5lJXeM4gcyJ7WRSynJ_giD1ZZXWIC1CwtTn05Ig,4842
+ aient-1.1.75.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ aient-1.1.75.dist-info/top_level.txt,sha256=3oXzrP5sAVvyyqabpeq8A2_vfMtY554r4bVE-OHBrZk,6
+ aient-1.1.75.dist-info/RECORD,,