weco 0.1.8__tar.gz → 0.1.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {weco-0.1.8 → weco-0.1.9}/.github/workflows/release.yml +2 -2
- {weco-0.1.8 → weco-0.1.9}/PKG-INFO +1 -1
- {weco-0.1.8 → weco-0.1.9}/pyproject.toml +1 -1
- {weco-0.1.8 → weco-0.1.9}/tests/test_reasoning.py +6 -2
- {weco-0.1.8 → weco-0.1.9}/weco/client.py +34 -8
- {weco-0.1.8 → weco-0.1.9}/weco/functional.py +20 -4
- {weco-0.1.8 → weco-0.1.9}/weco.egg-info/PKG-INFO +1 -1
- {weco-0.1.8 → weco-0.1.9}/.github/workflows/lint.yml +0 -0
- {weco-0.1.8 → weco-0.1.9}/.gitignore +0 -0
- {weco-0.1.8 → weco-0.1.9}/LICENSE +0 -0
- {weco-0.1.8 → weco-0.1.9}/README.md +0 -0
- {weco-0.1.8 → weco-0.1.9}/assets/weco.svg +0 -0
- {weco-0.1.8 → weco-0.1.9}/examples/cookbook.ipynb +0 -0
- {weco-0.1.8 → weco-0.1.9}/setup.cfg +0 -0
- {weco-0.1.8 → weco-0.1.9}/tests/test_asynchronous.py +0 -0
- {weco-0.1.8 → weco-0.1.9}/tests/test_batching.py +0 -0
- {weco-0.1.8 → weco-0.1.9}/tests/test_synchronous.py +0 -0
- {weco-0.1.8 → weco-0.1.9}/weco/__init__.py +0 -0
- {weco-0.1.8 → weco-0.1.9}/weco/constants.py +0 -0
- {weco-0.1.8 → weco-0.1.9}/weco/utils.py +0 -0
- {weco-0.1.8 → weco-0.1.9}/weco.egg-info/SOURCES.txt +0 -0
- {weco-0.1.8 → weco-0.1.9}/weco.egg-info/dependency_links.txt +0 -0
- {weco-0.1.8 → weco-0.1.9}/weco.egg-info/requires.txt +0 -0
- {weco-0.1.8 → weco-0.1.9}/weco.egg-info/top_level.txt +0 -0
|
@@ -79,7 +79,7 @@ jobs:
|
|
|
79
79
|
GITHUB_TOKEN: ${{ github.token }}
|
|
80
80
|
run: >-
|
|
81
81
|
gh release create
|
|
82
|
-
'v0.1.8'
|
|
82
|
+
'v0.1.9'
|
|
83
83
|
--repo '${{ github.repository }}'
|
|
84
84
|
--notes ""
|
|
85
85
|
- name: Upload artifact signatures to GitHub Release
|
|
@@ -90,5 +90,5 @@ jobs:
|
|
|
90
90
|
# sigstore-produced signatures and certificates.
|
|
91
91
|
run: >-
|
|
92
92
|
gh release upload
|
|
93
|
-
'v0.1.8' dist/**
|
|
93
|
+
'v0.1.9' dist/**
|
|
94
94
|
--repo '${{ github.repository }}'
|
|
@@ -10,7 +10,7 @@ authors = [
|
|
|
10
10
|
]
|
|
11
11
|
description = "A client facing API for interacting with the WeCo AI function builder service."
|
|
12
12
|
readme = "README.md"
|
|
13
|
-
version = "0.1.8"
|
|
13
|
+
version = "0.1.9"
|
|
14
14
|
license = {text = "MIT"}
|
|
15
15
|
requires-python = ">=3.8"
|
|
16
16
|
dependencies = ["asyncio", "httpx[http2]", "pillow"]
|
|
@@ -7,7 +7,8 @@ def assert_query_response(query_response):
|
|
|
7
7
|
assert isinstance(query_response, dict)
|
|
8
8
|
assert isinstance(query_response["output"], dict)
|
|
9
9
|
assert isinstance(query_response["reasoning_steps"], list)
|
|
10
|
-
for step in query_response["reasoning_steps"]:
|
|
10
|
+
for step in query_response["reasoning_steps"]:
|
|
11
|
+
assert isinstance(step, str)
|
|
11
12
|
assert isinstance(query_response["in_tokens"], int)
|
|
12
13
|
assert isinstance(query_response["out_tokens"], int)
|
|
13
14
|
assert isinstance(query_response["latency_ms"], float)
|
|
@@ -24,11 +25,14 @@ def text_reasoning_evaluator():
|
|
|
24
25
|
|
|
25
26
|
def test_text_reasoning_query(text_reasoning_evaluator):
|
|
26
27
|
fn_name, version_number, _ = text_reasoning_evaluator
|
|
27
|
-
query_response = query(
|
|
28
|
+
query_response = query(
|
|
29
|
+
fn_name=fn_name, version_number=version_number, text_input="I love this product!", return_reasoning=True
|
|
30
|
+
)
|
|
28
31
|
|
|
29
32
|
assert_query_response(query_response)
|
|
30
33
|
assert set(query_response["output"].keys()) == {"sentiment", "explanation"}
|
|
31
34
|
|
|
35
|
+
|
|
32
36
|
@pytest.fixture
|
|
33
37
|
def vision_reasoning_evaluator():
|
|
34
38
|
fn_name, version_number, fn_desc = build(
|
|
@@ -68,6 +68,7 @@ class WecoAI:
|
|
|
68
68
|
self.http2 = http2
|
|
69
69
|
self.timeout = timeout
|
|
70
70
|
self.base_url = "https://function.api.weco.ai"
|
|
71
|
+
|
|
71
72
|
# Setup clients
|
|
72
73
|
self.client = httpx.Client(http2=http2, timeout=timeout)
|
|
73
74
|
self.async_client = httpx.AsyncClient(http2=http2, timeout=timeout)
|
|
@@ -396,7 +397,7 @@ class WecoAI:
|
|
|
396
397
|
version_number: Optional[int],
|
|
397
398
|
text_input: Optional[str],
|
|
398
399
|
images_input: Optional[List[str]],
|
|
399
|
-
return_reasoning: Optional[bool]
|
|
400
|
+
return_reasoning: Optional[bool],
|
|
400
401
|
) -> Union[Dict[str, Any], Coroutine[Any, Any, Dict[str, Any]]]:
|
|
401
402
|
"""Internal method to handle both synchronous and asynchronous query requests.
|
|
402
403
|
|
|
@@ -440,7 +441,13 @@ class WecoAI:
|
|
|
440
441
|
|
|
441
442
|
# Make the request
|
|
442
443
|
endpoint = "query"
|
|
443
|
-
data = {
|
|
444
|
+
data = {
|
|
445
|
+
"name": fn_name,
|
|
446
|
+
"text": text_input,
|
|
447
|
+
"images": image_urls,
|
|
448
|
+
"version_number": version_number,
|
|
449
|
+
"return_reasoning": return_reasoning,
|
|
450
|
+
}
|
|
444
451
|
request = self._make_request(endpoint=endpoint, data=data, is_async=is_async)
|
|
445
452
|
|
|
446
453
|
if is_async:
|
|
@@ -460,7 +467,7 @@ class WecoAI:
|
|
|
460
467
|
version_number: Optional[int] = -1,
|
|
461
468
|
text_input: Optional[str] = "",
|
|
462
469
|
images_input: Optional[List[str]] = [],
|
|
463
|
-
return_reasoning: Optional[bool] = False
|
|
470
|
+
return_reasoning: Optional[bool] = False,
|
|
464
471
|
) -> Dict[str, Any]:
|
|
465
472
|
"""Asynchronously queries a function with the given function ID and input.
|
|
466
473
|
|
|
@@ -484,7 +491,12 @@ class WecoAI:
|
|
|
484
491
|
and the latency in milliseconds.
|
|
485
492
|
"""
|
|
486
493
|
return await self._query(
|
|
487
|
-
fn_name=fn_name,
|
|
494
|
+
fn_name=fn_name,
|
|
495
|
+
version_number=version_number,
|
|
496
|
+
text_input=text_input,
|
|
497
|
+
images_input=images_input,
|
|
498
|
+
return_reasoning=return_reasoning,
|
|
499
|
+
is_async=True,
|
|
488
500
|
)
|
|
489
501
|
|
|
490
502
|
def query(
|
|
@@ -493,7 +505,7 @@ class WecoAI:
|
|
|
493
505
|
version_number: Optional[int] = -1,
|
|
494
506
|
text_input: Optional[str] = "",
|
|
495
507
|
images_input: Optional[List[str]] = [],
|
|
496
|
-
return_reasoning: Optional[bool] = False
|
|
508
|
+
return_reasoning: Optional[bool] = False,
|
|
497
509
|
) -> Dict[str, Any]:
|
|
498
510
|
"""Synchronously queries a function with the given function ID and input.
|
|
499
511
|
|
|
@@ -517,11 +529,20 @@ class WecoAI:
|
|
|
517
529
|
and the latency in milliseconds.
|
|
518
530
|
"""
|
|
519
531
|
return self._query(
|
|
520
|
-
fn_name=fn_name,
|
|
532
|
+
fn_name=fn_name,
|
|
533
|
+
version_number=version_number,
|
|
534
|
+
text_input=text_input,
|
|
535
|
+
images_input=images_input,
|
|
536
|
+
return_reasoning=return_reasoning,
|
|
537
|
+
is_async=False,
|
|
521
538
|
)
|
|
522
539
|
|
|
523
540
|
def batch_query(
|
|
524
|
-
self,
|
|
541
|
+
self,
|
|
542
|
+
fn_name: str,
|
|
543
|
+
batch_inputs: List[Dict[str, Any]],
|
|
544
|
+
version_number: Optional[int] = -1,
|
|
545
|
+
return_reasoning: Optional[bool] = False,
|
|
525
546
|
) -> List[Dict[str, Any]]:
|
|
526
547
|
"""Batch queries a function version with a list of inputs.
|
|
527
548
|
|
|
@@ -547,7 +568,12 @@ class WecoAI:
|
|
|
547
568
|
|
|
548
569
|
async def run_queries():
|
|
549
570
|
tasks = list(
|
|
550
|
-
map(
|
|
571
|
+
map(
|
|
572
|
+
lambda fn_input: self.aquery(
|
|
573
|
+
fn_name=fn_name, version_number=version_number, return_reasoning=return_reasoning, **fn_input
|
|
574
|
+
),
|
|
575
|
+
batch_inputs,
|
|
576
|
+
)
|
|
551
577
|
)
|
|
552
578
|
return await asyncio.gather(*tasks)
|
|
553
579
|
|
|
@@ -79,7 +79,13 @@ def query(
|
|
|
79
79
|
and the latency in milliseconds.
|
|
80
80
|
"""
|
|
81
81
|
client = WecoAI(api_key=api_key)
|
|
82
|
-
response = client.query(
|
|
82
|
+
response = client.query(
|
|
83
|
+
fn_name=fn_name,
|
|
84
|
+
version_number=version_number,
|
|
85
|
+
text_input=text_input,
|
|
86
|
+
images_input=images_input,
|
|
87
|
+
return_reasoning=return_reasoning,
|
|
88
|
+
)
|
|
83
89
|
return response
|
|
84
90
|
|
|
85
91
|
|
|
@@ -116,13 +122,21 @@ async def aquery(
|
|
|
116
122
|
"""
|
|
117
123
|
client = WecoAI(api_key=api_key)
|
|
118
124
|
response = await client.aquery(
|
|
119
|
-
fn_name=fn_name,
|
|
125
|
+
fn_name=fn_name,
|
|
126
|
+
version_number=version_number,
|
|
127
|
+
text_input=text_input,
|
|
128
|
+
images_input=images_input,
|
|
129
|
+
return_reasoning=return_reasoning,
|
|
120
130
|
)
|
|
121
131
|
return response
|
|
122
132
|
|
|
123
133
|
|
|
124
134
|
def batch_query(
|
|
125
|
-
fn_name: str,
|
|
135
|
+
fn_name: str,
|
|
136
|
+
batch_inputs: List[Dict[str, Any]],
|
|
137
|
+
version_number: Optional[int] = -1,
|
|
138
|
+
return_reasoning: Optional[bool] = False,
|
|
139
|
+
api_key: Optional[str] = None,
|
|
126
140
|
) -> List[Dict[str, Any]]:
|
|
127
141
|
"""Synchronously queries multiple functions using asynchronous calls internally.
|
|
128
142
|
|
|
@@ -153,5 +167,7 @@ def batch_query(
|
|
|
153
167
|
in the same order as the input queries.
|
|
154
168
|
"""
|
|
155
169
|
client = WecoAI(api_key=api_key)
|
|
156
|
-
responses = client.batch_query(
|
|
170
|
+
responses = client.batch_query(
|
|
171
|
+
fn_name=fn_name, version_number=version_number, batch_inputs=batch_inputs, return_reasoning=return_reasoning
|
|
172
|
+
)
|
|
157
173
|
return responses
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|