weco 0.1.6__py3-none-any.whl → 0.1.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- weco/client.py +65 -25
- weco/functional.py +24 -9
- {weco-0.1.6.dist-info → weco-0.1.8.dist-info}/METADATA +8 -4
- weco-0.1.8.dist-info/RECORD +10 -0
- {weco-0.1.6.dist-info → weco-0.1.8.dist-info}/WHEEL +1 -1
- weco-0.1.6.dist-info/RECORD +0 -10
- {weco-0.1.6.dist-info → weco-0.1.8.dist-info}/LICENSE +0 -0
- {weco-0.1.6.dist-info → weco-0.1.8.dist-info}/top_level.txt +0 -0
weco/client.py
CHANGED
@@ -25,14 +25,21 @@ class WecoAI:
     """A client for the WecoAI function builder API that allows users to build and query specialized functions built by LLMs.
     The user must simply provide a task description to build a function, and then query the function with an input to get the result they need.
     Our client supports both synchronous and asynchronous request paradigms and uses HTTP/2 for faster communication with the API.
+    Support for multimodality is included.

     Attributes
     ----------
     api_key : str
         The API key used for authentication.
+
+    timeout : float
+        The timeout for the HTTP requests in seconds. Default is 120.0.
+
+    http2 : bool
+        Whether to use HTTP/2 protocol for the HTTP requests. Default is True.
     """

-    def __init__(self, api_key: str = None, timeout: float = 120.0, http2: bool = True) -> None:
+    def __init__(self, api_key: Union[str, None] = None, timeout: float = 120.0, http2: bool = True) -> None:
         """Initializes the WecoAI client with the provided API key and base URL.

         Parameters
@@ -41,7 +48,7 @@ class WecoAI:
             The API key used for authentication. If not provided, the client will attempt to read it from the environment variable - WECO_API_KEY.

         timeout : float, optional
-            The timeout for the HTTP requests in seconds (default is
+            The timeout for the HTTP requests in seconds (default is 120.0).

         http2 : bool, optional
             Whether to use HTTP/2 protocol for the HTTP requests (default is True).
@@ -146,24 +153,29 @@ class WecoAI:
         for _warning in response.get("warnings", []):
             warnings.warn(_warning)

-        return {
+        returned_response = {
             "output": response["response"],
             "in_tokens": response["num_input_tokens"],
             "out_tokens": response["num_output_tokens"],
             "latency_ms": response["latency_ms"],
         }
+        if "reasoning_steps" in response:
+            returned_response["reasoning_steps"] = response["reasoning_steps"]
+        return returned_response

-    def _build(
+    def _build(
+        self, task_description: str, multimodal: bool, is_async: bool
+    ) -> Union[Tuple[str, int, str], Coroutine[Any, Any, Tuple[str, int, str]]]:
         """Internal method to handle both synchronous and asynchronous build requests.

         Parameters
         ----------
         task_description : str
             A description of the task for which the function is being built.
-
+
         multimodal : bool
             Whether the function is multimodal or not.
-
+
         is_async : bool
             Whether to perform an asynchronous request.

@@ -205,7 +217,7 @@ class WecoAI:
         ----------
         task_description : str
             A description of the task for which the function is being built.
-
+
         multimodal : bool, optional
             Whether the function is multimodal or not (default is False).

@@ -223,7 +235,7 @@ class WecoAI:
         ----------
         task_description : str
             A description of the task for which the function is being built.
-
+
         multimodal : bool, optional
             Whether the function is multimodal or not (default is False).

@@ -378,7 +390,13 @@ class WecoAI:
         return image_info

     def _query(
-        self,
+        self,
+        is_async: bool,
+        fn_name: str,
+        version_number: Optional[int],
+        text_input: Optional[str],
+        images_input: Optional[List[str]],
+        return_reasoning: Optional[bool]
     ) -> Union[Dict[str, Any], Coroutine[Any, Any, Dict[str, Any]]]:
         """Internal method to handle both synchronous and asynchronous query requests.

@@ -394,6 +412,8 @@ class WecoAI:
             The text input to the function.
         images_input : List[str], optional
             A list of image URLs or images encoded in base64 with their metadata to be sent as input to the function.
+        return_reasoning : bool, optional
+            Whether to return reasoning for the output.

         Returns
         -------
@@ -405,8 +425,6 @@ class WecoAI:
         ValueError
             If the input is invalid.
         """
-        warnings.warn("Setting the version number of the function is not yet supported. Currently, the first version of the function will be used i.e., version 0.")
-        version_number = 0
         # Validate the input
         image_info = self._validate_query(text_input=text_input, images_input=images_input)

@@ -422,7 +440,7 @@ class WecoAI:

         # Make the request
         endpoint = "query"
-        data = {"name": fn_name, "text": text_input, "images": image_urls, "version_number": version_number}
+        data = {"name": fn_name, "text": text_input, "images": image_urls, "version_number": version_number, "return_reasoning": return_reasoning}
         request = self._make_request(endpoint=endpoint, data=data, is_async=is_async)

         if is_async:
@@ -437,7 +455,12 @@ class WecoAI:
             return self._process_query_response(response=response)

     async def aquery(
-        self,
+        self,
+        fn_name: str,
+        version_number: Optional[int] = -1,
+        text_input: Optional[str] = "",
+        images_input: Optional[List[str]] = [],
+        return_reasoning: Optional[bool] = False
     ) -> Dict[str, Any]:
         """Asynchronously queries a function with the given function ID and input.

@@ -451,6 +474,8 @@ class WecoAI:
             The text input to the function.
         images_input : List[str], optional
             A list of image URLs or images encoded in base64 with their metadata to be sent as input to the function.
+        return_reasoning : bool, optional
+            Whether to return reasoning for the output. Default is False.

         Returns
         -------
@@ -458,9 +483,18 @@ class WecoAI:
             A dictionary containing the output of the function, the number of input tokens, the number of output tokens,
             and the latency in milliseconds.
         """
-        return await self._query(
-
-
+        return await self._query(
+            fn_name=fn_name, version_number=version_number, text_input=text_input, images_input=images_input, return_reasoning=return_reasoning, is_async=True
+        )
+
+    def query(
+        self,
+        fn_name: str,
+        version_number: Optional[int] = -1,
+        text_input: Optional[str] = "",
+        images_input: Optional[List[str]] = [],
+        return_reasoning: Optional[bool] = False
+    ) -> Dict[str, Any]:
         """Synchronously queries a function with the given function ID and input.

         Parameters
@@ -473,30 +507,36 @@ class WecoAI:
             The text input to the function.
         images_input : List[str], optional
             A list of image URLs or images encoded in base64 with their metadata to be sent as input to the function.
+        return_reasoning : bool, optional
+            Whether to return reasoning for the output. Default is False.

         Returns
         -------
         dict
-
+            A dictionary containing the output of the function, the number of input tokens, the number of output tokens,
             and the latency in milliseconds.
         """
-        return self._query(
+        return self._query(
+            fn_name=fn_name, version_number=version_number, text_input=text_input, images_input=images_input, return_reasoning=return_reasoning, is_async=False
+        )

-    def batch_query(
+    def batch_query(
+        self, fn_name: str, batch_inputs: List[Dict[str, Any]], version_number: Optional[int] = -1, return_reasoning: Optional[bool] = False
+    ) -> List[Dict[str, Any]]:
         """Batch queries a function version with a list of inputs.

         Parameters
         ----------
         fn_name : str
             The name of the function or a list of function names to query.
-
         batch_inputs : List[Dict[str, Any]]
             A list of inputs for the functions to query. The input must be a dictionary containing the data to be processed. e.g.,
             when providing for a text input, the dictionary should be {"text_input": "input text"}, for an image input, the dictionary should be {"images_input": ["url1", "url2", ...]}
             and for a combination of text and image inputs, the dictionary should be {"text_input": "input text", "images_input": ["url1", "url2", ...]}.
-
         version_number : int, optional
             The version number of the function to query. If not provided, the latest version will be used. Pass -1 to use the latest version.
+        return_reasoning : bool, optional
+            Whether to return reasoning for the output. Default is False.

         Returns
         -------
@@ -504,11 +544,11 @@ class WecoAI:
             A list of dictionaries, each containing the output of a function query,
             in the same order as the input queries.
         """
+
         async def run_queries():
-            tasks = list(
-                lambda fn_input: self.aquery(fn_name=fn_name, version_number=version_number, **fn_input),
-
-            ))
+            tasks = list(
+                map(lambda fn_input: self.aquery(fn_name=fn_name, version_number=version_number, return_reasoning=return_reasoning, **fn_input), batch_inputs)
+            )
             return await asyncio.gather(*tasks)

         return asyncio.run(run_queries())
weco/functional.py
CHANGED
@@ -48,7 +48,12 @@ async def abuild(task_description: str, multimodal: bool = False, api_key: str =


 def query(
-    fn_name: str,
+    fn_name: str,
+    version_number: Optional[int] = -1,
+    text_input: Optional[str] = "",
+    images_input: Optional[List[str]] = [],
+    return_reasoning: Optional[bool] = False,
+    api_key: Optional[str] = None,
 ) -> Dict[str, Any]:
     """Queries a function synchronously with the given function ID and input.

@@ -62,6 +67,8 @@ def query(
         The text input to the function.
     images_input : List[str], optional
         A list of image URLs or base64 encoded images to be used as input to the function.
+    return_reasoning : bool, optional
+        A flag to indicate if the reasoning should be returned. Default is False.
     api_key : str
         The API key for the WecoAI service. If not provided, the API key must be set using the environment variable - WECO_API_KEY.

@@ -72,12 +79,17 @@
         and the latency in milliseconds.
     """
     client = WecoAI(api_key=api_key)
-    response = client.query(fn_name=fn_name, version_number=version_number, text_input=text_input, images_input=images_input)
+    response = client.query(fn_name=fn_name, version_number=version_number, text_input=text_input, images_input=images_input, return_reasoning=return_reasoning)
     return response


 async def aquery(
-    fn_name: str,
+    fn_name: str,
+    version_number: Optional[int] = -1,
+    text_input: Optional[str] = "",
+    images_input: Optional[List[str]] = [],
+    return_reasoning: Optional[bool] = False,
+    api_key: Optional[str] = None,
 ) -> Dict[str, Any]:
     """Queries a function asynchronously with the given function ID and input.

@@ -91,6 +103,8 @@ async def aquery(
         The text input to the function.
     images_input : List[str], optional
         A list of image URLs to be used as input to the function.
+    return_reasoning : bool, optional
+        A flag to indicate if the reasoning should be returned. Default is False.
     api_key : str
         The API key for the WecoAI service. If not provided, the API key must be set using the environment variable - WECO_API_KEY.

@@ -101,12 +115,14 @@
         and the latency in milliseconds.
     """
     client = WecoAI(api_key=api_key)
-    response = await client.aquery(
+    response = await client.aquery(
+        fn_name=fn_name, version_number=version_number, text_input=text_input, images_input=images_input, return_reasoning=return_reasoning
+    )
     return response


 def batch_query(
-    fn_name: str, batch_inputs: List[Dict[str, Any]], version_number: Optional[int] = -1, api_key: Optional[str] = None
+    fn_name: str, batch_inputs: List[Dict[str, Any]], version_number: Optional[int] = -1, return_reasoning: Optional[bool] = False, api_key: Optional[str] = None
 ) -> List[Dict[str, Any]]:
     """Synchronously queries multiple functions using asynchronous calls internally.

@@ -119,15 +135,14 @@ def batch_query(
         The name of the function or a list of function names to query.
         Note that if a single function name is provided, it will be used for all queries.
         If a list of function names is provided, the length must match the number of queries.
-
     batch_inputs : List[str]
         A list of inputs for the functions to query. The input must be a dictionary containing the data to be processed. e.g.,
         when providing for a text input, the dictionary should be {"text_input": "input text"}, for an image input, the dictionary should be {"images_input": ["url1", "url2", ...]}
         and for a combination of text and image inputs, the dictionary should be {"text_input": "input text", "images_input": ["url1", "url2", ...]}.
-
     version_number : int, optional
         The version number of the function to query. If not provided, the latest version is used. Default is -1 for the same behavior.
-
+    return_reasoning : bool, optional
+        A flag to indicate if the reasoning should be returned. Default is False.
     api_key : str, optional
         The API key for the WecoAI service. If not provided, the API key must be set using the environment variable - WECO_API_KEY.

@@ -138,5 +153,5 @@ def batch_query(
         in the same order as the input queries.
     """
     client = WecoAI(api_key=api_key)
-    responses = client.batch_query(fn_name=fn_name, version_number=version_number, batch_inputs=batch_inputs)
+    responses = client.batch_query(fn_name=fn_name, version_number=version_number, batch_inputs=batch_inputs, return_reasoning=return_reasoning)
     return responses
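The functional wrappers mirror the client change: each helper now accepts `return_reasoning` and forwards it to the underlying `WecoAI` client. A hedged sketch of the synchronous and asynchronous helpers, assuming they are importable at package level (e.g. `weco.query` / `weco.aquery`) and that a function named "my_function" already exists:

```python
# Sketch only: assumes the helpers defined in weco/functional.py are re-exported
# by the package and that "my_function" has already been built.
import asyncio

from weco import aquery, query

# Synchronous helper: builds a client, queries once, returns the response dict.
sync_result = query(
    fn_name="my_function",
    text_input="Is this email spam? 'You have won a prize...'",
    return_reasoning=True,
)

# Asynchronous helper with the same keyword arguments.
async def main() -> dict:
    return await aquery(
        fn_name="my_function",
        text_input="Is this email spam? 'You have won a prize...'",
        return_reasoning=True,
    )

async_result = asyncio.run(main())
```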
{weco-0.1.6.dist-info → weco-0.1.8.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: weco
-Version: 0.1.6
+Version: 0.1.8
 Summary: A client facing API for interacting with the WeCo AI function builder service.
 Author-email: WeCo AI Team <dhruv@weco.ai>
 License: MIT
@@ -52,12 +52,16 @@ pip install weco
 ```

 ## Features
+- Synchronous & Asynchronous client.
+- Batch API
+- Multimodality (Language & Vision)
+- Interpretability (view the reasoning behind outputs)
+
+
+## What We Offer

 - The **build** function enables quick and easy prototyping of new functions via LLMs through just natural language. We encourage users to do this through our [web console](https://weco-app.vercel.app/function) for maximum control and ease of use, however, you can also do this through our API as shown in [here](examples/cookbook.ipynb).
 - The **query** function allows you to test and use the newly created function in your own code.
-- We offer asynchronous versions of the above clients.
-- We provide a **batch_query** functions that allows users to batch functions for various inputs as well as multiple inputs for the same function in a query. This is helpful to make a large number of queries more efficiently.
-- We also offer multimodality capabilities. You can now query our client with both **language** AND **vision** inputs!

 We provide both services in two ways:
 - `weco.WecoAI` client to be used when you want to maintain the same client service across a portion of code. This is better for dense service usage.
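The new Features bullets (Batch API, Interpretability) map onto `batch_query` and `return_reasoning` in the code above. A possible end-to-end use of the batch path; "sentiment-classifier" is a placeholder function name, and the inputs follow the `batch_inputs` format documented in client.py and functional.py:

```python
# Illustrative only: the function name is hypothetical; batch_inputs follows the
# {"text_input": ...} / {"images_input": [...]} shape from the docstrings above.
from weco import WecoAI

client = WecoAI()

batch_inputs = [
    {"text_input": "The battery lasts all day, love it."},
    {"text_input": "Stopped working after a week."},
]

responses = client.batch_query(
    fn_name="sentiment-classifier",
    batch_inputs=batch_inputs,
    return_reasoning=True,
)

# Responses come back in the same order as batch_inputs.
for response in responses:
    print(response["output"], response.get("reasoning_steps"))
```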
weco-0.1.8.dist-info/RECORD
ADDED

@@ -0,0 +1,10 @@
+weco/__init__.py,sha256=qiKpnrm6t0n0bpAtXEKJO1Yz2xYXnJJRZBWt-cH7DdU,168
+weco/client.py,sha256=l0H62JxDsfiF4vW2v5GEUhf6IEWX3m9_oFy7A-Wf_wI,22447
+weco/constants.py,sha256=eoAq-9qN2aZrqyIWdrb3V1zomV5kp80PfxxoPoQNMNI,167
+weco/functional.py,sha256=hFUFCWIlWDadaPAFpjl0cLF0ExysTYuld8Uwza_hyqM,6853
+weco/utils.py,sha256=UUSw6ocqWdlSmIXVcH66DAL4NuLU2rFOyviD8aTWsv0,4371
+weco-0.1.8.dist-info/LICENSE,sha256=NvpxfBuSajszAczWBGKxhHe4gsvil1H63zmu8xXZdL0,1064
+weco-0.1.8.dist-info/METADATA,sha256=olHH_wAex7_djdDwleCGCoQnwXTAjTwOSLHo0SWG1JU,5716
+weco-0.1.8.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
+weco-0.1.8.dist-info/top_level.txt,sha256=F0N7v6e2zBSlsorFv-arAq2yDxQbzX3KVO8GxYhPUeE,5
+weco-0.1.8.dist-info/RECORD,,
weco-0.1.6.dist-info/RECORD
DELETED
@@ -1,10 +0,0 @@
-weco/__init__.py,sha256=qiKpnrm6t0n0bpAtXEKJO1Yz2xYXnJJRZBWt-cH7DdU,168
-weco/client.py,sha256=JNpIJz-_YtWqaZHJUldK3tFI1u7wcuU65xxyRUJOdog,21322
-weco/constants.py,sha256=eoAq-9qN2aZrqyIWdrb3V1zomV5kp80PfxxoPoQNMNI,167
-weco/functional.py,sha256=gckeXFVouy4wLHj0uLwxQkRNUj0urldk1f5ZDZk4yhY,6209
-weco/utils.py,sha256=UUSw6ocqWdlSmIXVcH66DAL4NuLU2rFOyviD8aTWsv0,4371
-weco-0.1.6.dist-info/LICENSE,sha256=NvpxfBuSajszAczWBGKxhHe4gsvil1H63zmu8xXZdL0,1064
-weco-0.1.6.dist-info/METADATA,sha256=CLrbjmTuQJHzW0SUIBkah1rdU7P3Ra9PHFuf_eYd_M0,5957
-weco-0.1.6.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
-weco-0.1.6.dist-info/top_level.txt,sha256=F0N7v6e2zBSlsorFv-arAq2yDxQbzX3KVO8GxYhPUeE,5
-weco-0.1.6.dist-info/RECORD,,
{weco-0.1.6.dist-info → weco-0.1.8.dist-info}/LICENSE
File without changes

{weco-0.1.6.dist-info → weco-0.1.8.dist-info}/top_level.txt
File without changes