isaacus 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. isaacus/__init__.py +3 -1
  2. isaacus/_base_client.py +40 -12
  3. isaacus/_client.py +17 -9
  4. isaacus/_compat.py +48 -48
  5. isaacus/_files.py +4 -4
  6. isaacus/_models.py +51 -45
  7. isaacus/_qs.py +7 -7
  8. isaacus/_types.py +53 -12
  9. isaacus/_utils/__init__.py +9 -2
  10. isaacus/_utils/_compat.py +45 -0
  11. isaacus/_utils/_datetime_parse.py +136 -0
  12. isaacus/_utils/_transform.py +13 -3
  13. isaacus/_utils/_typing.py +6 -1
  14. isaacus/_utils/_utils.py +4 -5
  15. isaacus/_version.py +1 -1
  16. isaacus/resources/__init__.py +14 -0
  17. isaacus/resources/classifications/universal.py +17 -17
  18. isaacus/resources/embeddings.py +246 -0
  19. isaacus/resources/extractions/qa.py +23 -21
  20. isaacus/resources/rerankings.py +19 -19
  21. isaacus/types/__init__.py +3 -1
  22. isaacus/types/classifications/__init__.py +1 -1
  23. isaacus/types/classifications/{universal_classification.py → universal_classification_response.py} +2 -2
  24. isaacus/types/classifications/universal_create_params.py +4 -2
  25. isaacus/types/embedding_create_params.py +49 -0
  26. isaacus/types/embedding_response.py +31 -0
  27. isaacus/types/extractions/__init__.py +1 -1
  28. isaacus/types/extractions/{answer_extraction.py → answer_extraction_response.py} +2 -2
  29. isaacus/types/extractions/qa_create_params.py +7 -4
  30. isaacus/types/reranking_create_params.py +4 -2
  31. isaacus/types/{reranking.py → reranking_response.py} +2 -2
  32. {isaacus-0.8.0.dist-info → isaacus-0.9.0.dist-info}/METADATA +53 -40
  33. isaacus-0.9.0.dist-info/RECORD +52 -0
  34. isaacus-0.8.0.dist-info/RECORD +0 -47
  35. {isaacus-0.8.0.dist-info → isaacus-0.9.0.dist-info}/WHEEL +0 -0
  36. {isaacus-0.8.0.dist-info → isaacus-0.9.0.dist-info}/licenses/LICENSE +0 -0
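
Taken together, the headline changes in 0.9.0 are a new embeddings resource (isaacus/resources/embeddings.py plus the EmbeddingCreateParams and EmbeddingResponse types) and the renaming of the response models: Reranking → RerankingResponse, UniversalClassification → UniversalClassificationResponse, and AnswerExtraction → AnswerExtractionResponse. A minimal sketch of the import changes implied by the re-exports in the `__init__.py` diffs below, for code that imported the old names (illustrative only, not part of the diff):

```python
# Sketch of type imports before and after the 0.9.0 renames, based on the
# re-exports shown in the __init__.py diffs below.

# 0.8.0
# from isaacus.types import Reranking
# from isaacus.types.classifications import UniversalClassification
# from isaacus.types.extractions import AnswerExtraction

# 0.9.0
from isaacus.types import EmbeddingResponse, RerankingResponse
from isaacus.types.classifications import UniversalClassificationResponse
from isaacus.types.extractions import AnswerExtractionResponse
```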

isaacus/resources/rerankings.py CHANGED
@@ -2,13 +2,13 @@

  from __future__ import annotations

- from typing import List, Optional
+ from typing import Optional
  from typing_extensions import Literal

  import httpx

  from ..types import reranking_create_params
- from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+ from .._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
  from .._utils import maybe_transform, async_maybe_transform
  from .._compat import cached_property
  from .._resource import SyncAPIResource, AsyncAPIResource
@@ -19,7 +19,7 @@ from .._response import (
  async_to_streamed_response_wrapper,
  )
  from .._base_client import make_request_options
- from ..types.reranking import Reranking
+ from ..types.reranking_response import RerankingResponse

  __all__ = ["RerankingsResource", "AsyncRerankingsResource"]

@@ -49,18 +49,18 @@ class RerankingsResource(SyncAPIResource):
  *,
  model: Literal["kanon-universal-classifier", "kanon-universal-classifier-mini"],
  query: str,
- texts: List[str],
- chunking_options: Optional[reranking_create_params.ChunkingOptions] | NotGiven = NOT_GIVEN,
- is_iql: bool | NotGiven = NOT_GIVEN,
- scoring_method: Literal["auto", "chunk_max", "chunk_avg", "chunk_min"] | NotGiven = NOT_GIVEN,
- top_n: Optional[int] | NotGiven = NOT_GIVEN,
+ texts: SequenceNotStr[str],
+ chunking_options: Optional[reranking_create_params.ChunkingOptions] | Omit = omit,
+ is_iql: bool | Omit = omit,
+ scoring_method: Literal["auto", "chunk_max", "chunk_avg", "chunk_min"] | Omit = omit,
+ top_n: Optional[int] | Omit = omit,
  # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
  # The extra values given here take precedence over values defined on the client or passed to this method.
  extra_headers: Headers | None = None,
  extra_query: Query | None = None,
  extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Reranking:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> RerankingResponse:
  """
  Rerank legal documents by their relevance to a query with an Isaacus legal AI
  reranker.
@@ -131,7 +131,7 @@ class RerankingsResource(SyncAPIResource):
  options=make_request_options(
  extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
  ),
- cast_to=Reranking,
+ cast_to=RerankingResponse,
  )


@@ -160,18 +160,18 @@ class AsyncRerankingsResource(AsyncAPIResource):
  *,
  model: Literal["kanon-universal-classifier", "kanon-universal-classifier-mini"],
  query: str,
- texts: List[str],
- chunking_options: Optional[reranking_create_params.ChunkingOptions] | NotGiven = NOT_GIVEN,
- is_iql: bool | NotGiven = NOT_GIVEN,
- scoring_method: Literal["auto", "chunk_max", "chunk_avg", "chunk_min"] | NotGiven = NOT_GIVEN,
- top_n: Optional[int] | NotGiven = NOT_GIVEN,
+ texts: SequenceNotStr[str],
+ chunking_options: Optional[reranking_create_params.ChunkingOptions] | Omit = omit,
+ is_iql: bool | Omit = omit,
+ scoring_method: Literal["auto", "chunk_max", "chunk_avg", "chunk_min"] | Omit = omit,
+ top_n: Optional[int] | Omit = omit,
  # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
  # The extra values given here take precedence over values defined on the client or passed to this method.
  extra_headers: Headers | None = None,
  extra_query: Query | None = None,
  extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Reranking:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> RerankingResponse:
  """
  Rerank legal documents by their relevance to a query with an Isaacus legal AI
  reranker.
@@ -242,7 +242,7 @@ class AsyncRerankingsResource(AsyncAPIResource):
  options=make_request_options(
  extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
  ),
- cast_to=Reranking,
+ cast_to=RerankingResponse,
  )

isaacus/types/__init__.py CHANGED
@@ -2,5 +2,7 @@

  from __future__ import annotations

- from .reranking import Reranking as Reranking
+ from .embedding_response import EmbeddingResponse as EmbeddingResponse
+ from .reranking_response import RerankingResponse as RerankingResponse
+ from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams
  from .reranking_create_params import RerankingCreateParams as RerankingCreateParams

isaacus/types/classifications/__init__.py CHANGED
@@ -3,4 +3,4 @@
  from __future__ import annotations

  from .universal_create_params import UniversalCreateParams as UniversalCreateParams
- from .universal_classification import UniversalClassification as UniversalClassification
+ from .universal_classification_response import UniversalClassificationResponse as UniversalClassificationResponse

isaacus/types/classifications/{universal_classification.py → universal_classification_response.py} RENAMED
@@ -4,7 +4,7 @@ from typing import List, Optional

  from ..._models import BaseModel

- __all__ = ["UniversalClassification", "Classification", "ClassificationChunk", "Usage"]
+ __all__ = ["UniversalClassificationResponse", "Classification", "ClassificationChunk", "Usage"]


  class ClassificationChunk(BaseModel):
@@ -72,7 +72,7 @@ class Usage(BaseModel):
  """The number of tokens inputted to the model."""


- class UniversalClassification(BaseModel):
+ class UniversalClassificationResponse(BaseModel):
  classifications: List[Classification]
  """
  The classifications of the texts, by relevance to the query, in order from

isaacus/types/classifications/universal_create_params.py CHANGED
@@ -2,9 +2,11 @@

  from __future__ import annotations

- from typing import List, Optional
+ from typing import Optional
  from typing_extensions import Literal, Required, TypedDict

+ from ..._types import SequenceNotStr
+
  __all__ = ["UniversalCreateParams", "ChunkingOptions"]


@@ -26,7 +28,7 @@ class UniversalCreateParams(TypedDict, total=False):
  the maximum input length of the universal classifier.
  """

- texts: Required[List[str]]
+ texts: Required[SequenceNotStr[str]]
  """The texts to classify.

  Each text must contain at least one non-whitespace character.

isaacus/types/embedding_create_params.py ADDED
@@ -0,0 +1,49 @@
+ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+ from __future__ import annotations
+
+ from typing import Union, Optional
+ from typing_extensions import Literal, Required, TypedDict
+
+ from .._types import SequenceNotStr
+
+ __all__ = ["EmbeddingCreateParams"]
+
+
+ class EmbeddingCreateParams(TypedDict, total=False):
+ model: Required[Literal["kanon-2-embedder"]]
+ """
+ The ID of the [model](https://docs.isaacus.com/models#embedding) to use for
+ embedding.
+ """
+
+ texts: Required[Union[SequenceNotStr[str], str]]
+ """The text or array of texts to embed.
+
+ Each text must contain at least one non-whitespace character.
+
+ No more than 128 texts can be embedded in a single request.
+ """
+
+ dimensions: Optional[int]
+ """A whole number greater than or equal to 1."""
+
+ overflow_strategy: Optional[Literal["drop_end"]]
+ """The strategy to employ when content exceeds the model's maximum input length.
+
+ `drop_end`, which is the default setting, drops tokens from the end of the
+ content exceeding the limit.
+
+ If `null`, an error will be raised if any content exceeds the model's maximum
+ input length.
+ """
+
+ task: Optional[Literal["retrieval/query", "retrieval/document"]]
+ """The task the embeddings will be used for.
+
+ `retrieval/query` is meant for queries and statements, and `retrieval/document`
+ is meant for anything to be retrieved using query embeddings.
+
+ If `null`, which is the default setting, embeddings will not be optimized for
+ any particular task.
+ """
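
These parameters back the new embeddings resource. A minimal usage sketch, mirroring the README example further down in this diff; passing `task` as a keyword argument is an assumption based on EmbeddingCreateParams above (the full method signature lives in isaacus/resources/embeddings.py, which is not shown here):

```python
import os

from isaacus import Isaacus

client = Isaacus(api_key=os.environ.get("ISAACUS_API_KEY"))  # default env var per the README

embedding_response = client.embeddings.create(
    model="kanon-2-embedder",
    texts=[
        "Are restraints of trade enforceable under English law?",
        "What is a non-compete clause?",
    ],
    task="retrieval/query",  # assumed keyword, taken from the TypedDict above; optional
)
```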

isaacus/types/embedding_response.py ADDED
@@ -0,0 +1,31 @@
+ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+ from typing import List
+
+ from .._models import BaseModel
+
+ __all__ = ["EmbeddingResponse", "Embedding", "Usage"]
+
+
+ class Embedding(BaseModel):
+ embedding: List[float]
+ """The embedding of the content represented as an array of floating point numbers."""
+
+ index: int
+ """
+ The position of the content in the input array of contents, starting from `0`
+ (and, therefore, ending at the number of contents minus `1`).
+ """
+
+
+ class Usage(BaseModel):
+ input_tokens: int
+ """The number of tokens inputted to the model."""
+
+
+ class EmbeddingResponse(BaseModel):
+ embeddings: List[Embedding]
+ """The embeddings of the inputs."""
+
+ usage: Usage
+ """Statistics about the usage of resources in the process of embedding the inputs."""
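
A short sketch of reading the fields defined above from an EmbeddingResponse; the create call repeats the README example from later in this diff:

```python
from isaacus import Isaacus

client = Isaacus()  # reads ISAACUS_API_KEY from the environment by default

embedding_response = client.embeddings.create(
    model="kanon-2-embedder",
    texts=["What is a non-compete clause?"],
)

# Each Embedding pairs a vector with the index of the input it belongs to.
for item in embedding_response.embeddings:
    print(item.index, len(item.embedding))

# Token accounting for the request.
print(embedding_response.usage.input_tokens)
```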

isaacus/types/extractions/__init__.py CHANGED
@@ -3,4 +3,4 @@
  from __future__ import annotations

  from .qa_create_params import QaCreateParams as QaCreateParams
- from .answer_extraction import AnswerExtraction as AnswerExtraction
+ from .answer_extraction_response import AnswerExtractionResponse as AnswerExtractionResponse

isaacus/types/extractions/{answer_extraction.py → answer_extraction_response.py} RENAMED
@@ -4,7 +4,7 @@ from typing import List

  from ..._models import BaseModel

- __all__ = ["AnswerExtraction", "Extraction", "ExtractionAnswer", "Usage"]
+ __all__ = ["AnswerExtractionResponse", "Extraction", "ExtractionAnswer", "Usage"]


  class ExtractionAnswer(BaseModel):
@@ -57,7 +57,7 @@ class Usage(BaseModel):
  """The number of tokens inputted to the model."""


- class AnswerExtraction(BaseModel):
+ class AnswerExtractionResponse(BaseModel):
  extractions: List[Extraction]
  """
  The results of extracting answers from the texts, ordered from highest to lowest

isaacus/types/extractions/qa_create_params.py CHANGED
@@ -2,17 +2,20 @@

  from __future__ import annotations

- from typing import List, Optional
+ from typing import Optional
  from typing_extensions import Literal, Required, TypedDict

+ from ..._types import SequenceNotStr
+
  __all__ = ["QaCreateParams", "ChunkingOptions"]


  class QaCreateParams(TypedDict, total=False):
  model: Required[Literal["kanon-answer-extractor", "kanon-answer-extractor-mini"]]
  """
- The ID of the [model](https://docs.isaacus.com/models#extractive-qa) to use for
- extractive question answering.
+ The ID of the
+ [model](https://docs.isaacus.com/models#extractive-question-answering) to use
+ for extractive question answering.
  """

  query: Required[str]
@@ -24,7 +27,7 @@ class QaCreateParams(TypedDict, total=False):
  long that it exceeds the maximum input length of the model.
  """

- texts: Required[List[str]]
+ texts: Required[SequenceNotStr[str]]
  """The texts to search for the answer in and extract the answer from.

  There must be at least one text.

isaacus/types/reranking_create_params.py CHANGED
@@ -2,9 +2,11 @@

  from __future__ import annotations

- from typing import List, Optional
+ from typing import Optional
  from typing_extensions import Literal, Required, TypedDict

+ from .._types import SequenceNotStr
+
  __all__ = ["RerankingCreateParams", "ChunkingOptions"]


@@ -24,7 +26,7 @@ class RerankingCreateParams(TypedDict, total=False):
  maximum input length of the reranker.
  """

- texts: Required[List[str]]
+ texts: Required[SequenceNotStr[str]]
  """The texts to rerank.

  There must be at least one text.
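
The switch from List[str] to SequenceNotStr[str] for `texts` (here and in the other create-params modules above) is a typing-level change: any non-string sequence of strings, such as a tuple, should now satisfy the annotation, while a bare str is meant to be rejected by type checkers. A small sketch against the exported TypedDict; the second text is a hypothetical example:

```python
from isaacus.types import RerankingCreateParams

# Under 0.9.0, a tuple of strings is acceptable wherever SequenceNotStr[str]
# is expected; existing code that passes lists continues to type-check.
params: RerankingCreateParams = {
    "model": "kanon-universal-classifier",
    "query": "This is a confidentiality clause.",
    "texts": (
        "I agree not to tell anyone about the document.",
        "This agreement is governed by the laws of England and Wales.",  # hypothetical
    ),
}
```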

isaacus/types/{reranking.py → reranking_response.py} RENAMED
@@ -4,7 +4,7 @@ from typing import List

  from .._models import BaseModel

- __all__ = ["Reranking", "Result", "Usage"]
+ __all__ = ["RerankingResponse", "Result", "Usage"]


  class Result(BaseModel):
@@ -26,7 +26,7 @@ class Usage(BaseModel):
  """The number of tokens inputted to the model."""


- class Reranking(BaseModel):
+ class RerankingResponse(BaseModel):
  results: List[Result]
  """
  The rerankings of the texts, by relevance to the query, in order from highest to

{isaacus-0.8.0.dist-info → isaacus-0.9.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: isaacus
- Version: 0.8.0
+ Version: 0.9.0
  Summary: The official Python library for the isaacus API
  Project-URL: Homepage, https://github.com/isaacus-dev/isaacus-python
  Project-URL: Repository, https://github.com/isaacus-dev/isaacus-python
@@ -67,12 +67,14 @@ client = Isaacus(
  api_key=os.environ.get("ISAACUS_API_KEY"), # This is the default and can be omitted
  )

- universal_classification = client.classifications.universal.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ embedding_response = client.embeddings.create(
+ model="kanon-2-embedder",
+ texts=[
+ "Are restraints of trade enforceable under English law?",
+ "What is a non-compete clause?",
+ ],
  )
- print(universal_classification.classifications)
+ print(embedding_response.embeddings)
  ```

  While you can provide an `api_key` keyword argument,
@@ -95,12 +97,14 @@ client = AsyncIsaacus(


  async def main() -> None:
- universal_classification = await client.classifications.universal.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ embedding_response = await client.embeddings.create(
+ model="kanon-2-embedder",
+ texts=[
+ "Are restraints of trade enforceable under English law?",
+ "What is a non-compete clause?",
+ ],
  )
- print(universal_classification.classifications)
+ print(embedding_response.embeddings)


  asyncio.run(main())
@@ -132,12 +136,14 @@ async def main() -> None:
  api_key="My API Key",
  http_client=DefaultAioHttpClient(),
  ) as client:
- universal_classification = await client.classifications.universal.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ embedding_response = await client.embeddings.create(
+ model="kanon-2-embedder",
+ texts=[
+ "Are restraints of trade enforceable under English law?",
+ "What is a non-compete clause?",
+ ],
  )
- print(universal_classification.classifications)
+ print(embedding_response.embeddings)


  asyncio.run(main())
@@ -161,7 +167,7 @@ from isaacus import Isaacus

  client = Isaacus()

- universal_classification = client.classifications.universal.create(
+ universal_classification_response = client.classifications.universal.create(
  model="kanon-universal-classifier",
  query="This is a confidentiality clause.",
  texts=["I agree not to tell anyone about the document."],
@@ -171,7 +177,7 @@ universal_classification = client.classifications.universal.create(
  "size": 512,
  },
  )
- print(universal_classification.classifications)
+ print(universal_classification_response.classifications)
  ```

  ## Handling errors
@@ -190,10 +196,12 @@ from isaacus import Isaacus
  client = Isaacus()

  try:
- client.classifications.universal.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ client.embeddings.create(
+ model="kanon-2-embedder",
+ texts=[
+ "Are restraints of trade enforceable under English law?",
+ "What is a non-compete clause?",
+ ],
  )
  except isaacus.APIConnectionError as e:
  print("The server could not be reached")
@@ -237,10 +245,12 @@ client = Isaacus(
  )

  # Or, configure per-request:
- client.with_options(max_retries=5).classifications.universal.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ client.with_options(max_retries=5).embeddings.create(
+ model="kanon-2-embedder",
+ texts=[
+ "Are restraints of trade enforceable under English law?",
+ "What is a non-compete clause?",
+ ],
  )
  ```

@@ -264,10 +274,12 @@ client = Isaacus(
  )

  # Override per-request:
- client.with_options(timeout=5.0).classifications.universal.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ client.with_options(timeout=5.0).embeddings.create(
+ model="kanon-2-embedder",
+ texts=[
+ "Are restraints of trade enforceable under English law?",
+ "What is a non-compete clause?",
+ ],
  )
  ```

@@ -309,15 +321,14 @@ The "raw" Response object can be accessed by prefixing `.with_raw_response.` to
  from isaacus import Isaacus

  client = Isaacus()
- response = client.classifications.universal.with_raw_response.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ response = client.embeddings.with_raw_response.create(
+ model="kanon-2-embedder",
+ texts=["Are restraints of trade enforceable under English law?", "What is a non-compete clause?"],
  )
  print(response.headers.get('X-My-Header'))

- universal = response.parse() # get the object that `classifications.universal.create()` would have returned
- print(universal.classifications)
+ embedding = response.parse() # get the object that `embeddings.create()` would have returned
+ print(embedding.embeddings)
  ```

  These methods return an [`APIResponse`](https://github.com/isaacus-dev/isaacus-python/tree/main/src/isaacus/_response.py) object.
@@ -331,10 +342,12 @@ The above interface eagerly reads the full response body when you make the reque
  To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods.

  ```python
- with client.classifications.universal.with_streaming_response.create(
- model="kanon-universal-classifier",
- query="This is a confidentiality clause.",
- texts=["I agree not to tell anyone about the document."],
+ with client.embeddings.with_streaming_response.create(
+ model="kanon-2-embedder",
+ texts=[
+ "Are restraints of trade enforceable under English law?",
+ "What is a non-compete clause?",
+ ],
  ) as response:
  print(response.headers.get("X-My-Header"))


isaacus-0.9.0.dist-info/RECORD ADDED
@@ -0,0 +1,52 @@
+ isaacus/__init__.py,sha256=wtI0vXNsVgND6Lmq0G6l2iQALnXyM7HqyL9C6gOiaFE,2633
+ isaacus/_base_client.py,sha256=Az9XYe7zI4rpf8OKfALV4Yd1yFTPbx7jm_Fv-me8_w4,67048
+ isaacus/_client.py,sha256=EOZacvJHWvcvEyvpjPGFiwr3egLvFQEwWDiVnJY_veA,17149
+ isaacus/_compat.py,sha256=DQBVORjFb33zch24jzkhM14msvnzY7mmSmgDLaVFUM8,6562
+ isaacus/_constants.py,sha256=S14PFzyN9-I31wiV7SmIlL5Ga0MLHxdvegInGdXH7tM,462
+ isaacus/_exceptions.py,sha256=L82uluhizzc94VydHIaJkNxkcG-2DAe74tNhrE2eN2A,3222
+ isaacus/_files.py,sha256=KnEzGi_O756MvKyJ4fOCW_u3JhOeWPQ4RsmDvqihDQU,3545
+ isaacus/_models.py,sha256=lKnskYPONAWDvWo8tmbbVk7HmG7UOsI0Nve0vSMmkRc,30452
+ isaacus/_qs.py,sha256=craIKyvPktJ94cvf9zn8j8ekG9dWJzhWv0ob34lIOv4,4828
+ isaacus/_resource.py,sha256=iP_oYhz5enCI58mK7hlwLoPMPh4Q5s8-KBv-jGfv2aM,1106
+ isaacus/_response.py,sha256=aXLF5ia58bjjQXTxY574lh7JfKXiGL2tDTX09klm8lw,28794
+ isaacus/_streaming.py,sha256=tMBfwrfEFWm0v7vWFgjn_lizsoD70lPkYigIBuADaCM,10104
+ isaacus/_types.py,sha256=Bc2gbOMQA8Un1hMX-lkCISJ0_WFoL7iXMs9l9siVwEg,7237
+ isaacus/_version.py,sha256=ZkqD0WJK4erB_yaQT_LZ-xcMaownzF36jZcBFywvSn4,159
+ isaacus/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ isaacus/_utils/__init__.py,sha256=7fch0GT9zpNnErbciSpUNa-SjTxxjY6kxHxKMOM4AGs,2305
+ isaacus/_utils/_compat.py,sha256=D8gtAvjJQrDWt9upS0XaG9Rr5l1QhiAx_I_1utT_tt0,1195
+ isaacus/_utils/_datetime_parse.py,sha256=bABTs0Bc6rabdFvnIwXjEhWL15TcRgWZ_6XGTqN8xUk,4204
+ isaacus/_utils/_logs.py,sha256=rwa1Yzjbs2JaFn9KQ06rH5c_GSNa--BVwWnWhvvT1tY,777
+ isaacus/_utils/_proxy.py,sha256=aglnj2yBTDyGX9Akk2crZHrl10oqRmceUy2Zp008XEs,1975
+ isaacus/_utils/_reflection.py,sha256=ZmGkIgT_PuwedyNBrrKGbxoWtkpytJNU1uU4QHnmEMU,1364
+ isaacus/_utils/_resources_proxy.py,sha256=vW2q6wobLs4JH9DnlVsdaotKEzn5bWqqe8WhNTAOv_k,594
+ isaacus/_utils/_streams.py,sha256=SMC90diFFecpEg_zgDRVbdR3hSEIgVVij4taD-noMLM,289
+ isaacus/_utils/_sync.py,sha256=TpGLrrhRNWTJtODNE6Fup3_k7zrWm1j2RlirzBwre-0,2862
+ isaacus/_utils/_transform.py,sha256=NjCzmnfqYrsAikUHQig6N9QfuTVbKipuP3ur9mcNF-E,15951
+ isaacus/_utils/_typing.py,sha256=N_5PPuFNsaygbtA_npZd98SVN1LQQvFTKL6bkWPBZGU,4786
+ isaacus/_utils/_utils.py,sha256=0dDqauUbVZEXV0NVl7Bwu904Wwo5eyFCZpQThhFNhyA,12253
+ isaacus/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224
+ isaacus/resources/__init__.py,sha256=hYuuyfGpHUmFIXOnO_qcjbgyyoaSm4yt6EhDL_NXUoU,2188
+ isaacus/resources/embeddings.py,sha256=Ol-K756e376CnzdoEBuuRRIIdbNpYUOLD0bUN7o6ffA,9470
+ isaacus/resources/rerankings.py,sha256=6mfn-T7BKBD0gJcjqOoPhUJsHr7nb2QW0ZYuRND5p8k,11193
+ isaacus/resources/classifications/__init__.py,sha256=tYSnDm-o0CVuTC95VoNJzOqHsb8jTzYmW8hdwW14K60,1158
+ isaacus/resources/classifications/classifications.py,sha256=Td5Gscg1PNJJeobxow_hJq_RicpFe3ibEYN0Gh3Kpsg,4018
+ isaacus/resources/classifications/universal.py,sha256=TuTkM2d0bC5A3Eo_1km06IISu2nctEGpWzKHKHnW2IE,10714
+ isaacus/resources/extractions/__init__.py,sha256=24ccXv3kRlfXwnZJ4572kWNjJKiJ0Cd5vWeRkKCuMyY,1015
+ isaacus/resources/extractions/extractions.py,sha256=RaUnv1OG4i5J3JhpBNfpnxELpSHvmkqZmS2_DVL9Wvw,3671
+ isaacus/resources/extractions/qa.py,sha256=5KgPXamGlkR4qTglw0LBsIiytxGwdALQ-RBV7hS5kGo,10029
+ isaacus/types/__init__.py,sha256=rJe7gBVr0PgUiBlmSw0LoKhUsBVGsj2NoSSMieJGEpM,433
+ isaacus/types/embedding_create_params.py,sha256=Vcqa_CSnj_rXl8CWyGzBtjJKin18dRmy-q8N0eNFYQ8,1575
+ isaacus/types/embedding_response.py,sha256=LMBDLmYDu8oMoFH-Ov1_N4LovyUPNOeDg8aOHmzjykw,846
+ isaacus/types/reranking_create_params.py,sha256=T6N23D1N6zE8rC_wMh1xylj3XSOSqd1-truPNrxZwlE,2522
+ isaacus/types/reranking_response.py,sha256=uZDUZrYYbgygj2GpyV2seUWaCiXAzCZBQxmLxV_hEoU,927
+ isaacus/types/classifications/__init__.py,sha256=5wz2ChA8Ld8Yfx-7z7PShbfeyvE3wXRfpkctjS27t10,321
+ isaacus/types/classifications/universal_classification_response.py,sha256=C-dPzlM4WAHO6ylPcUXZSY2rzjHkwnX77u4LQrHrbSY,2454
+ isaacus/types/classifications/universal_create_params.py,sha256=ygK0Ge6PH92c_SyZB94zNGrvwxt9lskza8UTO_RFoDc,2319
+ isaacus/types/extractions/__init__.py,sha256=em0yfSoMG1XqO_LfmqveKlKh03y5x6g7hbaUn7ck21c,279
+ isaacus/types/extractions/answer_extraction_response.py,sha256=2-EHPss-y5s519W3-PpWyJzjLjUl4OkUfr9teWIN02o,2168
+ isaacus/types/extractions/qa_create_params.py,sha256=2ethTpU4W2aOkDO6SZfd4TBkk4aVWeK38nrIzO3EXjg,2127
+ isaacus-0.9.0.dist-info/METADATA,sha256=FPzfRdjxMpqKuOuUi9uU9yI5UulsLYaiu49MJn5Ec98,15438
+ isaacus-0.9.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+ isaacus-0.9.0.dist-info/licenses/LICENSE,sha256=lUen4LYVFVGEVXBsntBAPsQsOWgMkno1e9WfgWkpZ-k,11337
+ isaacus-0.9.0.dist-info/RECORD,,

isaacus-0.8.0.dist-info/RECORD DELETED
@@ -1,47 +0,0 @@
- isaacus/__init__.py,sha256=UFaPvzUXNjeEhBaZBmS5pHsOT4jdgePx9tN-_-7rQvQ,2587
- isaacus/_base_client.py,sha256=d6I9vr1FKcXgokcfwYkXtgkHKrrsJmeKS14QJ6wOIDQ,66133
- isaacus/_client.py,sha256=-2sN8vyAm_qN2P6JuLxKel6BfvA6mragbo4TPOwLs3Y,16544
- isaacus/_compat.py,sha256=VWemUKbj6DDkQ-O4baSpHVLJafotzeXmCQGJugfVTIw,6580
- isaacus/_constants.py,sha256=S14PFzyN9-I31wiV7SmIlL5Ga0MLHxdvegInGdXH7tM,462
- isaacus/_exceptions.py,sha256=L82uluhizzc94VydHIaJkNxkcG-2DAe74tNhrE2eN2A,3222
- isaacus/_files.py,sha256=mf4dOgL4b0ryyZlbqLhggD3GVgDf6XxdGFAgce01ugE,3549
- isaacus/_models.py,sha256=KvjsMfb88XZlFUKVoOxr8OyDj47MhoH2OKqWNEbBhk4,30010
- isaacus/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846
- isaacus/_resource.py,sha256=iP_oYhz5enCI58mK7hlwLoPMPh4Q5s8-KBv-jGfv2aM,1106
- isaacus/_response.py,sha256=aXLF5ia58bjjQXTxY574lh7JfKXiGL2tDTX09klm8lw,28794
- isaacus/_streaming.py,sha256=tMBfwrfEFWm0v7vWFgjn_lizsoD70lPkYigIBuADaCM,10104
- isaacus/_types.py,sha256=OWOeOlYfnHj59_Lq34ua9FIuR_UTSHQzo4SxlaQBS9Y,6198
- isaacus/_version.py,sha256=zwbXJiibrQFE8HHRyah1KPLOSRL6I7y8cADvmmrXCHw,159
- isaacus/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- isaacus/_utils/__init__.py,sha256=PNZ_QJuzZEgyYXqkO1HVhGkj5IU9bglVUcw7H-Knjzw,2062
- isaacus/_utils/_logs.py,sha256=rwa1Yzjbs2JaFn9KQ06rH5c_GSNa--BVwWnWhvvT1tY,777
- isaacus/_utils/_proxy.py,sha256=aglnj2yBTDyGX9Akk2crZHrl10oqRmceUy2Zp008XEs,1975
- isaacus/_utils/_reflection.py,sha256=ZmGkIgT_PuwedyNBrrKGbxoWtkpytJNU1uU4QHnmEMU,1364
- isaacus/_utils/_resources_proxy.py,sha256=vW2q6wobLs4JH9DnlVsdaotKEzn5bWqqe8WhNTAOv_k,594
- isaacus/_utils/_streams.py,sha256=SMC90diFFecpEg_zgDRVbdR3hSEIgVVij4taD-noMLM,289
- isaacus/_utils/_sync.py,sha256=TpGLrrhRNWTJtODNE6Fup3_k7zrWm1j2RlirzBwre-0,2862
- isaacus/_utils/_transform.py,sha256=n7kskEWz6o__aoNvhFoGVyDoalNe6mJwp-g7BWkdj88,15617
- isaacus/_utils/_typing.py,sha256=D0DbbNu8GnYQTSICnTSHDGsYXj8TcAKyhejb0XcnjtY,4602
- isaacus/_utils/_utils.py,sha256=ts4CiiuNpFiGB6YMdkQRh2SZvYvsl7mAF-JWHCcLDf4,12312
- isaacus/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224
- isaacus/resources/__init__.py,sha256=BFfbYDoVs497KDWA4AOZbtK2Wv8zx9pip4GZQphfGSI,1686
- isaacus/resources/rerankings.py,sha256=V_6f8HIdW4N5GzA41U_fFxoD2pua8cndFg8et23rWsI,11174
- isaacus/resources/classifications/__init__.py,sha256=tYSnDm-o0CVuTC95VoNJzOqHsb8jTzYmW8hdwW14K60,1158
- isaacus/resources/classifications/classifications.py,sha256=Td5Gscg1PNJJeobxow_hJq_RicpFe3ibEYN0Gh3Kpsg,4018
- isaacus/resources/classifications/universal.py,sha256=AyG0_P3Af0rQQAfDv9oTHLGzjsvyIH5c7eWgg2UWxT8,10677
- isaacus/resources/extractions/__init__.py,sha256=24ccXv3kRlfXwnZJ4572kWNjJKiJ0Cd5vWeRkKCuMyY,1015
- isaacus/resources/extractions/extractions.py,sha256=RaUnv1OG4i5J3JhpBNfpnxELpSHvmkqZmS2_DVL9Wvw,3671
- isaacus/resources/extractions/qa.py,sha256=nPddCTtSxAyNVu4OCSjObeaGNWopm8_-eZuu345cCRo,9932
- isaacus/types/__init__.py,sha256=PswomxWpSak5TApm_28hneqI_l_g0-4NF5W6kdedI0Y,253
- isaacus/types/reranking.py,sha256=MQRUoH2UB185Q369H01jnvaT9pz4D9z0oOoz8oLMqjc,911
- isaacus/types/reranking_create_params.py,sha256=eoZcGr0_o8o-NBZ9W7yqW4cZH5dhxMmfIM9jFtIiUu0,2481
- isaacus/types/classifications/__init__.py,sha256=GX6WFRzjx9qcuJhdRZjFLJRYMM4d5J8F5N-BUq4ZgP0,296
- isaacus/types/classifications/universal_classification.py,sha256=qeAaPg26wXDq1R_93SZIDzTp9AkfLRMucpd6Tie7NNU,2438
- isaacus/types/classifications/universal_create_params.py,sha256=0RYreNEY9j5BqxJLMoDxIvibYZnCZcNTFN0zERz7kpg,2277
- isaacus/types/extractions/__init__.py,sha256=9FahSE48xCUSAdkb1DaqRBBznuQ805lmRdlY22iLtQw,254
- isaacus/types/extractions/answer_extraction.py,sha256=KIqfIsdlYAAjDDwzM8XuFOW_1u5zv34RWbIa3WchBxc,2152
- isaacus/types/extractions/qa_create_params.py,sha256=iaGIDnuJ8Kek5jW6QMQdbAQJYYiihcjZ1k_iSDbt1Mk,2065
- isaacus-0.8.0.dist-info/METADATA,sha256=iSs7rQtVwrt8TEiuZgMoeoGGsctLaus2Dytg9VYL0ww,15546
- isaacus-0.8.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
- isaacus-0.8.0.dist-info/licenses/LICENSE,sha256=lUen4LYVFVGEVXBsntBAPsQsOWgMkno1e9WfgWkpZ-k,11337
- isaacus-0.8.0.dist-info/RECORD,,