openaivec 0.99.2__py3-none-any.whl → 0.99.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. openaivec/__init__.py +2 -2
  2. openaivec/_cache/__init__.py +12 -0
  3. openaivec/{_proxy.py → _cache/proxy.py} +1 -1
  4. openaivec/_embeddings.py +1 -1
  5. openaivec/_prompt.py +1 -1
  6. openaivec/_responses.py +1 -1
  7. openaivec/_schema/__init__.py +9 -0
  8. openaivec/{_schema.py → _schema/infer.py} +6 -6
  9. openaivec/pandas_ext.py +12 -12
  10. openaivec/spark.py +4 -4
  11. openaivec/task/__init__.py +8 -6
  12. openaivec/task/customer_support/customer_sentiment.py +1 -1
  13. openaivec/task/customer_support/inquiry_classification.py +1 -1
  14. openaivec/task/customer_support/inquiry_summary.py +1 -1
  15. openaivec/task/customer_support/intent_analysis.py +1 -1
  16. openaivec/task/customer_support/response_suggestion.py +1 -1
  17. openaivec/task/customer_support/urgency_analysis.py +1 -1
  18. openaivec/task/nlp/dependency_parsing.py +1 -1
  19. openaivec/task/nlp/keyword_extraction.py +1 -1
  20. openaivec/task/nlp/morphological_analysis.py +1 -1
  21. openaivec/task/nlp/named_entity_recognition.py +1 -1
  22. openaivec/task/nlp/sentiment_analysis.py +1 -1
  23. openaivec/task/nlp/translation.py +1 -1
  24. openaivec/task/table/fillna.py +1 -1
  25. {openaivec-0.99.2.dist-info → openaivec-0.99.3.dist-info}/METADATA +3 -3
  26. openaivec-0.99.3.dist-info/RECORD +39 -0
  27. openaivec-0.99.2.dist-info/RECORD +0 -37
  28. /openaivec/{_optimize.py → _cache/optimize.py} +0 -0
  29. /openaivec/{_dynamic.py → _schema/spec.py} +0 -0
  30. {openaivec-0.99.2.dist-info → openaivec-0.99.3.dist-info}/WHEEL +0 -0
  31. {openaivec-0.99.2.dist-info → openaivec-0.99.3.dist-info}/licenses/LICENSE +0 -0
openaivec/__init__.py CHANGED
@@ -2,7 +2,7 @@ from ._embeddings import AsyncBatchEmbeddings, BatchEmbeddings
  from ._model import PreparedTask
  from ._prompt import FewShotPrompt, FewShotPromptBuilder
  from ._responses import AsyncBatchResponses, BatchResponses
- from ._schema import InferredSchema, SchemaInferenceInput, SchemaInferer
+ from ._schema import SchemaInferenceInput, SchemaInferenceOutput, SchemaInferer

  __all__ = [
  "AsyncBatchEmbeddings",
@@ -11,7 +11,7 @@ __all__ = [
  "BatchResponses",
  "FewShotPrompt",
  "FewShotPromptBuilder",
- "InferredSchema",
+ "SchemaInferenceOutput",
  "PreparedTask",
  "SchemaInferenceInput",
  "SchemaInferer",
openaivec/_cache/__init__.py ADDED
@@ -0,0 +1,12 @@
+ """Caching utilities used across OpenAIVec."""
+
+ from .optimize import BatchSizeSuggester, PerformanceMetric
+ from .proxy import AsyncBatchingMapProxy, BatchingMapProxy, ProxyBase
+
+ __all__ = [
+ "AsyncBatchingMapProxy",
+ "BatchSizeSuggester",
+ "BatchingMapProxy",
+ "PerformanceMetric",
+ "ProxyBase",
+ ]
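The new `_cache` package consolidates the former `_proxy` and `_optimize` modules. It is internal (leading underscore), so the sketch below is illustrative rather than a supported entry point; the `batch_size` argument mirrors the docstring examples later in this diff:

```python
# Illustrative only: internal import path as of openaivec 0.99.3.
from openaivec._cache import BatchingMapProxy

# A shared batching cache; the same instance can be passed to several accessor
# calls (see the *_with_cache docstring updates below) to deduplicate requests.
shared_cache = BatchingMapProxy(batch_size=64)
```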
openaivec/{_proxy.py → _cache/proxy.py} RENAMED
@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Hashable
  from dataclasses import dataclass, field
  from typing import Any, Generic, TypeVar

- from openaivec._optimize import BatchSizeSuggester
+ from openaivec._cache import BatchSizeSuggester

  __all__ = []

openaivec/_embeddings.py CHANGED
@@ -5,8 +5,8 @@ import numpy as np
  from numpy.typing import NDArray
  from openai import AsyncOpenAI, InternalServerError, OpenAI, RateLimitError

+ from openaivec._cache import AsyncBatchingMapProxy, BatchingMapProxy
  from openaivec._log import observe
- from openaivec._proxy import AsyncBatchingMapProxy, BatchingMapProxy
  from openaivec._util import backoff, backoff_async

  __all__ = [
openaivec/_prompt.py CHANGED
@@ -6,7 +6,7 @@ construction of a prompt in a structured way, including setting the
  purpose, adding cautions, and providing examples.

  ```python
- from openaivec.prompt import FewShotPromptBuilder
+ from openaivec import FewShotPromptBuilder

  prompt_str: str = (
  FewShotPromptBuilder()
openaivec/_responses.py CHANGED
@@ -7,9 +7,9 @@ from openai import AsyncOpenAI, BadRequestError, InternalServerError, OpenAI, Ra
  from openai.types.responses import ParsedResponse
  from pydantic import BaseModel

+ from openaivec._cache import AsyncBatchingMapProxy, BatchingMapProxy
  from openaivec._log import observe
  from openaivec._model import PreparedTask, ResponseFormat
- from openaivec._proxy import AsyncBatchingMapProxy, BatchingMapProxy
  from openaivec._util import backoff, backoff_async

  __all__ = [
openaivec/_schema/__init__.py ADDED
@@ -0,0 +1,9 @@
+ """Schema inference package.
+
+ Internal helpers now live in :mod:`openaivec._schema.infer`; this module simply
+ re-exports the main entry points so ``from openaivec._schema import ...`` still
+ behaves the same."""
+
+ from .infer import SchemaInferenceInput, SchemaInferenceOutput, SchemaInferer
+
+ __all__ = ["SchemaInferenceOutput", "SchemaInferenceInput", "SchemaInferer"]
openaivec/{_schema.py → _schema/infer.py} RENAMED
@@ -61,14 +61,14 @@ from openai import OpenAI
  from openai.types.responses import ParsedResponse
  from pydantic import BaseModel, Field

- from openaivec._dynamic import ObjectSpec, _build_model
  from openaivec._model import PreparedTask
+ from openaivec._schema.spec import ObjectSpec, _build_model

  # Internal module: explicitly not part of public API
  __all__: list[str] = []


- class InferredSchema(BaseModel):
+ class SchemaInferenceOutput(BaseModel):
  """Result of a schema inference round.

  Contains the normalized *instructions*, objective *examples_summary*, the root
@@ -123,7 +123,7 @@ class InferredSchema(BaseModel):
  )

  @classmethod
- def load(cls, path: str) -> "InferredSchema":
+ def load(cls, path: str) -> "SchemaInferenceOutput":
  """Load an inferred schema from a JSON file.

  Args:
@@ -265,7 +265,7 @@ class SchemaInferer:
  client: OpenAI
  model_name: str

- def infer_schema(self, data: SchemaInferenceInput, *args, max_retries: int = 8, **kwargs) -> InferredSchema:
+ def infer_schema(self, data: SchemaInferenceInput, *args, max_retries: int = 8, **kwargs) -> SchemaInferenceOutput:
  """Infer a validated schema from representative examples.

  Workflow:
@@ -315,11 +315,11 @@
  )
  instructions = _INFER_INSTRUCTIONS + "\n\n" + "\n".join(feedback_lines)

- response: ParsedResponse[InferredSchema] = self.client.responses.parse(
+ response: ParsedResponse[SchemaInferenceOutput] = self.client.responses.parse(
  model=self.model_name,
  instructions=instructions,
  input=data.model_dump_json(),
- text_format=InferredSchema,
+ text_format=SchemaInferenceOutput,
  *args,
  **kwargs,
  )
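`SchemaInferer` keeps its `client`/`model_name` fields and its `infer_schema` method; only the result type is renamed. A hedged sketch of the updated call path, assuming keyword construction of `SchemaInferer` and using placeholders where this diff does not show the real fields or file names:

```python
from openai import OpenAI
from openaivec import SchemaInferenceInput, SchemaInferenceOutput, SchemaInferer

# Keyword construction is an assumption; the diff only shows the field annotations.
inferer = SchemaInferer(client=OpenAI(), model_name="gpt-4.1-mini")

# SchemaInferenceInput's fields are not part of this diff, so `data` is a placeholder.
data: SchemaInferenceInput = ...  # built from representative example records

# infer_schema() now returns SchemaInferenceOutput (formerly InferredSchema).
result: SchemaInferenceOutput = inferer.infer_schema(data, max_retries=8)

# The renamed load() classmethod still reads a previously saved schema from JSON
# (placeholder path).
restored = SchemaInferenceOutput.load("inferred_schema.json")
```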
openaivec/pandas_ext.py CHANGED
@@ -54,12 +54,12 @@ import tiktoken
  from openai import AsyncOpenAI, OpenAI
  from pydantic import BaseModel

+ from openaivec._cache import AsyncBatchingMapProxy, BatchingMapProxy
  from openaivec._embeddings import AsyncBatchEmbeddings, BatchEmbeddings
  from openaivec._model import EmbeddingsModelName, PreparedTask, ResponseFormat, ResponsesModelName
  from openaivec._provider import CONTAINER, _check_azure_v1_api_url
- from openaivec._proxy import AsyncBatchingMapProxy, BatchingMapProxy
  from openaivec._responses import AsyncBatchResponses, BatchResponses
- from openaivec._schema import InferredSchema, SchemaInferenceInput, SchemaInferer
+ from openaivec._schema import SchemaInferenceInput, SchemaInferenceOutput, SchemaInferer
  from openaivec.task.table import FillNaResponse, fillna

  __all__ = [
@@ -308,7 +308,7 @@ class OpenAIVecSeriesAccessor:

  Example:
  ```python
- from openaivec._proxy import BatchingMapProxy
+ from openaivec._cache import BatchingMapProxy
  import numpy as np

  # Create a shared cache with custom batch size
@@ -387,7 +387,7 @@ class OpenAIVecSeriesAccessor:

  Example:
  ```python
- from openaivec._proxy import BatchingMapProxy
+ from openaivec._cache import BatchingMapProxy
  shared_cache = BatchingMapProxy(batch_size=64)
  reviews.ai.task_with_cache(sentiment_task, cache=shared_cache)
  ```
@@ -503,7 +503,7 @@ class OpenAIVecSeriesAccessor:
  schema model, aligned with the original Series index.
  """

- schema: InferredSchema | None = None
+ schema: SchemaInferenceOutput | None = None
  if response_format is None:
  schema = self.infer_schema(instructions=instructions, max_examples=max_examples, **api_kwargs)

@@ -588,7 +588,7 @@ class OpenAIVecSeriesAccessor:
  **api_kwargs,
  )

- def infer_schema(self, instructions: str, max_examples: int = 100, **api_kwargs) -> InferredSchema:
+ def infer_schema(self, instructions: str, max_examples: int = 100, **api_kwargs) -> SchemaInferenceOutput:
  """Infer a structured data schema from Series content using AI.

  This method analyzes a sample of Series values to automatically generate
@@ -730,7 +730,7 @@ class OpenAIVecDataFrameAccessor:

  Example:
  ```python
- from openaivec._proxy import BatchingMapProxy
+ from openaivec._cache import BatchingMapProxy

  # Create a shared cache with custom batch size
  shared_cache = BatchingMapProxy(batch_size=64)
@@ -990,7 +990,7 @@ class OpenAIVecDataFrameAccessor:
  **api_kwargs,
  )

- def infer_schema(self, instructions: str, max_examples: int = 100, **api_kwargs) -> InferredSchema:
+ def infer_schema(self, instructions: str, max_examples: int = 100, **api_kwargs) -> SchemaInferenceOutput:
  """Infer a structured data schema from DataFrame rows using AI.

  This method analyzes a sample of DataFrame rows to automatically infer
@@ -1317,7 +1317,7 @@ class AsyncOpenAIVecSeriesAccessor:

  Example:
  ```python
- from openaivec._proxy import AsyncBatchingMapProxy
+ from openaivec._cache import AsyncBatchingMapProxy
  import numpy as np

  # Create a shared cache with custom batch size and concurrency
@@ -1424,7 +1424,7 @@ class AsyncOpenAIVecSeriesAccessor:
  Example:
  ```python
  from openaivec._model import PreparedTask
- from openaivec._proxy import AsyncBatchingMapProxy
+ from openaivec._cache import AsyncBatchingMapProxy

  # Create a shared cache with custom batch size and concurrency
  shared_cache = AsyncBatchingMapProxy(batch_size=64, max_concurrency=4)
@@ -1556,7 +1556,7 @@ class AsyncOpenAIVecSeriesAccessor:
  Note:
  This is an asynchronous method and must be awaited.
  """
- schema: InferredSchema | None = None
+ schema: SchemaInferenceOutput | None = None
  if response_format is None:
  # Use synchronous schema inference
  schema = self._obj.ai.infer_schema(instructions=instructions, max_examples=max_examples)
@@ -1650,7 +1650,7 @@ class AsyncOpenAIVecDataFrameAccessor:

  Example:
  ```python
- from openaivec._proxy import AsyncBatchingMapProxy
+ from openaivec._cache import AsyncBatchingMapProxy

  # Create a shared cache with custom batch size and concurrency
  shared_cache = AsyncBatchingMapProxy(batch_size=64, max_concurrency=4)
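Across pandas_ext the changes are limited to the cache import path and the renamed inference result type; the accessor methods in the docstrings keep their signatures. A combined sketch, assuming importing `openaivec.pandas_ext` registers the `.ai` accessor and an OpenAI API key is configured (the `infer_schema` call issues real API requests):

```python
import pandas as pd

from openaivec import SchemaInferenceOutput, pandas_ext  # noqa: F401  # registers the .ai accessor
from openaivec._cache import BatchingMapProxy  # moved here from openaivec._proxy

reviews = pd.Series(["Great battery life", "Arrived broken", "Works as advertised"])

# A shared cache instance, as in the updated *_with_cache docstring examples;
# it can be passed via e.g. reviews.ai.task_with_cache(task, cache=shared_cache).
shared_cache = BatchingMapProxy(batch_size=64)

# Same signature as before; only the return annotation changed in 0.99.3.
schema: SchemaInferenceOutput = reviews.ai.infer_schema(
    instructions="Extract the sentiment and the product aspect being discussed.",
    max_examples=3,
)
```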
openaivec/spark.py CHANGED
@@ -142,10 +142,10 @@ from pyspark.sql.udf import UserDefinedFunction
  from typing_extensions import Literal

  from openaivec import pandas_ext
+ from openaivec._cache import AsyncBatchingMapProxy
  from openaivec._model import EmbeddingsModelName, PreparedTask, ResponseFormat, ResponsesModelName
  from openaivec._provider import CONTAINER
- from openaivec._proxy import AsyncBatchingMapProxy
- from openaivec._schema import InferredSchema, SchemaInferenceInput, SchemaInferer
+ from openaivec._schema import SchemaInferenceInput, SchemaInferenceOutput, SchemaInferer
  from openaivec._serialize import deserialize_base_model, serialize_base_model
  from openaivec._util import TextChunker

@@ -518,7 +518,7 @@ def infer_schema(
  example_table_name: str,
  example_field_name: str,
  max_examples: int = 100,
- ) -> InferredSchema:
+ ) -> SchemaInferenceOutput:
  """Infer the schema for a response format based on example data.

  This function retrieves examples from a Spark table and infers the schema
@@ -606,7 +606,7 @@ def parse_udf(
  if not response_format and not (example_field_name and example_table_name):
  raise ValueError("Either response_format or example_table_name and example_field_name must be provided.")

- schema: InferredSchema | None = None
+ schema: SchemaInferenceOutput | None = None

  if not response_format:
  schema = infer_schema(
openaivec/task/__init__.py CHANGED
@@ -32,7 +32,7 @@ Specialized tasks for customer service operations:
  ### Quick Start with Default Tasks
  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import nlp, customer_support

  client = OpenAI()
@@ -90,15 +90,17 @@ results_df = df.ai.extract("sentiment")

  ### Spark Integration
  ```python
- from openaivec.spark import ResponsesUDFBuilder
+ from openaivec.spark import task_udf

  # Register UDF for large-scale processing
  spark.udf.register(
  "analyze_sentiment",
- ResponsesUDFBuilder.of_openai(
- api_key=api_key,
- model_name="gpt-4.1-mini"
- ).build_from_task(task=nlp.SENTIMENT_ANALYSIS)
+ task_udf(
+ task=nlp.SENTIMENT_ANALYSIS,
+ model_name="gpt-4.1-mini",
+ batch_size=64,
+ max_concurrency=8,
+ ),
  )

  # Use in Spark SQL
openaivec/task/customer_support/customer_sentiment.py CHANGED
@@ -9,7 +9,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import customer_support

  client = OpenAI()
openaivec/task/customer_support/inquiry_classification.py CHANGED
@@ -8,7 +8,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import customer_support

  client = OpenAI()
openaivec/task/customer_support/inquiry_summary.py CHANGED
@@ -9,7 +9,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import customer_support

  client = OpenAI()
openaivec/task/customer_support/intent_analysis.py CHANGED
@@ -8,7 +8,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import customer_support

  client = OpenAI()
openaivec/task/customer_support/response_suggestion.py CHANGED
@@ -9,7 +9,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import customer_support

  client = OpenAI()
openaivec/task/customer_support/urgency_analysis.py CHANGED
@@ -8,7 +8,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import customer_support

  client = OpenAI()
openaivec/task/nlp/dependency_parsing.py CHANGED
@@ -8,7 +8,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import nlp

  client = OpenAI()
openaivec/task/nlp/keyword_extraction.py CHANGED
@@ -8,7 +8,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import nlp

  client = OpenAI()
openaivec/task/nlp/morphological_analysis.py CHANGED
@@ -9,7 +9,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import nlp

  client = OpenAI()
openaivec/task/nlp/named_entity_recognition.py CHANGED
@@ -8,7 +8,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import nlp

  client = OpenAI()
openaivec/task/nlp/sentiment_analysis.py CHANGED
@@ -8,7 +8,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import nlp

  client = OpenAI()
openaivec/task/nlp/translation.py CHANGED
@@ -13,7 +13,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task import nlp

  client = OpenAI()
openaivec/task/table/fillna.py CHANGED
@@ -33,7 +33,7 @@ Example:

  ```python
  from openai import OpenAI
- from openaivec._responses import BatchResponses
+ from openaivec import BatchResponses
  from openaivec.task.table import fillna

  client = OpenAI()
{openaivec-0.99.2.dist-info → openaivec-0.99.3.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: openaivec
- Version: 0.99.2
+ Version: 0.99.3
  Summary: Generative mutation for tabular calculation
  Project-URL: Homepage, https://microsoft.github.io/openaivec/
  Project-URL: Repository, https://github.com/microsoft/openaivec
@@ -49,7 +49,7 @@ Description-Content-Type: text/markdown
  - Drop-in `.ai` and `.aio` DataFrame accessors keep pandas analysts in their favorite tools.
  - Smart batching (`BatchingMapProxy`) deduplicates prompts, enforces ordered outputs, and shortens runtimes without manual tuning.
  - Built-in caches, retry logic, and reasoning model safeguards cut noisy boilerplate from production pipelines.
- - Ready-made Spark UDF builders and Microsoft Fabric guides take AI workloads from notebooks into enterprise-scale ETL.
+ - Ready-made Spark UDF helpers and Microsoft Fabric guides take AI workloads from notebooks into enterprise-scale ETL.
  - Pre-configured task library and `FewShotPromptBuilder` ship curated prompts and structured outputs validated by Pydantic.
  - Supports OpenAI and Azure OpenAI clients interchangeably, including async workloads and embeddings.

@@ -142,7 +142,7 @@ automatically in notebook environments when `show_progress=True`.
  - Vectorized request batching with automatic deduplication, retries, and cache hooks for any OpenAI-compatible client.
  - pandas `.ai` and `.aio` accessors for synchronous and asynchronous DataFrame pipelines, including `ai.extract` helpers.
  - Task library with Pydantic-backed schemas for consistent structured outputs across pandas and Spark jobs.
- - Spark UDF builders (`responses_udf`, `embeddings_udf`, `parse_udf`, `task_udf`, etc.) for large-scale ETL and BI.
+ - Spark UDF helpers (`responses_udf`, `embeddings_udf`, `parse_udf`, `task_udf`, etc.) for large-scale ETL and BI.
  - Embeddings, token counting, and similarity utilities for search and retrieval use cases.
  - Prompt tooling (`FewShotPromptBuilder`, `improve`) to craft and iterate production-ready instructions.

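The wording change from "Spark UDF builders" to "Spark UDF helpers" reflects the function-style API (`task_udf`, `responses_udf`, `embeddings_udf`, `parse_udf`) that replaces the old builder classes. A brief sketch of registering one of these helpers, reusing only the arguments shown in the task/__init__.py docstring earlier in this diff; the SparkSession `spark`, the `reviews` table, and its columns are illustrative placeholders:

```python
from openaivec.spark import task_udf
from openaivec.task import nlp

# Register the prepared sentiment task as a SQL-callable UDF (mirrors the
# docstring example above; batch_size/max_concurrency tune request batching).
spark.udf.register(
    "analyze_sentiment",
    task_udf(
        task=nlp.SENTIMENT_ANALYSIS,
        model_name="gpt-4.1-mini",
        batch_size=64,
        max_concurrency=8,
    ),
)

# Illustrative Spark SQL usage of the registered UDF.
df = spark.sql("SELECT review_id, analyze_sentiment(review_text) AS sentiment FROM reviews")
```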
openaivec-0.99.3.dist-info/RECORD ADDED
@@ -0,0 +1,39 @@
+ openaivec/__init__.py,sha256=0RsOGIt4TMgPdjppWiz89W1gb1X4qM-4nWqFFi-eK8w,555
+ openaivec/_di.py,sha256=Cl1ZoNBlQsJL1bpzoMDl08uT9pZFVSlqOdLbS3_MwPE,11462
+ openaivec/_embeddings.py,sha256=2JWFUZdHR1dvPdWPT4nVSZo0_TAz4gr8oLR3EhhtUyE,8200
+ openaivec/_log.py,sha256=LHNs6AbJzM4weaRARZFroigxR6D148d7WSIMLk1IhbU,1439
+ openaivec/_model.py,sha256=71oiENUKwpY58ilj1LE7fDOAhs7PUSiZRiUHKUIuu7Y,3235
+ openaivec/_prompt.py,sha256=_fPATuWKaAdFD48Kuu0UQorlChA9mNZCDJx88bu_BuY,20626
+ openaivec/_provider.py,sha256=8z8gPYY5-Z7rzDlj_NC6hR__DUqVAH7VLHJn6LalzRg,6158
+ openaivec/_responses.py,sha256=Lb37ajlFQoVVac_p9oVf3scUDS3AI1ro4tRlk_UBqVg,20412
+ openaivec/_serialize.py,sha256=u2Om94Sc_QgJkTlW2BAGw8wd6gYDhc6IRqvS-qevFSs,8399
+ openaivec/_util.py,sha256=XfueAycVCQvgRLS7wF7e306b53lebORvZOBzbQjy4vE,6438
+ openaivec/pandas_ext.py,sha256=XEmB08FS6lFtk6V7zzM4XHnzPkLCZ08OFFlkX-f0Oko,86730
+ openaivec/spark.py,sha256=PIZxy3pVSrUv9PB2KBXQNM8beEvn_abaCYGS1DZmanY,32764
+ openaivec/_cache/__init__.py,sha256=IYUH5GKsJXuCX-k3XtT259rEz49EZm9KW2TIOTGW4uQ,314
+ openaivec/_cache/optimize.py,sha256=3nS8VehbS7iGC1tPDDQh-iAgyKHbVYmMbCRBWM77U_U,3827
+ openaivec/_cache/proxy.py,sha256=mBUaYNFLrix6ZDblSHXmKlrd4qraaoVpbHGJ-_RlK-s,29666
+ openaivec/_schema/__init__.py,sha256=XUj3Jv6ZVDjyYzSmH6Q5lmDj-hBMfUg_eBNeZACXR6Q,368
+ openaivec/_schema/infer.py,sha256=gcrpw0OVJMWdmUlzimP-C14cuCAAOnHQd8-bUNR220o,15705
+ openaivec/_schema/spec.py,sha256=7ZaC59w2Edemnao57XeZVO4qmSOA-Kus6TchZC3Dd5o,14821
+ openaivec/task/__init__.py,sha256=E82gfwhLOn1S8rgNZZAci3-H7bDXdGFM-cl1fpeN7_o,6154
+ openaivec/task/customer_support/__init__.py,sha256=KWfGyXPdZyfGdRH17x7hPpJJ1N2EP9PPhZx0fvBAwSI,884
+ openaivec/task/customer_support/customer_sentiment.py,sha256=6-qbJB21p0SKzV2vsesVIF-uUU_YB-QjNFUNfMLepBA,7521
+ openaivec/task/customer_support/inquiry_classification.py,sha256=wORsEUL7dQ_0Mz8TWC16m6BtAD1KAvr9LqN4Fmji7LM,9591
+ openaivec/task/customer_support/inquiry_summary.py,sha256=hrGLku5YjCt4PMXTrOSiCEnx_XOeKW4o_sv8Ocrxt0I,6885
+ openaivec/task/customer_support/intent_analysis.py,sha256=lOW7MLH20PnXHq2Dqt7pQ6xmNbb1QK6A0MtXImlHA48,7467
+ openaivec/task/customer_support/response_suggestion.py,sha256=MMbeQytm7E13HYW4eMMrLpudM85JFfaPj7l94_wNp_0,8295
+ openaivec/task/customer_support/urgency_analysis.py,sha256=3JMxw0quJ8gnWL8hAlOKX49uiv30qtogRdKxtT9lebU,11520
+ openaivec/task/nlp/__init__.py,sha256=QoQ0egEK9IEh5hdrE07rZ_KCmC0gy_2FPrWJYRWiipY,512
+ openaivec/task/nlp/dependency_parsing.py,sha256=N9c-HMdSebee4hbeNeACqGQHaBd6uw4K_4STbfoz_Sw,2821
+ openaivec/task/nlp/keyword_extraction.py,sha256=ghijuMQbtlIp7kJdwOnFmTfVzj3rnmJa-_Yc9uWLa1E,2808
+ openaivec/task/nlp/morphological_analysis.py,sha256=StCteMdWd49QjsZg4FDlzOO5SxdRqMx-A4r3pnOvZz8,2405
+ openaivec/task/nlp/named_entity_recognition.py,sha256=0k2qQLG6L7s9lvpxzwB8csL88jqUKlOlEn2MCJ-oEVE,3041
+ openaivec/task/nlp/sentiment_analysis.py,sha256=1igoAhns-VgsDE8XI47Dw-zeOcR5wEY9IFIp9LPTC_E,3089
+ openaivec/task/nlp/translation.py,sha256=TtV7F6bmKPqLi3_Ok7GoOqT_GKJiemotVq-YEbKd6IA,6617
+ openaivec/task/table/__init__.py,sha256=kJz15WDJXjyC7UIHKBvlTRhCf347PCDMH5T5fONV2sU,83
+ openaivec/task/table/fillna.py,sha256=4j27fWT5IzOhQqCPwLhomjBOAWPBslyIBbBMspjqtbw,6877
+ openaivec-0.99.3.dist-info/METADATA,sha256=8XE3uDaUBz5JbsTku-vGO2czFREo3aL_BTyc5HaM5GM,30441
+ openaivec-0.99.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ openaivec-0.99.3.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+ openaivec-0.99.3.dist-info/RECORD,,
openaivec-0.99.2.dist-info/RECORD DELETED
@@ -1,37 +0,0 @@
- openaivec/__init__.py,sha256=qHgiPPAPijdZHx0KjifOkdo7U6nl_lMQLiCe4SHjqiE,541
- openaivec/_di.py,sha256=Cl1ZoNBlQsJL1bpzoMDl08uT9pZFVSlqOdLbS3_MwPE,11462
- openaivec/_dynamic.py,sha256=7ZaC59w2Edemnao57XeZVO4qmSOA-Kus6TchZC3Dd5o,14821
- openaivec/_embeddings.py,sha256=nirLqOu69fTB7aSCYhbbRbwAA6ggwEYJiQoPDsHqAqQ,8200
- openaivec/_log.py,sha256=LHNs6AbJzM4weaRARZFroigxR6D148d7WSIMLk1IhbU,1439
- openaivec/_model.py,sha256=71oiENUKwpY58ilj1LE7fDOAhs7PUSiZRiUHKUIuu7Y,3235
- openaivec/_optimize.py,sha256=3nS8VehbS7iGC1tPDDQh-iAgyKHbVYmMbCRBWM77U_U,3827
- openaivec/_prompt.py,sha256=NWE7jZKYphkD856haynJLmRadPugJ68emT42pd7Ciso,20633
- openaivec/_provider.py,sha256=8z8gPYY5-Z7rzDlj_NC6hR__DUqVAH7VLHJn6LalzRg,6158
- openaivec/_proxy.py,sha256=AvTM2ESEJnScP7vxN-ISLE_HPUnMGsDGwYs9YILeDIY,29669
- openaivec/_responses.py,sha256=qBrYv4qblDIs5dRvj9t96r8UfAJmy4ZvtAe6csNZ7oM,20412
- openaivec/_schema.py,sha256=iOeR5J_ihZRDZtzmqvOK1ZtInKcx4OnoR38DB3VmmQw,15666
- openaivec/_serialize.py,sha256=u2Om94Sc_QgJkTlW2BAGw8wd6gYDhc6IRqvS-qevFSs,8399
- openaivec/_util.py,sha256=XfueAycVCQvgRLS7wF7e306b53lebORvZOBzbQjy4vE,6438
- openaivec/pandas_ext.py,sha256=W-n2dlcouJHVAyyEnDrJ3zUFUCWFcnIYlJweuy5x4zs,86695
- openaivec/spark.py,sha256=ooRyeS75WDoh_3ePvThWZbmF_DzEprAJurLTXZrvFQo,32743
- openaivec/task/__init__.py,sha256=RkYIKrcE83M_9Um9cSMkeGzL9kPRAovajfRvr31YxLE,6178
- openaivec/task/customer_support/__init__.py,sha256=KWfGyXPdZyfGdRH17x7hPpJJ1N2EP9PPhZx0fvBAwSI,884
- openaivec/task/customer_support/customer_sentiment.py,sha256=d8spZUtImjePK0xWGvIW98ghbdyOZ0KEZmaUpG8QB7M,7532
- openaivec/task/customer_support/inquiry_classification.py,sha256=NKz1oTm06eU6W-plHe3T3o20lCk6M2NemVXZ4Y_IozU,9602
- openaivec/task/customer_support/inquiry_summary.py,sha256=8X1J8lZwlgX6s02cs86-K0moZ5gTrX7E7WEKiY2vpiQ,6896
- openaivec/task/customer_support/intent_analysis.py,sha256=Jnokzi0wTlHpuTRl5uqxdoHClYU71b9iFTzn3KNeNVM,7478
- openaivec/task/customer_support/response_suggestion.py,sha256=IykZE-BJ_ENhe5frnVl4bQKpArwOuNAITGlBxlu62c0,8306
- openaivec/task/customer_support/urgency_analysis.py,sha256=fdBT0Ud-InGqou-ZuFcVc3EpUNAq5N55_Q9D6D74WlQ,11531
- openaivec/task/nlp/__init__.py,sha256=QoQ0egEK9IEh5hdrE07rZ_KCmC0gy_2FPrWJYRWiipY,512
- openaivec/task/nlp/dependency_parsing.py,sha256=V7pd4_EbBBvdpnFDkfZh08u7kfJ7XJLq_qLkec48yr0,2832
- openaivec/task/nlp/keyword_extraction.py,sha256=e6niCt8XU0EPJLGYOJXQvbfWtl7w9CgfnCE188kecb4,2819
- openaivec/task/nlp/morphological_analysis.py,sha256=qTFFBkFP8CRZU87S59ju5ygXWlEBCtjYlH9Su7czLjs,2416
- openaivec/task/nlp/named_entity_recognition.py,sha256=9BFKYk0PZlyNN8pItGIEFecvZew4K_F5GgY5Ub8xDtM,3052
- openaivec/task/nlp/sentiment_analysis.py,sha256=u-zpqAaQYcr7I3mqMv_CTJXkfxtoLft3qm-qwmqb_p4,3100
- openaivec/task/nlp/translation.py,sha256=kgWj2oN8pUId3vuHTJNx636gB49AGEKXWICA_XJgE_0,6628
- openaivec/task/table/__init__.py,sha256=kJz15WDJXjyC7UIHKBvlTRhCf347PCDMH5T5fONV2sU,83
- openaivec/task/table/fillna.py,sha256=zL6m5hGD4kamV7qHETnn__B59wIY540Ks0EzNgUJgdI,6888
- openaivec-0.99.2.dist-info/METADATA,sha256=phtJV19iRpLmo51lxR-74kpd1rwcq2KV1H11967QrFs,30443
- openaivec-0.99.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- openaivec-0.99.2.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
- openaivec-0.99.2.dist-info/RECORD,,
The remaining entries in the file list (the relocated openaivec/_cache/optimize.py and openaivec/_schema/spec.py, plus the dist-info WHEEL and licenses/LICENSE files) are renamed without content changes.