arize 8.0.0a12__py3-none-any.whl → 8.0.0a13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. arize/_flight/client.py +79 -3
  2. arize/_generated/api_client/__init__.py +1 -43
  3. arize/_generated/api_client/api/__init__.py +1 -17
  4. arize/_generated/api_client/api/datasets_api.py +10 -11
  5. arize/_generated/api_client/api/experiments_api.py +276 -0
  6. arize/_generated/api_client/api_client.py +4 -4
  7. arize/_generated/api_client/models/__init__.py +1 -26
  8. arize/_generated/api_client/models/dataset.py +5 -13
  9. arize/_generated/api_client/models/dataset_version.py +5 -13
  10. arize/_generated/api_client/models/datasets_create_request.py +5 -13
  11. arize/_generated/api_client/models/datasets_list200_response.py +5 -0
  12. arize/_generated/api_client/models/datasets_list_examples200_response.py +8 -3
  13. arize/_generated/api_client/models/error.py +5 -13
  14. arize/_generated/api_client/models/experiment.py +5 -13
  15. arize/_generated/api_client/models/experiments_list200_response.py +14 -9
  16. arize/_generated/api_client/test/test_datasets_list_examples200_response.py +2 -2
  17. arize/_generated/api_client/test/test_experiments_api.py +7 -0
  18. arize/_generated/api_client/test/test_experiments_list200_response.py +2 -2
  19. arize/_generated/api_client_README.md +2 -2
  20. arize/_generated/protocol/flight/export_pb2.py +8 -8
  21. arize/_lazy.py +4 -0
  22. arize/client.py +17 -4
  23. arize/config.py +13 -3
  24. arize/datasets/__init__.py +70 -0
  25. arize/datasets/client.py +119 -1
  26. arize/utils/proto.py +0 -36
  27. arize/version.py +1 -1
  28. {arize-8.0.0a12.dist-info → arize-8.0.0a13.dist-info}/METADATA +90 -1
  29. {arize-8.0.0a12.dist-info → arize-8.0.0a13.dist-info}/RECORD +31 -33
  30. arize/_generated/api_client/models/datasets_create201_response.py +0 -87
  31. arize/_generated/api_client/test/test_datasets_create201_response.py +0 -52
  32. {arize-8.0.0a12.dist-info → arize-8.0.0a13.dist-info}/WHEEL +0 -0
  33. {arize-8.0.0a12.dist-info → arize-8.0.0a13.dist-info}/licenses/LICENSE.md +0 -0
arize/datasets/client.py CHANGED
@@ -1,4 +1,18 @@
+from __future__ import annotations
+
+import logging
+from typing import Any, Dict, List
+
+import pandas as pd
+import pyarrow as pa
+
+from arize._flight.client import ArizeFlightClient
 from arize.config import SDKConfiguration
+from arize.exceptions.base import INVALID_ARROW_CONVERSION_MSG
+
+logger = logging.getLogger(__name__)
+
+REST_LIMIT_DATASET_EXAMPLES = 3
 
 
 class DatasetsClient:
@@ -14,6 +28,110 @@ class DatasetsClient:
         # Forward methods to preserve exact runtime signatures/docs
         self.list = self._api.datasets_list
         self.get = self._api.datasets_get
-        self.create = self._api.datasets_create
         self.delete = self._api.datasets_delete
         self.list_examples = self._api.datasets_list_examples
+
+        # Custom methods
+        self.create = self._create_dataset
+
+    def _create_dataset(
+        self,
+        name: str,
+        space_id: str,
+        examples: List[Dict[str, Any]] | pd.DataFrame,
+    ):
+        if (
+            isinstance(examples, list)
+            and len(examples) > REST_LIMIT_DATASET_EXAMPLES
+        ):
+            logger.info(
+                f"Uploading {len(examples)} examples via REST may be slow. "
+                "Trying to convert to DataFrame for more efficient upload."
+            )
+            # If we have too many examples, try to convert to a dataframe
+            try:
+                data = pd.DataFrame(examples)
+            except Exception as e:
+                logger.warning(
+                    f"Could not convert examples to DataFrame: {e}. "
+                    "Falling back to list upload, which may be less performant."
+                )
+                # If we can’t convert to a dataframe, just use the list
+                data = examples
+        else:
+            # If we have a dataframe or a small list, just use it directly
+            data = examples
+
+        if isinstance(data, list):
+            # If the data is in list format, use the REST endpoint
+            from arize._generated import api_client as gen
+
+            body = gen.DatasetsCreateRequest(
+                name=name,
+                spaceId=space_id,
+                examples=data,
+            )
+            return self._api.datasets_create(datasets_create_request=body)
+        elif isinstance(data, pd.DataFrame):
+            # Convert datetime columns to int64 (ms since epoch)
+            # TODO(Kiko): Missing validation block
+            # data = _convert_datetime_columns_to_int(data)
+            # df = self._set_default_columns_for_dataset(data)
+            # if convert_dict_to_json:
+            # df = _convert_default_columns_to_json_str(df)
+            # df = _convert_boolean_columns_to_str(df)
+            # validation_errors = Validator.validate(df)
+            # validation_errors.extend(
+            # Validator.validate_max_chunk_size(max_chunk_size)
+            # )
+            # if validation_errors:
+            # raise RuntimeError(
+            # [e.error_message() for e in validation_errors]
+            # )
+
+            # Convert to Arrow table
+            try:
+                logger.debug("Converting data to Arrow format")
+                pa_table = pa.Table.from_pandas(data)
+            except pa.ArrowInvalid as e:
+                logger.error(f"{INVALID_ARROW_CONVERSION_MSG}: {str(e)}")
+                raise pa.ArrowInvalid(
+                    f"Error converting to Arrow format: {str(e)}"
+                ) from e
+            except Exception as e:
+                logger.error(f"Unexpected error creating Arrow table: {str(e)}")
+                raise
+
+            response = None
+            with ArizeFlightClient(
+                api_key=self._sdk_config.api_key,
+                host=self._sdk_config.flight_server_host,
+                port=self._sdk_config.flight_server_port,
+                scheme=self._sdk_config.flight_scheme,
+                request_verify=self._sdk_config.request_verify,
+            ) as flight_client:
+                try:
+                    response = flight_client.create_dataset(
+                        space_id=space_id,
+                        dataset_name=name,
+                        pa_table=pa_table,
+                    )
+                except Exception as e:
+                    msg = f"Error during update request: {str(e)}"
+                    logger.error(msg)
+                    raise RuntimeError(msg) from e
+            if response is None:
+                # This should not happen with proper Flight client implementation,
+                # but we handle it defensively
+                msg = "No response received from flight server during update"
+                logger.error(msg)
+                raise RuntimeError(msg)
+            # The response from flightserver is the dataset ID. To return the dataset
+            # object we make a GET query
+            dataset = self.get(dataset_id=response)
+            return dataset
+
+        else:
+            raise TypeError(
+                "Examples must be a list of dicts or a pandas DataFrame"
+            )
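
Taken as a whole, the new `_create_dataset` method dispatches on the type and size of `examples`: a short list of dicts goes through the generated REST endpoint via `DatasetsCreateRequest`, while a DataFrame (or a longer list that converts cleanly to one) is turned into a `pyarrow.Table` and uploaded through `ArizeFlightClient`, after which the created dataset is fetched by ID. A minimal usage sketch, assuming an already-configured client object that exposes this `DatasetsClient` as `client.datasets` (client construction is not part of this diff):

```python
import pandas as pd

# Hypothetical example rows; the column names are illustrative only.
examples_df = pd.DataFrame(
    [
        {"query": "I can't log in on my laptop", "llm output": "Login Issues"},
        {"query": "Where is my refund?", "llm output": "Billing"},
    ]
)

# A DataFrame (or a list longer than REST_LIMIT_DATASET_EXAMPLES that converts
# to one) takes the Arrow Flight path; a short list of dicts is sent through
# the REST datasets_create call instead. `client` is assumed to be configured
# elsewhere (see the package README).
created = client.datasets.create(
    name="support-queries",
    space_id="<target-space-id>",
    examples=examples_df,
)
```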
arize/utils/proto.py CHANGED
@@ -7,9 +7,7 @@ from google.protobuf import json_format, message
 from google.protobuf.timestamp_pb2 import Timestamp
 from google.protobuf.wrappers_pb2 import DoubleValue, StringValue
 
-from arize._flight.types import FlightRequestType
 from arize._generated.protocol.flight import export_pb2 as flight_exp_pb2
-from arize._generated.protocol.flight import ingest_pb2 as flight_ing_pb2
 from arize._generated.protocol.rec import public_pb2 as pb2
 from arize.exceptions.parameters import InvalidValueType
 from arize.types import (
@@ -368,40 +366,6 @@ def get_pb_schema_tracing(
     return s
 
 
-def get_pb_flight_doput_request(
-    space_id,
-    model_id: str,
-    request_type: FlightRequestType,
-) -> flight_ing_pb2.DoPutRequest:
-    """Return a DoPutRequest for the given request_type."""
-    common_args: dict[str, str] = {
-        "space_id": space_id,
-        "external_model_id": model_id,
-    }
-
-    match request_type:
-        case FlightRequestType.EVALUATION:
-            return flight_ing_pb2.DoPutRequest(
-                write_span_evaluation_request=flight_ing_pb2.WriteSpanEvaluationRequest(
-                    **common_args
-                )
-            )
-        case FlightRequestType.ANNOTATION:
-            return flight_ing_pb2.DoPutRequest(
-                write_span_annotation_request=flight_ing_pb2.WriteSpanAnnotationRequest(
-                    **common_args
-                )
-            )
-        case FlightRequestType.METADATA:
-            return flight_ing_pb2.DoPutRequest(
-                write_span_attributes_metadata_request=flight_ing_pb2.WriteSpanAttributesMetadataRequest(
-                    **common_args
-                )
-            )
-        case _:
-            raise ValueError(f"Unsupported request_type: {request_type}")
-
-
 def message_to_dict(
     msg: message.Message,
     preserve_names: bool = True,
arize/version.py CHANGED
@@ -1 +1 @@
-__version__ = "8.0.0a12"
+__version__ = "8.0.0a13"
{arize-8.0.0a12.dist-info → arize-8.0.0a13.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: arize
-Version: 8.0.0a12
+Version: 8.0.0a13
 Summary: A helper library to interact with Arize AI APIs
 Project-URL: Homepage, https://arize.com
 Project-URL: Documentation, https://docs.arize.com/arize
@@ -34,6 +34,8 @@ Requires-Dist: pillow<11,>=8.4.0; extra == 'auto-embeddings'
 Requires-Dist: tokenizers<1,>=0.13; extra == 'auto-embeddings'
 Requires-Dist: torch<3,>=1.13; extra == 'auto-embeddings'
 Requires-Dist: transformers<5,>=4.25; extra == 'auto-embeddings'
+Provides-Extra: datasets-experiments
+Requires-Dist: pydantic; extra == 'datasets-experiments'
 Provides-Extra: dev
 Requires-Dist: pytest==8.4.2; extra == 'dev'
 Requires-Dist: ruff==0.13.2; extra == 'dev'
@@ -92,6 +94,11 @@ Description-Content-Type: text/markdown
 - [Log a batch of ML Data for a Object Detection use-case](#log-a-batch-of-ml-data-for-a-object-detection-use-case)
 - [Exporting ML Data](#exporting-ml-data)
 - [Generate embeddings for your data](#generate-embeddings-for-your-data)
+- [Operations on Datasets](#operations-on-datasets)
+- [List Datasets](#list-datasets)
+- [Create a Dataset](#create-a-dataset)
+- [Get Dataset by ID](#get-dataset-by-id)
+- [Delete a Dataset](#delete-a-dataset)
 - [Configure Logging](#configure-logging)
 - [In Code](#in-code)
 - [Via Environment Variables](#via-environment-variables)
@@ -368,6 +375,88 @@ generator = EmbeddingGenerator.from_use_case(
 df["text_vector"] = generator.generate_embeddings(text_col=df["text"])
 ```
 
+## Operations on Datasets
+
+### List Datasets
+
+You can list all datasets the user has access to using `client.datasets.list()`. Use the `limit` parameter to cap the number of datasets returned, and `space_id` to scope the list operation to a particular space.
+
+```python
+resp = client.datasets.list(
+    limit=...,  # Optional
+    space_id=...,  # Optional
+)
+```
+
+The response is an object of type `DatasetsList200Response`, and you can access the list of datasets via its `datasets` attribute. In addition, you can transform the response object to a dictionary, to JSON format, or to a pandas dataframe.
+
+```python
+# Get the list of datasets from the response
+dataset_list = resp.datasets
+# Get the response as a dictionary
+resp_dict = resp.to_dict()
+# Get the response in JSON format
+resp_json = resp.to_json()
+# Get the response as a pandas dataframe
+resp_df = resp.to_df()
+```
+
+### Create a Dataset
+
+You can create a dataset using `client.datasets.create()`. You must pass examples; creating an empty dataset is not currently supported. For instance, the following are 2 example rows expressed as a list of dictionaries. You can also pass the examples as a pandas dataframe.
+
+```python
+examples = [
+    {
+        "eval.Correctness Basic.explanation": "The query indicates that the user is having trouble accessing their account on their laptop, while access on their phone is still working. This suggests a potential issue with the login process on the laptop, which aligns with the 'Login Issues' queue. The mention of a possible change in the account could relate to login credentials or settings affecting the laptop specifically, but it still falls under the broader category of login issues.",
+        "eval.Correctness Basic.label": "correct",
+        "eval.Correctness Basic.score": 1,
+        "llm output": "Login Issues",
+        "query": "I can't get in on my laptop anymore, but my phone still works fine — could this be because I changed something in my account?"
+    },
+    {
+        "eval.Correctness Basic.explanation": "The query is about a user who signed up but is unable to log in because the system says no account is found. This issue is related to the login process, as the user is trying to access their account and is facing a problem with the login system recognizing their account. Therefore, assigning this query to the 'Login Issues' queue is appropriate.",
+        "eval.Correctness Basic.label": "correct",
+        "eval.Correctness Basic.score": 1,
+        "llm output": "Login Issues",
+        "query": "Signed up ages ago but never got around to logging in — now it says no account found. Do I start over?"
+    }
+]
+```
+
+If the number of examples (rows in a dataframe, items in a list) is large, the client SDK will try to send the data over Arrow Flight (gRPC) for better performance. If you want to force the transfer over HTTP, you can use the `force_http` flag. The response is always a `Dataset` object.
+
+```python
+created_dataset = client.datasets.create(
+    space_id="<target-space-id>",
+    name="<your-dataset-name>",  # Name must be unique within a space
+    examples=...,  # List of dictionaries or pandas dataframe
+)
+```
+
+
+
+### Get Dataset by ID
+
+To get a dataset by its ID, use `client.datasets.get()`. You can optionally also pass the ID of a particular dataset version of interest.
+
+```python
+dataset = client.datasets.get(
+    dataset_id=...,  # The unique identifier of the dataset
+    dataset_version_id=...,  # The unique identifier of the dataset version
+)
+```
+
+### Delete a Dataset
+
+To delete a dataset by its ID, use `client.datasets.delete()`. The call returns `None` on successful deletion and raises an error otherwise.
+
+```python
+client.datasets.delete(
+    dataset_id=...  # The unique identifier of the dataset
+)
+```
+
 # Configure Logging
 
 ## In Code
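
Read together, the four README sections above describe the full dataset lifecycle. A hedged end-to-end sketch that reuses the `examples` list and `client` object from those sections; note that the `id` attribute on the returned `Dataset` is an assumption not shown in this diff:

```python
# Create a dataset, fetch it back, list datasets in the space, then delete it.
created = client.datasets.create(
    space_id="<target-space-id>",
    name="demo-dataset",
    examples=examples,  # the two-example list shown above
)

fetched = client.datasets.get(dataset_id=created.id)  # `id` field is assumed
print(client.datasets.list(space_id="<target-space-id>", limit=10).to_df())

client.datasets.delete(dataset_id=created.id)  # returns None on success
```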
{arize-8.0.0a12.dist-info → arize-8.0.0a13.dist-info}/RECORD CHANGED
@@ -1,54 +1,52 @@
 arize/__init__.py,sha256=-4bbbZwcjGS9OfAunsB-lmKRCzccPdFvZmvJQJEky3E,534
-arize/_lazy.py,sha256=MVep6D93sJWvArg4pgm4CVNGc6tu-XRK_Z7EDMuc76I,2358
-arize/client.py,sha256=kDdOWC1rwYgPPExO3wT3-KU3qpMwQ0ogrAdjvf7Ls3M,5860
-arize/config.py,sha256=iynVEZhrOPdTNJTQ_KQmwKOPiwL0LfEP8AUIDYW86Xw,5801
+arize/_lazy.py,sha256=1Lnm4l42t7W-m2JYCYD-S7ASBOIl0XJkBuli3Ei1VXA,2474
+arize/client.py,sha256=H1VdHwtruq9koc37LPP1eSZEax32iQR0porczhCOLYg,6070
+arize/config.py,sha256=_EucdoM6k-6DZSurwX_6_n7VIePcKcUccil-Iwk4JH0,6015
 arize/logging.py,sha256=OahBaJRG-z5DPqWrj2_rbe2n0r4fMGOrXpxN_4M_i_w,7244
 arize/types.py,sha256=z1yg5-brmTD4kVHDmmTVkYke53JpusXXeOOpdQw7rYg,69508
-arize/version.py,sha256=pIYyrrKVIV-mJxlHLm5Y4gHaueX3_4oWwSPkGBkbmdI,25
+arize/version.py,sha256=1g3Ne_YSIwt3dSGmtEtvWJDZ7TJpxNJ6t6cItY3e-YY,25
 arize/_exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arize/_exporter/client.py,sha256=eAxJX1sUfdpLrtaQ0ynMTd5jI37JOp9fbl3NWp4WFEA,15216
 arize/_exporter/validation.py,sha256=6ROu5p7uaolxQ93lO_Eiwv9NVw_uyi3E5T--C5Klo5Q,1021
 arize/_exporter/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arize/_exporter/parsers/tracing_data_parser.py,sha256=zVS-w8t1HJkz-AIC_JCdjPJ7gJXgFpfELfqNM_vK42E,5395
 arize/_flight/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-arize/_flight/client.py,sha256=OCqMf0SEclc2TLzMqu3wgORuLHHGkZOODDZsUu8F8a0,6895
+arize/_flight/client.py,sha256=5nH5F-jmtVOK9wklyGW6sBDJHxhTKdq0t1LaEijpggw,9781
 arize/_flight/types.py,sha256=OuLupzkGYt7r0PEzsX4NmXV9uq3lD11AeRaHHI5NsSw,146
 arize/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-arize/_generated/api_client_README.md,sha256=AM6a2RwMFfDPPt1nCFC-SYW0T4vNAwNfId9FYcPFjMM,4844
-arize/_generated/api_client/__init__.py,sha256=nwOsLqmUBDWbfo3w8DRiEoWL1rD2v8XrW8hFNNzlz4g,5367
-arize/_generated/api_client/api_client.py,sha256=Wo2j_q09pQQNF19QJZ_lxSui-jrVyBXLmCsg9wXuvRc,27990
+arize/_generated/api_client_README.md,sha256=Ww8o5NA3M07uLm8_SI51HDC8cPJO1tLTOqTU1Pnwo5s,4942
+arize/_generated/api_client/__init__.py,sha256=8Gwd24UqBYZXEAscsVIwcxpJ7z1f-MFtu9yJxqbVE9E,2792
+arize/_generated/api_client/api_client.py,sha256=dsM3yuWYR6O_OHPry5R5S8HVsMvibksjpHm7IWIGwXw,27990
 arize/_generated/api_client/api_response.py,sha256=eMxw1mpmJcoGZ3gs9z6jM4oYoZ10Gjk333s9sKxGv7s,652
 arize/_generated/api_client/configuration.py,sha256=M4-qSDv2FwLqjyl8uRfIGu48JCRrk1eWMHALBlKkqEg,18994
 arize/_generated/api_client/exceptions.py,sha256=5Vnmr2HaL2cbZrqT2Zm44q0VPCFimQS28qszUpsPfYs,6593
 arize/_generated/api_client/rest.py,sha256=oBXdZdxKrAW3xr9HImyyNK3F3JOdxSFIbHNFN0tvbyo,9603
-arize/_generated/api_client/api/__init__.py,sha256=hCjRIZx6rAgRGdjUNjbPHl3XJDoCEOsZn3uXNoKnLh0,641
-arize/_generated/api_client/api/datasets_api.py,sha256=tSXL2Uki6P8A9eMMyMF9si3VWrv6kuF5zxExmI_4-ho,58987
-arize/_generated/api_client/api/experiments_api.py,sha256=kCSDbA04W7qWzhys2pHhMIVNRV0bv2tPCLT9g8_2ZVI,12719
-arize/_generated/api_client/models/__init__.py,sha256=XfFle5pGefZfuzYOencNoUMxHelO-ZDVs5YpqOr8KlQ,2382
-arize/_generated/api_client/models/dataset.py,sha256=K0Q6KmzI9TZY-CLz9d5OZN6WnSPWvio8ESYyn6zcN8Q,4650
-arize/_generated/api_client/models/dataset_version.py,sha256=DOid8M0AdvjIC62ndAAw0Y2vEFgPCVQGAXcyykXYhUI,4073
-arize/_generated/api_client/models/datasets_create201_response.py,sha256=AZ5f7PbbZCPOSrdFE5pj2itaqvuOEqfbj7FDNHop2gk,2716
-arize/_generated/api_client/models/datasets_create_request.py,sha256=FJMO2LW398S2_FzKA33nGXCWXeyYAR2sJ8yTjUsPOw0,3453
-arize/_generated/api_client/models/datasets_list200_response.py,sha256=NblZnqa7DBIFtZKFDNK6XU3uhuV4k8B-pJdQ2FLEpro,3168
-arize/_generated/api_client/models/datasets_list_examples200_response.py,sha256=uAAcFlGMVoPqa-emVqZsMlrAUa3rVr_pdPKe8Wy5IDw,2734
-arize/_generated/api_client/models/error.py,sha256=Q54b32Cio6llepX22rzsZ3uib6C-IEcOPN3GBPsYaUA,3192
-arize/_generated/api_client/models/experiment.py,sha256=tJVqnROy8U6gDnPiZRr2-nk4vRSgWH-60KeC4bn4tjU,4798
-arize/_generated/api_client/models/experiments_list200_response.py,sha256=4AXkHBq8IFTLgDdqzonIIMAF5MaN19qn2cGcRd77aK8,3147
+arize/_generated/api_client/api/__init__.py,sha256=mi-qaCL6KBckrbJtN8ZwPHcc2M8GSDKjCOngIGOWVyw,192
+arize/_generated/api_client/api/datasets_api.py,sha256=we4_t-2f9xIvXLCg3CR2dsyMWg_Z6OL-0aeEOipyA9M,58685
+arize/_generated/api_client/api/experiments_api.py,sha256=p0U5Miilw7k9e9paUA4j5PrNQeN3OP6MGK-BBhe69KI,23293
+arize/_generated/api_client/models/__init__.py,sha256=ME1bwtOgn2VVJnCruOWucVxSMYPNXZcYDvkSqJe3rac,1166
+arize/_generated/api_client/models/dataset.py,sha256=7AdN40sZYQbmvkbXefam0aQz9mBk0UoNuTT8HIOMKNQ,4305
+arize/_generated/api_client/models/dataset_version.py,sha256=JFWJWH05ED4UX32MfIa1zoPD4Sr0nPvODEdppgvRDoU,3735
+arize/_generated/api_client/models/datasets_create_request.py,sha256=EGafmuzsr0KvlkTMdZ6cA-hdsNRTj9gBf-ZwK9dg8AQ,3122
+arize/_generated/api_client/models/datasets_list200_response.py,sha256=nFq4dEzsDM9gTNafyN95dBY6rJJEZGHxpkFcugxzZ24,3434
+arize/_generated/api_client/models/datasets_list_examples200_response.py,sha256=7kbFCb1pn6sSSs4YeY4vEvf5s0HugK1d9VUIj9WY2wE,3024
+arize/_generated/api_client/models/error.py,sha256=nlrEYG7t0Vag4HF7EaongORASK09hm8wumwDKrPO1aM,2845
+arize/_generated/api_client/models/experiment.py,sha256=fYHNJacVEMpbr4gIw1j3Nf8Qx6_90EQ41SM5LBKD4Y0,4456
+arize/_generated/api_client/models/experiments_list200_response.py,sha256=5M7FkJV4cRILLHBw1n5fui7A3WCnwqN7zzxdFOgsrS8,3500
 arize/_generated/api_client/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arize/_generated/api_client/test/test_dataset.py,sha256=SBCRDWfkVpjW4KpvPQYewQ6B-dkZNQ7aUat-6kNuoms,2237
 arize/_generated/api_client/test/test_dataset_version.py,sha256=GF7kNg9icnRm9D8NxOaaVh1YDl_IMEOwfxtSDxB-2gA,1788
 arize/_generated/api_client/test/test_datasets_api.py,sha256=87YkU1FQUGMPhlE3Tut7gA8BC7bkywz7323k6wfAs2k,1516
-arize/_generated/api_client/test/test_datasets_create201_response.py,sha256=pXzfp0DCtF-f27OfVvPmLwoHSw20WoHWMGbT7vhYZnk,1688
 arize/_generated/api_client/test/test_datasets_create_request.py,sha256=9YTIb9x33IGen_iuNw-S_KhXCKAZZroEcCwZx-dyAno,1813
 arize/_generated/api_client/test/test_datasets_list200_response.py,sha256=9Zc_ZNaqhrCFvnQ_VL05G1ZG2DEb-bQNwLk5nA6i7L4,3630
-arize/_generated/api_client/test/test_datasets_list_examples200_response.py,sha256=Urj8Qx185a1KkBww1Rv9sa9qUHRyLOwwy6Flo9pDSjA,1841
+arize/_generated/api_client/test/test_datasets_list_examples200_response.py,sha256=TQKmd1MtRhi-jQTY2BV7Pm--mf1GZfGcWoOhpUapx8M,1849
 arize/_generated/api_client/test/test_error.py,sha256=0XtZ4I6Q6cvaV22W_gmTzvFNFhFSj3Kv7muJVMoQywc,1410
 arize/_generated/api_client/test/test_experiment.py,sha256=PV415RGPEAjz5zNIZGDoaXyS6N3JXFjBTALdF8n5JKE,1916
-arize/_generated/api_client/test/test_experiments_api.py,sha256=Mu9AekLnYX5U7O1nM9c5BtKAOMIrnrDdzlPFf5flTok,940
-arize/_generated/api_client/test/test_experiments_list200_response.py,sha256=xqkh0dbFjqLMlUiK5fhMw4qIZKfiaDDjQQAQDrA1yMU,2880
+arize/_generated/api_client/test/test_experiments_api.py,sha256=NZ0Dy3-ygmmrit3LJFnd7g2TkMhYUa2QhtJLV0EYS0k,1093
+arize/_generated/api_client/test/test_experiments_list200_response.py,sha256=BetH2525aeMCBYYoxx0a-5gBbb-mzRC7QKpt4QIGSJ4,2894
 arize/_generated/protocol/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arize/_generated/protocol/flight/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-arize/_generated/protocol/flight/export_pb2.py,sha256=yhmOUp6XVbpAJnED6V-jCAWmpwtp7AZZDuU4BEfyj4k,4237
+arize/_generated/protocol/flight/export_pb2.py,sha256=fF5-2ygRwyLgPD_of_cDrqyi5He9ajXEcarJWivg8ZU,4320
 arize/_generated/protocol/flight/ingest_pb2.py,sha256=-wC5rbLK4yjROQuXOU9c_gPwA4aHKBCN86u6IeivUZo,24302
 arize/_generated/protocol/rec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arize/_generated/protocol/rec/public_pb2.py,sha256=vgP-yTSZLeomVwfIzcOo6t3i1mPCCNJGgd41ZkfLNng,79898
@@ -57,8 +55,8 @@ arize/constants/config.py,sha256=wDoptoadZviB0_7GgwqwkIWeyZFpLJdBy-SfaiGfwdE,107
 arize/constants/ml.py,sha256=X_vtKpt1AdhLoT2DWEyKDSXAVEuzjwGFacIbgUOpB3M,2358
 arize/constants/model_mapping.json,sha256=OPE54rBATzmwRhx0tycsxnGae1jBhtqEmQqQvzleTSc,5725
 arize/constants/spans.py,sha256=EfMgbEIK_2EUcvUY5BGnNAbS7bupBKePlI3j2L5T5CE,2532
-arize/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-arize/datasets/client.py,sha256=Rk3TQF2IzJwi3JqF1GYt1tUs68gPIiVFRgKjEmY7igE,743
+arize/datasets/__init__.py,sha256=GVNsjaqzQt4x-nILE41BvWyZqfYAxPQB0oHZgIDW7ws,2289
+arize/datasets/client.py,sha256=HBzxSQ_SmCtda3nkEnunztiuxmAQOqtR4vNn5BaqXEs,5327
 arize/embeddings/__init__.py,sha256=6_C8908W_qDixkoBJl1wapgmQCzI8TPLH207kzbYsFA,156
 arize/embeddings/auto_generator.py,sha256=ukZUJWRkiG9HFgSHXhr44rt2tdVHn1phb7_nOxYXWEg,4111
 arize/embeddings/base_generators.py,sha256=HybEUAzeESswEDmkmvPayzFab1y8deg5X20HSphGp8Q,8855
@@ -117,10 +115,10 @@ arize/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 arize/utils/arrow.py,sha256=4In1gQc0i4Rb8zuwI0w-Hv-10wiItu5opqqGrJ8tSzo,5277
 arize/utils/casting.py,sha256=KUrPUQN6qJEVe39nxbr0T-0GjAJLHjf4xWuzV71QezI,12468
 arize/utils/dataframe.py,sha256=I0FloPgNiqlKga32tMOvTE70598QA8Hhrgf-6zjYMAM,1120
-arize/utils/proto.py,sha256=9vLo53INYjdF78ffjm3E48jFwK6LbPD2FfKei7VaDy8,35477
+arize/utils/proto.py,sha256=7PwggGH7iz1Ldwv0BCIxu9-WBeojNXUcT2pDU502Mto,34175
 arize/utils/online_tasks/__init__.py,sha256=nDuTLUTYnZaWgyJoYR1P7O8ZKA-Nba7X6tJ9OislbWM,144
 arize/utils/online_tasks/dataframe_preprocessor.py,sha256=YyeeeFu_FwCYImbYvBZvQIH_5TK2lHru8KSfqV893ps,8884
-arize-8.0.0a12.dist-info/METADATA,sha256=-1PeD1-Kf8yRY1Nw9PyuRDaOa88Rkg98Jd6l5ruq7Hk,14725
-arize-8.0.0a12.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-arize-8.0.0a12.dist-info/licenses/LICENSE.md,sha256=8vLN8Gms62NCBorxIv9MUvuK7myueb6_-dhXHPmm4H0,1479
-arize-8.0.0a12.dist-info/RECORD,,
+arize-8.0.0a13.dist-info/METADATA,sha256=rtL4iOu2_1BXcOEVhfvzJ8R0l3Ff3QWGbVRaNnbCny8,18881
+arize-8.0.0a13.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+arize-8.0.0a13.dist-info/licenses/LICENSE.md,sha256=8vLN8Gms62NCBorxIv9MUvuK7myueb6_-dhXHPmm4H0,1479
+arize-8.0.0a13.dist-info/RECORD,,
arize/_generated/api_client/models/datasets_create201_response.py DELETED
@@ -1,87 +0,0 @@
-# coding: utf-8
-
-"""
-    Arize REST API
-
-    API specification for the backend data server. The API is hosted globally at https://app.arize.com/api/v1 or in your own environment. You can access the OpenAPI spec for this API at https://app.arize.com/api/v1/spec.yaml
-
-    The version of the OpenAPI document: 0.0.1
-    Generated by OpenAPI Generator (https://openapi-generator.tech)
-
-    Do not edit the class manually.
-""" # noqa: E501
-
-
-from __future__ import annotations
-import pprint
-import re # noqa: F401
-import json
-
-from pydantic import BaseModel, ConfigDict, Field, StrictStr
-from typing import Any, ClassVar, Dict
-from typing import Optional, Set
-from typing_extensions import Self
-
-class DatasetsCreate201Response(BaseModel):
-    """
-    DatasetsCreate201Response
-    """ # noqa: E501
-    dataset_id: StrictStr = Field(description="The ID of the created dataset")
-    __properties: ClassVar[List[str]] = ["dataset_id"]
-
-    model_config = ConfigDict(
-        populate_by_name=True,
-        validate_assignment=True,
-        protected_namespaces=(),
-    )
-
-
-    def to_str(self) -> str:
-        """Returns the string representation of the model using alias"""
-        return pprint.pformat(self.model_dump(by_alias=True))
-
-    def to_json(self) -> str:
-        """Returns the JSON representation of the model using alias"""
-        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
-        return json.dumps(self.to_dict())
-
-    @classmethod
-    def from_json(cls, json_str: str) -> Optional[Self]:
-        """Create an instance of DatasetsCreate201Response from a JSON string"""
-        return cls.from_dict(json.loads(json_str))
-
-    def to_dict(self) -> Dict[str, Any]:
-        """Return the dictionary representation of the model using alias.
-
-        This has the following differences from calling pydantic's
-        `self.model_dump(by_alias=True)`:
-
-        * `None` is only added to the output dict for nullable fields that
-          were set at model initialization. Other fields with value `None`
-          are ignored.
-        """
-        excluded_fields: Set[str] = set([
-        ])
-
-        _dict = self.model_dump(
-            by_alias=True,
-            exclude=excluded_fields,
-            exclude_none=True,
-        )
-        return _dict
-
-    @classmethod
-    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
-        """Create an instance of DatasetsCreate201Response from a dict"""
-        if obj is None:
-            return None
-
-        if not isinstance(obj, dict):
-            return cls.model_validate(obj)
-
-        _obj = cls.model_validate({
-            "dataset_id": obj.get("dataset_id")
-        })
-        return _obj
-
-
arize/_generated/api_client/test/test_datasets_create201_response.py DELETED
@@ -1,52 +0,0 @@
-# coding: utf-8
-
-"""
-    Arize REST API
-
-    API specification for the backend data server. The API is hosted globally at https://app.arize.com/api/v1 or in your own environment. You can access the OpenAPI spec for this API at https://app.arize.com/api/v1/spec.yaml
-
-    The version of the OpenAPI document: 0.0.1
-    Generated by OpenAPI Generator (https://openapi-generator.tech)
-
-    Do not edit the class manually.
-""" # noqa: E501
-
-
-import unittest
-
-from arize._generated.api_client.models.datasets_create201_response import DatasetsCreate201Response
-
-class TestDatasetsCreate201Response(unittest.TestCase):
-    """DatasetsCreate201Response unit test stubs"""
-
-    def setUp(self):
-        pass
-
-    def tearDown(self):
-        pass
-
-    def make_instance(self, include_optional) -> DatasetsCreate201Response:
-        """Test DatasetsCreate201Response
-            include_optional is a boolean, when False only required
-            params are included, when True both required and
-            optional params are included """
-        # uncomment below to create an instance of `DatasetsCreate201Response`
-        """
-        model = DatasetsCreate201Response()
-        if include_optional:
-            return DatasetsCreate201Response(
-                dataset_id = ''
-            )
-        else:
-            return DatasetsCreate201Response(
-                dataset_id = '',
-            )
-        """
-
-    def testDatasetsCreate201Response(self):
-        """Test DatasetsCreate201Response"""
-        # inst_req_only = self.make_instance(include_optional=False)
-        # inst_req_and_optional = self.make_instance(include_optional=True)
-
-if __name__ == '__main__':
-    unittest.main()