llama-cloud 0.1.41__py3-none-any.whl → 0.1.42__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of llama-cloud might be problematic; consult the registry's security advisory page for details.

llama_cloud/__init__.py CHANGED
@@ -16,6 +16,9 @@ from .types import (
16
16
  AgentDeploymentList,
17
17
  AgentDeploymentSummary,
18
18
  AggregateGroup,
19
+ ApiKey,
20
+ ApiKeyQueryResponse,
21
+ ApiKeyType,
19
22
  AutoTransformConfig,
20
23
  AzureOpenAiEmbedding,
21
24
  AzureOpenAiEmbeddingConfig,
@@ -428,6 +431,9 @@ __all__ = [
428
431
  "AgentDeploymentList",
429
432
  "AgentDeploymentSummary",
430
433
  "AggregateGroup",
434
+ "ApiKey",
435
+ "ApiKeyQueryResponse",
436
+ "ApiKeyType",
431
437
  "AutoTransformConfig",
432
438
  "AzureOpenAiEmbedding",
433
439
  "AzureOpenAiEmbeddingConfig",
@@ -6,7 +6,6 @@ from json.decoder import JSONDecodeError
6
6
 
7
7
  from ...core.api_error import ApiError
8
8
  from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
9
- from ...core.jsonable_encoder import jsonable_encoder
10
9
  from ...core.remove_none_from_dict import remove_none_from_dict
11
10
  from ...errors.unprocessable_entity_error import UnprocessableEntityError
12
11
  from ...types.http_validation_error import HttpValidationError
@@ -20,40 +19,31 @@ try:
20
19
  except ImportError:
21
20
  import pydantic # type: ignore
22
21
 
23
- # this is used as the default value for optional parameters
24
- OMIT = typing.cast(typing.Any, ...)
25
-
26
22
 
27
23
  class AlphaClient:
28
24
  def __init__(self, *, client_wrapper: SyncClientWrapper):
29
25
  self._client_wrapper = client_wrapper
30
26
 
31
27
  def upload_file_v_2(
32
- self,
33
- *,
34
- project_id: typing.Optional[str] = None,
35
- organization_id: typing.Optional[str] = None,
36
- configuration: str,
37
- file: typing.Optional[str] = OMIT,
28
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
38
29
  ) -> ParsingJob:
39
30
  """
40
31
  Parameters:
41
32
  - project_id: typing.Optional[str].
42
33
 
43
34
  - organization_id: typing.Optional[str].
35
+ ---
36
+ from llama_cloud.client import LlamaCloud
44
37
 
45
- - configuration: str.
46
-
47
- - file: typing.Optional[str].
38
+ client = LlamaCloud(
39
+ token="YOUR_TOKEN",
40
+ )
41
+ client.alpha.upload_file_v_2()
48
42
  """
49
- _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
50
- if file is not OMIT:
51
- _request["file"] = file
52
43
  _response = self._client_wrapper.httpx_client.request(
53
44
  "POST",
54
45
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
55
46
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
56
- json=jsonable_encoder(_request),
57
47
  headers=self._client_wrapper.get_headers(),
58
48
  timeout=60,
59
49
  )
@@ -73,31 +63,25 @@ class AsyncAlphaClient:
73
63
  self._client_wrapper = client_wrapper
74
64
 
75
65
  async def upload_file_v_2(
76
- self,
77
- *,
78
- project_id: typing.Optional[str] = None,
79
- organization_id: typing.Optional[str] = None,
80
- configuration: str,
81
- file: typing.Optional[str] = OMIT,
66
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
82
67
  ) -> ParsingJob:
83
68
  """
84
69
  Parameters:
85
70
  - project_id: typing.Optional[str].
86
71
 
87
72
  - organization_id: typing.Optional[str].
73
+ ---
74
+ from llama_cloud.client import AsyncLlamaCloud
88
75
 
89
- - configuration: str.
90
-
91
- - file: typing.Optional[str].
76
+ client = AsyncLlamaCloud(
77
+ token="YOUR_TOKEN",
78
+ )
79
+ await client.alpha.upload_file_v_2()
92
80
  """
93
- _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
94
- if file is not OMIT:
95
- _request["file"] = file
96
81
  _response = await self._client_wrapper.httpx_client.request(
97
82
  "POST",
98
83
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
99
84
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
100
- json=jsonable_encoder(_request),
101
85
  headers=self._client_wrapper.get_headers(),
102
86
  timeout=60,
103
87
  )