retab 0.0.88__py3-none-any.whl → 0.0.90__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
retab/client.py CHANGED
@@ -10,7 +10,7 @@ import backoff.types
10
10
  import httpx
11
11
  import truststore
12
12
 
13
- from .resources import documents, models, schemas, projects, extractions, edit
13
+ from .resources import documents, models, schemas, projects, extractions, edit, workflows, jobs
14
14
  from .types.standards import PreparedRequest, FieldUnset
15
15
 
16
16
 
@@ -189,7 +189,8 @@ class Retab(BaseRetab):
189
189
  self.models = models.Models(client=self)
190
190
  self.schemas = schemas.Schemas(client=self)
191
191
  self.edit = edit.Edit(client=self)
192
-
192
+ self.workflows = workflows.Workflows(client=self)
193
+ self.jobs = jobs.Jobs(client=self)
193
194
  def _request(
194
195
  self,
195
196
  method: str,
@@ -487,7 +488,9 @@ class AsyncRetab(BaseRetab):
487
488
  self.models = models.AsyncModels(client=self)
488
489
  self.schemas = schemas.AsyncSchemas(client=self)
489
490
  self.edit = edit.AsyncEdit(client=self)
490
-
491
+ self.workflows = workflows.AsyncWorkflows(client=self)
492
+ self.jobs = jobs.AsyncJobs(client=self)
493
+
491
494
  def _parse_response(self, response: httpx.Response) -> Any:
492
495
  """Parse response based on content-type.
493
496
 
@@ -0,0 +1,3 @@
1
+ from .client import AsyncJobs, Jobs
2
+
3
+ __all__ = ["Jobs", "AsyncJobs"]
@@ -0,0 +1,252 @@
1
+ """
2
+ Jobs API Resource
3
+
4
+ Provides synchronous and asynchronous clients for the Jobs API.
5
+ """
6
+
7
+ from typing import Any
8
+
9
+ from ..._resource import AsyncAPIResource, SyncAPIResource
10
+ from ...types.jobs import Job, JobListResponse, JobStatus, SupportedEndpoint
11
+ from ...types.standards import PreparedRequest
12
+
13
+
class BaseJobsMixin:
    """Request-building helpers shared by the sync and async Jobs clients."""

    def _prepare_create(
        self,
        endpoint: SupportedEndpoint,
        request: dict[str, Any],
        metadata: dict[str, str] | None = None,
    ) -> PreparedRequest:
        # Only include "metadata" when the caller supplied it, keeping the
        # wire payload minimal.
        payload: dict[str, Any] = {"endpoint": endpoint, "request": request}
        if metadata is not None:
            payload["metadata"] = metadata
        return PreparedRequest(method="POST", url="/v1/jobs", data=payload)

    def _prepare_retrieve(self, job_id: str) -> PreparedRequest:
        # Single-job lookup by identifier.
        return PreparedRequest(method="GET", url=f"/v1/jobs/{job_id}")

    def _prepare_cancel(self, job_id: str) -> PreparedRequest:
        # Cancellation is a POST to the job's /cancel sub-resource.
        return PreparedRequest(method="POST", url=f"/v1/jobs/{job_id}/cancel")

    def _prepare_list(
        self,
        after: str | None = None,
        limit: int = 20,
        status: JobStatus | None = None,
    ) -> PreparedRequest:
        # "limit" is always sent; the optional cursor and status filter are
        # added only when provided.
        query: dict[str, Any] = {"limit": limit}
        if after is not None:
            query["after"] = after
        if status is not None:
            query["status"] = status
        return PreparedRequest(method="GET", url="/v1/jobs", params=query)
class Jobs(SyncAPIResource, BaseJobsMixin):
    """Synchronous client for the Jobs API.

    Jobs run long-running extract or parse operations in the background:
    submit work with ``create``, then poll ``retrieve`` until the job
    reaches a terminal status.

    Example:
        >>> from retab import Retab
        >>> client = Retab(api_key="your-api-key")
        >>>
        >>> # Create an async extraction job
        >>> job = client.jobs.create(
        ...     endpoint="/v1/documents/extract",
        ...     request={
        ...         "document": {"content": "...", "mime_type": "application/pdf"},
        ...         "json_schema": {"type": "object", ...},
        ...         "model": "gpt-4o",
        ...     }
        ... )
        >>>
        >>> # Poll for completion
        >>> while job.status not in ("completed", "failed", "cancelled"):
        ...     import time
        ...     time.sleep(5)
        ...     job = client.jobs.retrieve(job.id)
        >>>
        >>> if job.status == "completed":
        ...     print(job.response.body)
    """

    def create(
        self,
        endpoint: SupportedEndpoint,
        request: dict[str, Any],
        metadata: dict[str, str] | None = None,
    ) -> Job:
        """Submit a new asynchronous job.

        Args:
            endpoint: The API endpoint to call ("/v1/documents/extract" or "/v1/documents/parse")
            request: The full request body for the target endpoint
            metadata: Optional metadata (max 16 pairs; keys ≤64 chars, values ≤512 chars)

        Returns:
            Job: The created job with status "queued"
        """
        payload = self._client._prepared_request(
            self._prepare_create(endpoint, request, metadata)
        )
        return Job.model_validate(payload)

    def retrieve(self, job_id: str) -> Job:
        """Fetch the current state of a job.

        Args:
            job_id: The job ID to retrieve

        Returns:
            Job: The job with current status and result (if completed)
        """
        payload = self._client._prepared_request(self._prepare_retrieve(job_id))
        return Job.model_validate(payload)

    def cancel(self, job_id: str) -> Job:
        """Cancel a queued or in-progress job.

        Args:
            job_id: The job ID to cancel

        Returns:
            Job: The updated job with status "cancelled"
        """
        payload = self._client._prepared_request(self._prepare_cancel(job_id))
        return Job.model_validate(payload)

    def list(
        self,
        after: str | None = None,
        limit: int = 20,
        status: JobStatus | None = None,
    ) -> JobListResponse:
        """Page through jobs, optionally filtered by status.

        Args:
            after: Pagination cursor (last ID from previous page)
            limit: Number of jobs to return (1-100, default 20)
            status: Filter by job status

        Returns:
            JobListResponse: List of jobs with pagination info
        """
        payload = self._client._prepared_request(
            self._prepare_list(after, limit, status)
        )
        return JobListResponse.model_validate(payload)
class AsyncJobs(AsyncAPIResource, BaseJobsMixin):
    """Asynchronous client for the Jobs API.

    Jobs run long-running extract or parse operations in the background:
    submit work with ``create``, then poll ``retrieve`` until the job
    reaches a terminal status.

    Example:
        >>> from retab import AsyncRetab
        >>> client = AsyncRetab(api_key="your-api-key")
        >>>
        >>> # Create an async extraction job
        >>> job = await client.jobs.create(
        ...     endpoint="/v1/documents/extract",
        ...     request={
        ...         "document": {"content": "...", "mime_type": "application/pdf"},
        ...         "json_schema": {"type": "object", ...},
        ...         "model": "gpt-4o",
        ...     }
        ... )
        >>>
        >>> # Poll for completion
        >>> while job.status not in ("completed", "failed", "cancelled"):
        ...     import asyncio
        ...     await asyncio.sleep(5)
        ...     job = await client.jobs.retrieve(job.id)
        >>>
        >>> if job.status == "completed":
        ...     print(job.response.body)
    """

    async def create(
        self,
        endpoint: SupportedEndpoint,
        request: dict[str, Any],
        metadata: dict[str, str] | None = None,
    ) -> Job:
        """Submit a new asynchronous job.

        Args:
            endpoint: The API endpoint to call ("/v1/documents/extract" or "/v1/documents/parse")
            request: The full request body for the target endpoint
            metadata: Optional metadata (max 16 pairs; keys ≤64 chars, values ≤512 chars)

        Returns:
            Job: The created job with status "queued"
        """
        payload = await self._client._prepared_request(
            self._prepare_create(endpoint, request, metadata)
        )
        return Job.model_validate(payload)

    async def retrieve(self, job_id: str) -> Job:
        """Fetch the current state of a job.

        Args:
            job_id: The job ID to retrieve

        Returns:
            Job: The job with current status and result (if completed)
        """
        payload = await self._client._prepared_request(self._prepare_retrieve(job_id))
        return Job.model_validate(payload)

    async def cancel(self, job_id: str) -> Job:
        """Cancel a queued or in-progress job.

        Args:
            job_id: The job ID to cancel

        Returns:
            Job: The updated job with status "cancelled"
        """
        payload = await self._client._prepared_request(self._prepare_cancel(job_id))
        return Job.model_validate(payload)

    async def list(
        self,
        after: str | None = None,
        limit: int = 20,
        status: JobStatus | None = None,
    ) -> JobListResponse:
        """Page through jobs, optionally filtered by status.

        Args:
            after: Pagination cursor (last ID from previous page)
            limit: Number of jobs to return (1-100, default 20)
            status: Filter by job status

        Returns:
            JobListResponse: List of jobs with pagination info
        """
        payload = await self._client._prepared_request(
            self._prepare_list(after, limit, status)
        )
        return JobListResponse.model_validate(payload)
@@ -1,6 +1,6 @@
1
1
  from io import IOBase
2
2
  from pathlib import Path
3
- from typing import Any, Dict
3
+ from typing import Any, Dict, Optional
4
4
 
5
5
  import PIL.Image
6
6
  from pydantic import HttpUrl
@@ -21,15 +21,19 @@ class WorkflowRunsMixin:
21
21
  def prepare_create(
22
22
  self,
23
23
  workflow_id: str,
24
- documents: Dict[str, DocumentInput],
24
+ documents: Optional[Dict[str, DocumentInput]] = None,
25
+ json_inputs: Optional[Dict[str, Dict[str, Any]]] = None,
26
+ text_inputs: Optional[Dict[str, str]] = None,
25
27
  ) -> PreparedRequest:
26
- """Prepare a request to run a workflow with input documents.
28
+ """Prepare a request to run a workflow with input documents, JSON data, and/or text data.
27
29
 
28
30
  Args:
29
31
  workflow_id: The ID of the workflow to run
30
32
  documents: Mapping of start node IDs to their input documents.
31
33
  Each document can be a file path, bytes, file-like object,
32
34
  MIMEData, PIL Image, or HttpUrl.
35
+ json_inputs: Mapping of start_json node IDs to their input JSON data.
36
+ text_inputs: Mapping of start_text node IDs to their input text.
33
37
 
34
38
  Returns:
35
39
  PreparedRequest: The prepared request
@@ -40,20 +44,37 @@ class WorkflowRunsMixin:
40
44
  ... documents={
41
45
  ... "start-node-1": Path("invoice.pdf"),
42
46
  ... "start-node-2": Path("receipt.pdf"),
47
+ ... },
48
+ ... json_inputs={
49
+ ... "json-node-1": {"key": "value"},
50
+ ... },
51
+ ... text_inputs={
52
+ ... "text-node-1": "Hello, world!",
43
53
  ... }
44
54
  ... )
45
55
  """
56
+ data: Dict[str, Any] = {}
57
+
46
58
  # Convert each document to MIMEData and then to the format expected by the backend
47
- documents_payload: Dict[str, Dict[str, Any]] = {}
48
- for node_id, document in documents.items():
49
- mime_data = prepare_mime_document(document)
50
- documents_payload[node_id] = {
51
- "filename": mime_data.filename,
52
- "content": mime_data.content,
53
- "mime_type": mime_data.mime_type,
54
- }
55
-
56
- data = {"documents": documents_payload}
59
+ if documents:
60
+ documents_payload: Dict[str, Dict[str, Any]] = {}
61
+ for node_id, document in documents.items():
62
+ mime_data = prepare_mime_document(document)
63
+ documents_payload[node_id] = {
64
+ "filename": mime_data.filename,
65
+ "content": mime_data.content,
66
+ "mime_type": mime_data.mime_type,
67
+ }
68
+ data["documents"] = documents_payload
69
+
70
+ # Add JSON inputs directly
71
+ if json_inputs:
72
+ data["json_inputs"] = json_inputs
73
+
74
+ # Add text inputs directly
75
+ if text_inputs:
76
+ data["text_inputs"] = text_inputs
77
+
57
78
  return PreparedRequest(method="POST", url=f"/v1/workflows/{workflow_id}/run", data=data)
58
79
 
59
80
  def prepare_get(self, run_id: str) -> PreparedRequest:
@@ -77,9 +98,11 @@ class WorkflowRuns(SyncAPIResource, WorkflowRunsMixin):
77
98
  def create(
78
99
  self,
79
100
  workflow_id: str,
80
- documents: Dict[str, DocumentInput],
101
+ documents: Optional[Dict[str, DocumentInput]] = None,
102
+ json_inputs: Optional[Dict[str, Dict[str, Any]]] = None,
103
+ text_inputs: Optional[Dict[str, str]] = None,
81
104
  ) -> WorkflowRun:
82
- """Run a workflow with the provided input documents.
105
+ """Run a workflow with the provided inputs.
83
106
 
84
107
  This creates a workflow run and starts execution in the background.
85
108
  The returned WorkflowRun will have status "running" - use get()
@@ -90,25 +113,37 @@ class WorkflowRuns(SyncAPIResource, WorkflowRunsMixin):
90
113
  documents: Mapping of start node IDs to their input documents.
91
114
  Each document can be a file path, bytes, file-like object,
92
115
  MIMEData, PIL Image, or HttpUrl.
116
+ json_inputs: Mapping of start_json node IDs to their input JSON data.
117
+ text_inputs: Mapping of start_text node IDs to their input text.
93
118
 
94
119
  Returns:
95
120
  WorkflowRun: The created workflow run with status "running"
96
121
 
97
122
  Raises:
98
123
  HTTPException: If the request fails (e.g., workflow not found,
99
- missing input documents for start nodes)
124
+ missing inputs for start nodes)
100
125
 
101
126
  Example:
102
127
  >>> run = client.workflows.runs.create(
103
128
  ... workflow_id="wf_abc123",
104
129
  ... documents={
105
130
  ... "start-node-1": Path("invoice.pdf"),
106
- ... "start-node-2": Path("receipt.pdf"),
131
+ ... },
132
+ ... json_inputs={
133
+ ... "json-node-1": {"key": "value"},
134
+ ... },
135
+ ... text_inputs={
136
+ ... "text-node-1": "Hello, world!",
107
137
  ... }
108
138
  ... )
109
139
  >>> print(f"Run started: {run.id}, status: {run.status}")
110
140
  """
111
- request = self.prepare_create(workflow_id=workflow_id, documents=documents)
141
+ request = self.prepare_create(
142
+ workflow_id=workflow_id,
143
+ documents=documents,
144
+ json_inputs=json_inputs,
145
+ text_inputs=text_inputs,
146
+ )
112
147
  response = self._client._prepared_request(request)
113
148
  return WorkflowRun.model_validate(response)
114
149
 
@@ -138,9 +173,11 @@ class AsyncWorkflowRuns(AsyncAPIResource, WorkflowRunsMixin):
138
173
  async def create(
139
174
  self,
140
175
  workflow_id: str,
141
- documents: Dict[str, DocumentInput],
176
+ documents: Optional[Dict[str, DocumentInput]] = None,
177
+ json_inputs: Optional[Dict[str, Dict[str, Any]]] = None,
178
+ text_inputs: Optional[Dict[str, str]] = None,
142
179
  ) -> WorkflowRun:
143
- """Run a workflow with the provided input documents.
180
+ """Run a workflow with the provided inputs.
144
181
 
145
182
  This creates a workflow run and starts execution in the background.
146
183
  The returned WorkflowRun will have status "running" - use get()
@@ -151,25 +188,37 @@ class AsyncWorkflowRuns(AsyncAPIResource, WorkflowRunsMixin):
151
188
  documents: Mapping of start node IDs to their input documents.
152
189
  Each document can be a file path, bytes, file-like object,
153
190
  MIMEData, PIL Image, or HttpUrl.
191
+ json_inputs: Mapping of start_json node IDs to their input JSON data.
192
+ text_inputs: Mapping of start_text node IDs to their input text.
154
193
 
155
194
  Returns:
156
195
  WorkflowRun: The created workflow run with status "running"
157
196
 
158
197
  Raises:
159
198
  HTTPException: If the request fails (e.g., workflow not found,
160
- missing input documents for start nodes)
199
+ missing inputs for start nodes)
161
200
 
162
201
  Example:
163
202
  >>> run = await client.workflows.runs.create(
164
203
  ... workflow_id="wf_abc123",
165
204
  ... documents={
166
205
  ... "start-node-1": Path("invoice.pdf"),
167
- ... "start-node-2": Path("receipt.pdf"),
206
+ ... },
207
+ ... json_inputs={
208
+ ... "json-node-1": {"key": "value"},
209
+ ... },
210
+ ... text_inputs={
211
+ ... "text-node-1": "Hello, world!",
168
212
  ... }
169
213
  ... )
170
214
  >>> print(f"Run started: {run.id}, status: {run.status}")
171
215
  """
172
- request = self.prepare_create(workflow_id=workflow_id, documents=documents)
216
+ request = self.prepare_create(
217
+ workflow_id=workflow_id,
218
+ documents=documents,
219
+ json_inputs=json_inputs,
220
+ text_inputs=text_inputs,
221
+ )
173
222
  response = await self._client._prepared_request(request)
174
223
  return WorkflowRun.model_validate(response)
175
224
 
retab/types/jobs.py ADDED
@@ -0,0 +1,90 @@
1
+ """
2
+ Jobs API Types
3
+
4
+ Pydantic models for the asynchronous Jobs API.
5
+ """
6
+
7
+ from typing import Any, Literal
8
+
9
+ from pydantic import BaseModel, Field
10
+
11
+
12
+ JobStatus = Literal[
13
+ "validating",
14
+ "queued",
15
+ "in_progress",
16
+ "completed",
17
+ "failed",
18
+ "cancelled",
19
+ "expired",
20
+ ]
21
+
22
+ SupportedEndpoint = Literal[
23
+ "/v1/documents/extract",
24
+ "/v1/documents/parse",
25
+ "/v1/documents/split",
26
+ "/v1/documents/classify",
27
+ "/v1/schemas/generate",
28
+ "/v1/edit/agent/fill",
29
+ "/v1/edit/templates/fill",
30
+ "/v1/edit/templates/generate",
31
+ "/v1/projects/extract", # Requires "project_id" in request body
32
+ ]
33
+
34
+
35
+ class JobResponse(BaseModel):
36
+ """Response stored when job completes successfully."""
37
+ status_code: int
38
+ body: dict[str, Any]
39
+
40
+
41
+ class JobError(BaseModel):
42
+ """Error details when job fails."""
43
+ code: str
44
+ message: str
45
+ details: dict[str, Any] | None = None
46
+
47
+
48
+ class Job(BaseModel):
49
+ """
50
+ Job object representing an asynchronous operation.
51
+
52
+ Use this to track the status of long-running operations like extract, parse,
53
+ split, classify, schema generation, and template operations.
54
+ """
55
+ id: str
56
+ object: Literal["job"] = "job"
57
+ status: JobStatus
58
+ endpoint: SupportedEndpoint
59
+ request: dict[str, Any]
60
+ response: JobResponse | None = None
61
+ error: JobError | None = None
62
+
63
+ # Timestamps (Unix timestamps)
64
+ created_at: int
65
+ started_at: int | None = None
66
+ completed_at: int | None = None
67
+ expires_at: int
68
+
69
+ # User context
70
+ organization_id: str
71
+ metadata: dict[str, str] | None = None
72
+
73
+
74
+ class CreateJobRequest(BaseModel):
75
+ """Request body for creating a new job."""
76
+ endpoint: SupportedEndpoint
77
+ request: dict[str, Any]
78
+ metadata: dict[str, str] | None = Field(
79
+ default=None,
80
+ description="Max 16 pairs; keys ≤64 chars, values ≤512 chars"
81
+ )
82
+
83
+
84
+ class JobListResponse(BaseModel):
85
+ """Response for listing jobs."""
86
+ object: Literal["list"] = "list"
87
+ data: list[Job]
88
+ first_id: str | None = None
89
+ last_id: str | None = None
90
+ has_more: bool = False
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: retab
3
- Version: 0.0.88
3
+ Version: 0.0.90
4
4
  Summary: Retab official python library
5
5
  Home-page: https://github.com/retab-dev/retab
6
6
  Author: Retab
@@ -1,6 +1,6 @@
1
1
  retab/__init__.py,sha256=s4GawWTRBYz4VY-CyAV5-ZdFtdw8V5oopGIYm9GgdSo,188
2
2
  retab/_resource.py,sha256=JfAU4UTa05ugWfbrpO7fsVr_pFewht99NkoIfK6kBQM,577
3
- retab/client.py,sha256=VrOzEtZQPR4uydO8QJJYkMOoAiC1TfPbkXmTnatSQ0w,30172
3
+ retab/client.py,sha256=Ds-Sy3ynN9GusN5rDrc2ogX3ATv-Dq1MuiZeDnLOWGk,30408
4
4
  retab/generate_types.py,sha256=cUu1IX65uU__MHivmEb_PZtzAi8DYsvppZvcY30hj90,8425
5
5
  retab/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
6
  retab/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -16,15 +16,18 @@ retab/resources/edit/templates/__init__.py,sha256=n-zA_HXo7iGgeIclSwcsxmSueXJIRM
16
16
  retab/resources/edit/templates/client.py,sha256=kEyqat5I84_QBeWSjptteSwvlMGRZ1UF9KDzH7p0f9s,20173
17
17
  retab/resources/extractions/__init__.py,sha256=2H1ezUG8hI5SmTRy6NFzXdYLOdGFFsFrI60uzkitV20,97
18
18
  retab/resources/extractions/client.py,sha256=sEoNjOgX91FTOgoJUV-I1A9A9xl1ciCdPlhYwjhEjbA,11035
19
+ retab/resources/jobs/__init__.py,sha256=g7WnNAw69CExMSyfxU9ROcSj-KODjxeLe2YlUqi8l0c,69
20
+ retab/resources/jobs/client.py,sha256=Cf7bafUzECqCXbCeKW396Q4fRFOMgjKDtgQ3e_ThIQY,8115
19
21
  retab/resources/projects/__init__.py,sha256=tPR3_3tr7bsoYd618qmGjnYN2R23PmF5oCFd7Z5_HGY,85
20
22
  retab/resources/projects/client.py,sha256=5LPAhJt5-nqBP4VWYvo0k7cW6HLGF6K9xMiHKQzIXho,15593
21
23
  retab/resources/workflows/__init__.py,sha256=-I0QNX7XKEr8ZJTV4-awMyKxZqGlSkKMdibiHiB7cZ0,89
22
24
  retab/resources/workflows/client.py,sha256=G1dYV66Wsas_QWQ9O2N7s1VUt72TP1W1ZG-_cEWEURM,755
23
25
  retab/resources/workflows/runs/__init__.py,sha256=5hPZ-70StN0U8bOlhm9H_ZXFljBjy8VoWQRu1_cGAVM,101
24
- retab/resources/workflows/runs/client.py,sha256=8l87Sf5RNNLIJNyhCwCprqA9ffq3J9zSlwoQHdyrEN4,6771
26
+ retab/resources/workflows/runs/client.py,sha256=GopedV363XnGl0mL3bZHWaOay12uAeTqq4iIEJSadMA,8739
25
27
  retab/types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
28
  retab/types/chat.py,sha256=x9VbtPMa4w6Gc0HrFC3ILl6cCnfEn5ytDnwJtZmlcys,1436
27
29
  retab/types/inference_settings.py,sha256=wIivYffvEE7v6lhbjbhAZGssK4uYr64Oq6cZKxzY5_M,1131
30
+ retab/types/jobs.py,sha256=iVCl2EmlvvgvdO217gOGqvkuX-38le89C9XR7gnEb3E,2086
28
31
  retab/types/mime.py,sha256=ZLNCD3pvgn5cbGfJwzrdkjgB9dMHCbN67YEV9bx47zE,10063
29
32
  retab/types/modality.py,sha256=4B8LctdUBZVgIjtS2FjrJpljn2Eyse0XE1bpFsGb9O4,131
30
33
  retab/types/pagination.py,sha256=A0Fw06baPTfEaYwo3kvNs4vaupzlqylBc6tQH-2DFuY,279
@@ -59,7 +62,7 @@ retab/utils/hashing.py,sha256=_BMVUvftOcJav68QL0rLkH2dbhW9RRJPzeGC2akR0fc,757
59
62
  retab/utils/json_schema.py,sha256=zP4pQLpVHBKWo_abCjb_dU4kA0azhHopd-1TFUgVEvc,20655
60
63
  retab/utils/mime.py,sha256=mTP_lqSPttOP5DYJxopiWaeFXrUCPjhwd7y53nCVGO4,6189
61
64
  retab/utils/stream_context_managers.py,sha256=gI1gVQSj3nWz6Mvjz7Ix5AiY0g6vSL-c2tPfuP04izo,2314
62
- retab-0.0.88.dist-info/METADATA,sha256=6Tzn_jnoHNc0-UkawkeLW5LQL-rGrfU-6A_mHEbROac,4532
63
- retab-0.0.88.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
64
- retab-0.0.88.dist-info/top_level.txt,sha256=waQR0EGdhLIQtztoE3AXg7ik5ONQ9q_bsKVpyFuJdq0,6
65
- retab-0.0.88.dist-info/RECORD,,
65
+ retab-0.0.90.dist-info/METADATA,sha256=ea40JF37wk3xxuQRUv17mvliwsqiOHP8ufyna9ScpyY,4532
66
+ retab-0.0.90.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
67
+ retab-0.0.90.dist-info/top_level.txt,sha256=waQR0EGdhLIQtztoE3AXg7ik5ONQ9q_bsKVpyFuJdq0,6
68
+ retab-0.0.90.dist-info/RECORD,,
File without changes