airia 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. airia/client/_request_handler/__init__.py +4 -0
  2. airia/client/_request_handler/async_request_handler.py +272 -0
  3. airia/client/_request_handler/base_request_handler.py +108 -0
  4. airia/client/_request_handler/sync_request_handler.py +255 -0
  5. airia/client/async_client.py +25 -584
  6. airia/client/base_client.py +2 -209
  7. airia/client/conversations/__init__.py +4 -0
  8. airia/client/conversations/async_conversations.py +187 -0
  9. airia/client/conversations/base_conversations.py +135 -0
  10. airia/client/conversations/sync_conversations.py +182 -0
  11. airia/client/pipeline_execution/__init__.py +4 -0
  12. airia/client/pipeline_execution/async_pipeline_execution.py +178 -0
  13. airia/client/pipeline_execution/base_pipeline_execution.py +96 -0
  14. airia/client/pipeline_execution/sync_pipeline_execution.py +178 -0
  15. airia/client/pipelines_config/__init__.py +4 -0
  16. airia/client/pipelines_config/async_pipelines_config.py +127 -0
  17. airia/client/pipelines_config/base_pipelines_config.py +76 -0
  18. airia/client/pipelines_config/sync_pipelines_config.py +127 -0
  19. airia/client/project/__init__.py +4 -0
  20. airia/client/project/async_project.py +122 -0
  21. airia/client/project/base_project.py +74 -0
  22. airia/client/project/sync_project.py +120 -0
  23. airia/client/store/__init__.py +4 -0
  24. airia/client/store/async_store.py +377 -0
  25. airia/client/store/base_store.py +243 -0
  26. airia/client/store/sync_store.py +352 -0
  27. airia/client/sync_client.py +25 -563
  28. airia/constants.py +13 -2
  29. airia/exceptions.py +8 -8
  30. airia/logs.py +10 -32
  31. airia/types/__init__.py +0 -0
  32. airia/types/_request_data.py +29 -2
  33. airia/types/api/__init__.py +0 -19
  34. airia/types/api/conversations/__init__.py +3 -0
  35. airia/types/api/conversations/_conversations.py +115 -0
  36. airia/types/api/pipeline_execution/__init__.py +13 -0
  37. airia/types/api/pipeline_execution/_pipeline_execution.py +76 -0
  38. airia/types/api/pipelines_config/__init__.py +3 -0
  39. airia/types/api/pipelines_config/get_pipeline_config.py +401 -0
  40. airia/types/api/project/__init__.py +3 -0
  41. airia/types/api/project/get_projects.py +91 -0
  42. airia/types/api/store/__init__.py +4 -0
  43. airia/types/api/store/get_file.py +145 -0
  44. airia/types/api/store/get_files.py +21 -0
  45. airia/types/sse/__init__.py +8 -0
  46. airia/types/sse/sse_messages.py +209 -0
  47. airia/utils/sse_parser.py +40 -7
  48. airia-0.1.14.dist-info/METADATA +221 -0
  49. airia-0.1.14.dist-info/RECORD +55 -0
  50. airia/types/api/conversations.py +0 -14
  51. airia/types/api/get_pipeline_config.py +0 -183
  52. airia/types/api/get_projects.py +0 -35
  53. airia/types/api/pipeline_execution.py +0 -29
  54. airia-0.1.12.dist-info/METADATA +0 -705
  55. airia-0.1.12.dist-info/RECORD +0 -23
  56. {airia-0.1.12.dist-info → airia-0.1.14.dist-info}/WHEEL +0 -0
  57. {airia-0.1.12.dist-info → airia-0.1.14.dist-info}/licenses/LICENSE +0 -0
  58. {airia-0.1.12.dist-info → airia-0.1.14.dist-info}/top_level.txt +0 -0
airia/types/api/pipelines_config/get_pipeline_config.py
@@ -0,0 +1,401 @@
+ """
+ Pydantic models for pipeline configuration API responses.
+
+ This module defines comprehensive data structures for pipeline configuration exports,
+ including all components like agents, models, tools, data sources, and deployment settings.
+ """
+
+ from typing import Any, Dict, List, Optional
+
+ from pydantic import BaseModel, Field
+
+
+ class Metadata(BaseModel):
+     """Pipeline metadata and export configuration.
+
+     Contains version information, export settings, and descriptive metadata
+     about the pipeline configuration.
+
+     Attributes:
+         id: Unique identifier for the pipeline metadata
+         export_version: Version of the export format
+         tagline: Optional tagline describing the pipeline
+         agent_description: Optional description of the agent
+         industry: Optional industry classification
+         tasks: Optional description of tasks the pipeline performs
+         credential_export_option: Export option for credentials
+         data_source_export_option: Export option for data sources
+         version_information: Information about the pipeline version
+         state: Current state of the pipeline
+     """
+
+     id: str
+     export_version: str = Field(alias="exportVersion")
+     tagline: Optional[str] = None
+     agent_description: Optional[str] = Field(alias="agentDescription", default=None)
+     industry: Optional[str] = None
+     tasks: Optional[str] = None
+     credential_export_option: str = Field(alias="credentialExportOption")
+     data_source_export_option: str = Field(alias="dataSourceExportOption")
+     version_information: str = Field(alias="versionInformation")
+     state: str
+
+
+ class Agent(BaseModel):
+     """AI agent configuration and workflow definition.
+
+     Represents the core agent that executes the pipeline, including its
+     identity, industry specialization, and step-by-step workflow configuration.
+
+     Attributes:
+         name: Display name of the agent
+         execution_name: Name used during execution
+         agent_description: Optional description of the agent's capabilities
+         video_link: Optional link to demonstration video
+         industry: Optional industry the agent specializes in
+         sub_industries: List of sub-industry specializations
+         agent_details: Dictionary containing additional agent configuration
+         id: Unique identifier for the agent
+         agent_icon: Optional icon identifier or URL
+         steps: List of workflow steps the agent executes
+     """
+
+     name: str
+     execution_name: str = Field(alias="executionName")
+     agent_description: Optional[str] = Field(alias="agentDescription", default=None)
+     video_link: Optional[str] = Field(alias="videoLink", default=None)
+     industry: Optional[str] = None
+     sub_industries: List[str] = Field(alias="subIndustries", default_factory=list)
+     agent_details: Dict[str, Any] = Field(alias="agentDetails", default_factory=dict)
+     id: str
+     agent_icon: Optional[str] = Field(alias="agentIcon", default=None)
+     steps: List[Dict[str, Any]]
+
+
+ class PromptMessage(BaseModel):
+     """Individual message within a prompt template.
+
+     Attributes:
+         text: The message content
+         order: Order of the message in the prompt sequence
+     """
+
+     text: str
+     order: int
+
+
+ class Prompt(BaseModel):
+     """Prompt template configuration.
+
+     Attributes:
+         name: Name of the prompt template
+         version_change_description: Description of changes in this version
+         prompt_message_list: List of messages in the prompt
+         id: Unique identifier for the prompt
+     """
+
+     name: str
+     version_change_description: str = Field(alias="versionChangeDescription")
+     prompt_message_list: List[PromptMessage] = Field(alias="promptMessageList")
+     id: str
+
+
+ class CredentialData(BaseModel):
+     """Individual credential key-value pair.
+
+     Attributes:
+         key: The credential key name
+         value: The credential value
+     """
+
+     key: str
+     value: str
+
+
+ class CredentialsDefinition(BaseModel):
+     """Credentials configuration and authentication settings.
+
+     Attributes:
+         name: Name of the credentials definition
+         credential_type: Type of credentials (API key, OAuth, etc.)
+         source_type: Source where credentials are stored
+         credential_data_list: List of credential key-value pairs
+         id: Unique identifier for the credentials definition
+     """
+
+     name: str
+     credential_type: str = Field(alias="credentialType")
+     source_type: str = Field(alias="sourceType")
+     credential_data_list: List[CredentialData] = Field(alias="credentialDataList")
+     id: str
+
+
+ class HeaderDefinition(BaseModel):
+     """HTTP header definition for API requests.
+
+     Attributes:
+         key: Header name
+         value: Header value
+     """
+
+     key: str
+     value: str
+
+
+ class ParameterDefinition(BaseModel):
+     """Parameter definition for tool configuration.
+
+     Attributes:
+         name: Name of the parameter
+         parameter_type: Type of the parameter (string, integer, etc.)
+         parameter_description: Description of the parameter's purpose
+         default: Default value for the parameter
+         valid_options: List of valid options for the parameter
+         id: Unique identifier for the parameter definition
+     """
+
+     name: str
+     parameter_type: str = Field(alias="parameterType")
+     parameter_description: str = Field(alias="parameterDescription")
+     default: str
+     valid_options: List[str] = Field(alias="validOptions", default_factory=list)
+     id: str
+
+
+ class Tool(BaseModel):
+     """Tool configuration for external API integrations.
+
+     Attributes:
+         tool_type: Type of tool (API, function, etc.)
+         name: Display name of the tool
+         standardized_name: Standardized name for the tool
+         tool_description: Description of the tool's functionality
+         purpose: Purpose or use case for the tool
+         api_endpoint: API endpoint URL
+         credentials_definition: Optional credentials required for the tool
+         headers_definition: List of HTTP headers for API requests
+         body: Request body template
+         parameters_definition: List of parameter definitions
+         method_type: HTTP method type (GET, POST, etc.)
+         route_through_acc: Whether to route through ACC
+         use_user_credentials: Whether to use user credentials
+         use_user_credentials_type: Type of user credentials to use
+         id: Unique identifier for the tool
+     """
+
+     tool_type: str = Field(alias="toolType")
+     name: str
+     standardized_name: str = Field(alias="standardizedName")
+     tool_description: str = Field(alias="toolDescription")
+     purpose: str
+     api_endpoint: str = Field(alias="apiEndpoint")
+     credentials_definition: Optional[CredentialsDefinition] = Field(
+         alias="credentialsDefinition"
+     )
+     headers_definition: List[HeaderDefinition] = Field(alias="headersDefinition")
+     body: str
+     parameters_definition: List[ParameterDefinition] = Field(
+         alias="parametersDefinition"
+     )
+     method_type: str = Field(alias="methodType")
+     route_through_acc: bool = Field(alias="routeThroughACC")
+     use_user_credentials: bool = Field(alias="useUserCredentials")
+     use_user_credentials_type: str = Field(alias="useUserCredentialsType")
+     id: str
+
+
+ class Model(BaseModel):
+     """Language model configuration and deployment settings.
+
+     Defines an AI model used in the pipeline, including its deployment details,
+     pricing configuration, authentication settings, and capabilities.
+
+     Attributes:
+         id: Unique identifier for the model
+         display_name: Display name of the model
+         model_name: Technical name of the model
+         prompt_id: Optional ID of associated prompt template
+         system_prompt_definition: Optional system prompt configuration
+         url: Model endpoint URL
+         input_type: Type of input the model accepts
+         provider: Model provider (OpenAI, Anthropic, etc.)
+         credentials_definition: Optional credentials for model access
+         deployment_type: Type of deployment (cloud, on-premise, etc.)
+         source_type: Source type of the model
+         connection_string: Optional connection string for deployment
+         container_name: Optional container name for deployment
+         deployed_key: Optional key for deployed model
+         deployed_url: Optional URL for deployed model
+         state: Optional current state of the model
+         uploaded_container_id: Optional ID of uploaded container
+         library_model_id: Optional ID from model library
+         input_token_price: Price per input token
+         output_token_price: Price per output token
+         token_units: Number of token units
+         has_tool_support: Whether the model supports tool calling
+         allow_airia_credentials: Whether Airia credentials are allowed
+         allow_byok_credentials: Whether bring-your-own-key credentials are allowed
+         author: Optional author of the model
+         price_type: Type of pricing model
+     """
+
+     id: str
+     display_name: str = Field(alias="displayName")
+     model_name: str = Field(alias="modelName")
+     prompt_id: Optional[str] = Field(alias="promptId", default=None)
+     system_prompt_definition: Optional[Any] = Field(
+         alias="systemPromptDefinition", default=None
+     )
+     url: str
+     input_type: str = Field(alias="inputType")
+     provider: str
+     credentials_definition: Optional[CredentialsDefinition] = Field(
+         alias="credentialsDefinition"
+     )
+     deployment_type: str = Field(alias="deploymentType")
+     source_type: str = Field(alias="sourceType")
+     connection_string: Optional[str] = Field(alias="connectionString", default=None)
+     container_name: Optional[str] = Field(alias="containerName", default=None)
+     deployed_key: Optional[str] = Field(alias="deployedKey", default=None)
+     deployed_url: Optional[str] = Field(alias="deployedUrl", default=None)
+     state: Optional[str] = None
+     uploaded_container_id: Optional[str] = Field(
+         alias="uploadedContainerId", default=None
+     )
+     library_model_id: Optional[str] = Field(alias="libraryModelId")
+     input_token_price: str = Field(alias="inputTokenPrice")
+     output_token_price: str = Field(alias="outputTokenPrice")
+     token_units: int = Field(alias="tokenUnits")
+     has_tool_support: bool = Field(alias="hasToolSupport")
+     allow_airia_credentials: bool = Field(alias="allowAiriaCredentials")
+     allow_byok_credentials: bool = Field(alias="allowBYOKCredentials")
+     author: Optional[str]
+     price_type: str = Field(alias="priceType")
+
+
+ class PythonCodeBlock(BaseModel):
+     """Python code block for custom functionality.
+
+     Attributes:
+         id: Unique identifier for the code block
+         code: Python code content
+     """
+
+     id: str
+     code: str
+
+
+ class Router(BaseModel):
+     """Router configuration for model selection and routing.
+
+     Attributes:
+         id: Unique identifier for the router
+         model_id: ID of the associated model
+         model: Optional model object
+         router_config: Dictionary containing router configuration
+     """
+
+     id: str
+     model_id: str = Field(alias="modelId")
+     model: Optional[Any] = None
+     router_config: Dict[str, Dict[str, Any]] = Field(alias="routerConfig")
+
+
+ class ChunkingConfig(BaseModel):
+     """Configuration for text chunking in data processing.
+
+     Attributes:
+         id: Unique identifier for the chunking configuration
+         chunk_size: Size of each text chunk
+         chunk_overlap: Number of characters to overlap between chunks
+         strategy_type: Type of chunking strategy to use
+     """
+
+     id: str
+     chunk_size: int = Field(alias="chunkSize")
+     chunk_overlap: int = Field(alias="chunkOverlap")
+     strategy_type: str = Field(alias="strategyType")
+
+
+ class DataSourceFile(BaseModel):
+     """File reference within a data source.
+
+     Attributes:
+         data_source_id: ID of the data source containing this file
+         file_path: Optional path to the file
+         input_token: Optional input token for file access
+         file_count: Optional count of files
+     """
+
+     data_source_id: str = Field(alias="dataSourceId")
+     file_path: Optional[str] = Field(None, alias="filePath")
+     input_token: Optional[str] = Field(None, alias="inputToken")
+     file_count: Optional[int] = Field(None, alias="fileCount")
+
+
+ class DataSource(BaseModel):
+     """Data source configuration for pipeline data input.
+
+     Attributes:
+         id: Unique identifier for the data source
+         name: Optional name of the data source
+         execution_name: Optional name used during execution
+         chunking_config: Configuration for text chunking
+         data_source_type: Type of data source (file, database, etc.)
+         database_type: Type of database if applicable
+         embedding_provider: Provider for text embeddings
+         is_user_specific: Whether the data source is user-specific
+         files: Optional list of files in the data source
+         configuration_json: Optional JSON configuration string
+         credentials: Optional credentials for data source access
+         is_image_processing_enabled: Whether image processing is enabled
+     """
+
+     id: str = Field(alias="id")
+     name: Optional[str] = None
+     execution_name: Optional[str] = Field(None, alias="executionName")
+     chunking_config: ChunkingConfig = Field(alias="chunkingConfig")
+     data_source_type: str = Field(alias="dataSourceType")
+     database_type: str = Field(alias="databaseType")
+     embedding_provider: str = Field(alias="embeddingProvider")
+     is_user_specific: bool = Field(alias="isUserSpecific")
+     files: Optional[List[DataSourceFile]] = None
+     configuration_json: Optional[str] = Field(None, alias="configurationJson")
+     credentials: Optional[CredentialsDefinition]
+     is_image_processing_enabled: bool = Field(alias="isImageProcessingEnabled")
+
+
+ class GetPipelineConfigResponse(BaseModel):
+     """Complete pipeline configuration export response.
+
+     This is the root response model containing all components of a pipeline
+     configuration, including the agent definition, associated resources,
+     and deployment settings.
+
+     Attributes:
+         metadata: Pipeline metadata and export configuration
+         agent: AI agent configuration and workflow definition
+         data_sources: Optional list of data sources for the pipeline
+         prompts: Optional list of prompt templates
+         tools: Optional list of external tools and integrations
+         models: Optional list of AI models used in the pipeline
+         memories: Optional memory/context storage configurations
+         python_code_blocks: Optional list of custom Python code blocks
+         routers: Optional list of model routing configurations
+         deployment: Optional deployment configuration
+     """
+
+     metadata: Metadata
+     agent: Agent
+     data_sources: Optional[List[DataSource]] = Field(
+         alias="dataSources", default_factory=list
+     )
+     prompts: Optional[List[Prompt]] = Field(default_factory=list)
+     tools: Optional[List[Tool]] = Field(default_factory=list)
+     models: Optional[List[Model]] = Field(default_factory=list)
+     memories: Optional[Any] = None
+     python_code_blocks: Optional[List[PythonCodeBlock]] = Field(
+         alias="pythonCodeBlocks", default_factory=list
+     )
+     routers: Optional[List[Router]] = Field(default_factory=list)
+     deployment: Optional[Any] = None
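The export models above are meant to be hydrated straight from the camelCase JSON returned by the pipelines-config endpoint, with the Field aliases mapping onto snake_case attributes. As a rough illustration (not code from the package, assuming Pydantic v2's model_validate; Pydantic v1 would use parse_obj, and all payload values are made up), a minimal payload with only the required keys could be parsed like this:

from airia.types.api.pipelines_config.get_pipeline_config import (
    GetPipelineConfigResponse,
)

# Illustrative payload: only the required camelCase keys for Metadata and Agent.
payload = {
    "metadata": {
        "id": "meta-1",
        "exportVersion": "1.0",
        "credentialExportOption": "None",
        "dataSourceExportOption": "None",
        "versionInformation": "initial export",
        "state": "Active",
    },
    "agent": {
        "id": "agent-1",
        "name": "Support Agent",
        "executionName": "support-agent",
        "steps": [],
    },
}

config = GetPipelineConfigResponse.model_validate(payload)
# Aliased camelCase keys are exposed as snake_case attributes.
print(config.metadata.export_version, config.agent.execution_name)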
airia/types/api/project/__init__.py
@@ -0,0 +1,3 @@
+ from .get_projects import ProjectItem
+
+ __all__ = ["ProjectItem"]
airia/types/api/project/get_projects.py
@@ -0,0 +1,91 @@
+ """
+ Pydantic models for project management API responses.
+
+ This module defines the data structures returned by project-related endpoints,
+ including project listings and associated pipeline information.
+ """
+
+ from datetime import datetime
+ from typing import Any, List, Optional
+
+ from pydantic import BaseModel, Field
+
+
+ class Pipeline(BaseModel):
+     """
+     Basic pipeline information associated with a project.
+
+     Represents a simplified view of pipeline data within project contexts,
+     containing only essential identification information.
+     """
+
+     id: str
+     name: str
+
+
+ class DataSource(BaseModel):
+     """
+     Basic data source information associated with a project.
+     """
+
+     id: Optional[str] = None
+     name: Optional[str] = None
+
+
+ class ProjectItem(BaseModel):
+     """
+     Comprehensive project information and metadata.
+
+     This model represents a complete project entity with all associated resources,
+     budget information, security settings, and organizational details. Projects
+     serve as containers for pipelines, models, data sources, and other AI resources.
+
+     Attributes:
+         tenant_id: Unique identifier for the tenant/organization
+         created_at: Timestamp when the project was created
+         require_classification: Whether data classification is required
+         budget_amount: Optional budget limit for the project
+         budget_period: Time period for budget calculations
+         budget_alert: Budget alert threshold configuration
+         budget_stop: Whether to stop operations when budget is exceeded
+         used_budget_amount: Amount of budget currently consumed
+         resume_ends_at: When the project resumption period ends
+         updated_at: Timestamp of last project modification
+         pipelines: List of pipelines associated with this project
+         models: AI models available in this project
+         data_sources: Data sources configured for this project
+         prompts: Prompt templates available in this project
+         api_keys: API key configurations for external services
+         memories: Memory/context storage configurations
+         project_icon: Base64 encoded project icon image
+         project_icon_id: Unique identifier for the project icon
+         description: Human-readable project description
+         project_type: Classification of project type
+         classifications: Data classification settings
+         id: Unique project identifier
+         name: Human-readable project name
+     """
+
+     tenant_id: str = Field(alias="tenantId")
+     created_at: datetime = Field(alias="createdAt")
+     require_classification: bool = Field(alias="requireClassification")
+     budget_amount: Optional[Any] = Field(None, alias="budgetAmount")
+     budget_period: Optional[Any] = Field(None, alias="budgetPeriod")
+     budget_alert: Optional[Any] = Field(None, alias="budgetAlert")
+     budget_stop: bool = Field(alias="budgetStop")
+     used_budget_amount: Optional[Any] = Field(None, alias="usedBudgetAmount")
+     resume_ends_at: Optional[datetime] = Field(None, alias="resumeEndsAt")
+     updated_at: datetime = Field(alias="updatedAt")
+     pipelines: List[Pipeline]
+     models: Optional[Any] = None
+     data_sources: List[DataSource] = Field(alias="dataSources")
+     prompts: Optional[Any] = None
+     api_keys: Optional[Any] = Field(alias="apiKeys")
+     memories: Optional[Any] = None
+     project_icon: Optional[str] = Field(None, alias="projectIcon")
+     project_icon_id: Optional[str] = Field(None, alias="projectIconId")
+     description: Optional[str] = None
+     project_type: str = Field(alias="projectType")
+     classifications: Optional[Any] = None
+     id: str
+     name: str
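ProjectItem follows the same alias pattern and additionally coerces ISO-8601 strings into datetime objects. A hedged sketch (illustrative values only, not from the package, assuming Pydantic v2) of validating a single project entry:

from airia.types.api.project import ProjectItem

# Illustrative values; ISO-8601 strings are coerced into datetime objects.
payload = {
    "id": "proj-1",
    "name": "Demo Project",
    "tenantId": "tenant-1",
    "createdAt": "2024-01-01T00:00:00Z",
    "updatedAt": "2024-01-02T12:30:00Z",
    "requireClassification": False,
    "budgetStop": False,
    "projectType": "Standard",
    "pipelines": [{"id": "pipe-1", "name": "Demo Pipeline"}],
    "dataSources": [],
    "apiKeys": None,  # aliased but has no default, so the key must be supplied
}

project = ProjectItem.model_validate(payload)
print(project.created_at.year, project.pipelines[0].name)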
airia/types/api/store/__init__.py
@@ -0,0 +1,4 @@
+ from .get_file import File, GetFileResponse
+ from .get_files import GetFilesResponse
+
+ __all__ = ["GetFileResponse", "GetFilesResponse", "File"]
airia/types/api/store/get_file.py
@@ -0,0 +1,145 @@
+ from typing import List, Optional
+
+ from pydantic import BaseModel, Field
+
+
+ class Timestamp(BaseModel):
+     """Represents a timestamp with seconds and nanoseconds precision.
+
+     Attributes:
+         seconds: The number of seconds since the epoch
+         nanos: The number of nanoseconds within the second
+     """
+
+     seconds: int
+     nanos: int
+
+
+ class IngestionProcessingStatus(BaseModel):
+     """Represents the processing status of a file during ingestion.
+
+     Attributes:
+         table_document_processing_mode: The processing mode for table documents
+         user_errors: Optional list of user-related errors during processing
+         system_errors: Optional list of system-related errors during processing
+         status: The current processing status
+     """
+
+     table_document_processing_mode: str = Field(alias="tableDocumentProcessingMode")
+     user_errors: Optional[List[str]] = Field(None, alias="userErrors")
+     system_errors: Optional[List[str]] = Field(None, alias="systemErrors")
+     status: str
+
+
+ class File(BaseModel):
+     """Represents a file in the Airia system with metadata and processing information.
+
+     Attributes:
+         store_connector_id: Optional ID of the store connector
+         parent_id: Optional ID of the parent file/folder
+         has_parent_id: Whether the file has a parent ID
+         id: Optional unique identifier for the file
+         name: Optional name of the file
+         size: Size of the file in bytes
+         mime_type: Optional MIME type of the file
+         path: Optional file path
+         file_last_updated_at: Optional timestamp of last file update
+         additional_metadata_json: Optional JSON string containing additional metadata
+         has_additional_metadata_json: Whether the file has additional metadata
+         file_hash: Optional hash of the file content
+         status: Current status of the file
+         user_errors: Optional list of user-related errors
+         system_errors: Optional list of system-related errors
+         folder_id: Optional ID of the containing folder
+         has_folder_id: Whether the file has a folder ID
+         external_id: Optional external identifier
+         has_external_id: Whether the file has an external ID
+         ingestion_duration: Duration of ingestion process in milliseconds
+         has_ingestion_duration: Whether ingestion duration is available
+         tokens_consumed: Number of tokens consumed during processing
+         has_tokens_consumed: Whether token consumption data is available
+         processing_message: Optional message about processing status
+         has_processing_message: Whether a processing message is available
+         processed_at: Optional timestamp when processing completed
+         ingestion_processing_statuses: Optional list of processing statuses
+     """
+
+     store_connector_id: Optional[str] = Field(None, alias="storeConnectorId")
+     parent_id: Optional[str] = Field(None, alias="parentId")
+     has_parent_id: bool = Field(alias="hasParentId")
+     id: Optional[str] = None
+     name: Optional[str] = None
+     size: int
+     mime_type: Optional[str] = Field(None, alias="mimeType")
+     path: Optional[str] = None
+     file_last_updated_at: Optional[Timestamp] = Field(None, alias="fileLastUpdatedAt")
+     additional_metadata_json: Optional[str] = Field(
+         None, alias="additionalMetadataJson"
+     )
+     has_additional_metadata_json: bool = Field(alias="hasAdditionalMetadataJson")
+     file_hash: Optional[str] = Field(None, alias="fileHash")
+     status: str
+     user_errors: Optional[List[str]] = Field(None, alias="userErrors")
+     system_errors: Optional[List[str]] = Field(None, alias="systemErrors")
+     folder_id: Optional[str] = Field(None, alias="folderId")
+     has_folder_id: bool = Field(alias="hasFolderId")
+     external_id: Optional[str] = Field(None, alias="externalId")
+     has_external_id: bool = Field(alias="hasExternalId")
+     ingestion_duration: int = Field(alias="ingestionDuration")
+     has_ingestion_duration: bool = Field(alias="hasIngestionDuration")
+     tokens_consumed: int = Field(alias="tokensConsumed")
+     has_tokens_consumed: bool = Field(alias="hasTokensConsumed")
+     processing_message: Optional[str] = Field(None, alias="processingMessage")
+     has_processing_message: bool = Field(alias="hasProcessingMessage")
+     processed_at: Optional[Timestamp] = Field(None, alias="processedAt")
+     ingestion_processing_statuses: Optional[List[IngestionProcessingStatus]] = Field(
+         None, alias="ingestionProcessingStatuses"
+     )
+
+
+ class DownloadInfo(BaseModel):
+     """Contains information needed to download a file.
+
+     Attributes:
+         file_id: Optional ID of the file to download
+         url: Optional download URL for the file
+     """
+
+     file_id: Optional[str] = Field(None, alias="fileId")
+     url: Optional[str] = None
+
+
+ class PreviewInfo(BaseModel):
+     """Contains information for previewing a file.
+
+     Attributes:
+         preview_url: Optional URL for file preview
+         last_modified_date_time_drive: Optional last modified datetime from drive
+         description: Optional description of the file
+         connector_type_id: Optional ID of the connector type
+         connector_type_name: Optional name of the connector type
+     """
+
+     preview_url: Optional[str] = Field(None, alias="previewUrl")
+     last_modified_date_time_drive: Optional[str] = Field(
+         None, alias="lastModifiedDateTimeDrive"
+     )
+     description: Optional[str] = None
+     connector_type_id: Optional[str] = Field(None, alias="connectorTypeId")
+     connector_type_name: Optional[str] = Field(None, alias="connectorTypeName")
+
+
+ class GetFileResponse(BaseModel):
+     """Response model for getting a single file.
+
+     Contains file metadata, download information, and preview information.
+
+     Attributes:
+         file: Optional file object with metadata and processing information
+         download_info: Optional download information for the file
+         preview_info: Optional preview information for the file
+     """
+
+     file: Optional[File] = None
+     download_info: Optional[DownloadInfo] = Field(None, alias="downloadInfo")
+     preview_info: Optional[PreviewInfo] = Field(None, alias="previewInfo")
airia/types/api/store/get_files.py
@@ -0,0 +1,21 @@
+ from typing import List, Optional
+
+ from pydantic import BaseModel, Field
+
+ from .get_file import DownloadInfo, File
+
+
+ class GetFilesResponse(BaseModel):
+     """Response model for getting multiple files.
+
+     Contains a list of files, their download information, and total count.
+
+     Attributes:
+         files: Optional list of file objects with metadata and processing information
+         download_infos: Optional list of download information for the files
+         totalCount: Total number of files available (may be greater than files returned)
+     """
+
+     files: Optional[List[File]] = None
+     download_infos: Optional[List[DownloadInfo]] = Field(None, alias="downloadInfos")
+     totalCount: int
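The store models pair each nullable field with a has* flag and use a proto-style Timestamp rather than datetime. A rough sketch (illustrative payload, not from the package, assuming Pydantic v2) of parsing a file listing:

from airia.types.api.store import GetFilesResponse

# Illustrative payload; only the required has* flags and counters are filled in.
payload = {
    "totalCount": 1,
    "files": [
        {
            "name": "report.pdf",
            "size": 2048,
            "status": "Completed",
            "hasParentId": False,
            "hasAdditionalMetadataJson": False,
            "hasFolderId": False,
            "hasExternalId": False,
            "ingestionDuration": 1500,
            "hasIngestionDuration": True,
            "tokensConsumed": 320,
            "hasTokensConsumed": True,
            "hasProcessingMessage": False,
            "fileLastUpdatedAt": {"seconds": 1700000000, "nanos": 0},
        }
    ],
}

listing = GetFilesResponse.model_validate(payload)
print(listing.totalCount, listing.files[0].file_last_updated_at.seconds)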
airia/types/sse/__init__.py
@@ -1,3 +1,11 @@
+ """
+ Server-Sent Event (SSE) message types for the Airia SDK.
+
+ This package contains all the message types that can be received via SSE streams
+ during pipeline execution, including agent lifecycle events, processing steps,
+ model streaming fragments, and tool execution updates.
+ """
+
  from .sse_messages import (
      SSEDict,
      SSEMessage,