airia-0.1.32-py3-none-any.whl → airia-0.1.33-py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -264,8 +264,8 @@ class AsyncPipelineExecution(BasePipelineExecution):
         self,
         model_parameters: Dict[str, Any],
         user_input: str,
+        prompt_parameters: Dict[str, Any],
         assistant_name: str = "",
-        prompt_parameters: Dict[str, Any] = {"prompt": ""},
         async_output: Literal[False] = False,
         include_tools_response: bool = False,
         save_history: bool = True,
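Beyond making prompt_parameters required, this change removes a classic Python pitfall: a mutable dict used as a default argument is evaluated once at definition time and shared across every call that omits it (in 0.1.32 the overloads defaulted to {"prompt": ""} while the base implementation defaulted to {}, so the defaults were also inconsistent). A minimal standalone illustration of the failure mode — greet() is a hypothetical function, not part of the airia SDK:

```
# The default dict is created once and reused across calls.
def greet(params={"prompt": ""}):
    params["prompt"] += "!"
    return params["prompt"]

print(greet())  # "!"
print(greet())  # "!!" -- state leaked from the previous call
```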
@@ -295,8 +295,8 @@ class AsyncPipelineExecution(BasePipelineExecution):
         self,
         model_parameters: Dict[str, Any],
         user_input: str,
+        prompt_parameters: Dict[str, Any],
         assistant_name: str = "",
-        prompt_parameters: Dict[str, Any] = {"prompt": ""},
         async_output: Literal[True] = True,
         include_tools_response: bool = False,
         save_history: bool = True,
@@ -325,8 +325,8 @@ class AsyncPipelineExecution(BasePipelineExecution):
         self,
         model_parameters: Dict[str, Any],
         user_input: str,
+        prompt_parameters: Dict[str, Any],
         assistant_name: str = "",
-        prompt_parameters: Dict[str, Any] = {"prompt": ""},
         async_output: bool = False,
         include_tools_response: bool = False,
         save_history: bool = True,
@@ -363,8 +363,9 @@ class AsyncPipelineExecution(BasePipelineExecution):
             model_parameters: Model parameters (required). Must include libraryModelId,
                 projectModelId, modelIdentifierType, and modelIsAvailableinProject
             user_input: User input text (required)
+            prompt_parameters: Parameters for prompt configuration (required). Must include
+                a 'prompt' key with the system prompt text
             assistant_name: Name of the temporary assistant. Default is ""
-            prompt_parameters: Parameters for prompt configuration. Default is {"prompt": ""}
             async_output: Whether to stream the response. Default is False
             include_tools_response: Whether to return initial LLM tool result. Default is False
             save_history: Whether to save input and output to conversation history. Default is True
@@ -411,6 +412,7 @@ class AsyncPipelineExecution(BasePipelineExecution):
                     "modelIsAvailableinProject": True,
                 },
                 user_input="say double bubble bath ten times fast",
+                prompt_parameters={"prompt": "You are a helpful assistant."},
             )
             print(response.result)
             ```
@@ -426,6 +428,7 @@ class AsyncPipelineExecution(BasePipelineExecution):
             response = await client.pipeline_execution.execute_temporary_assistant(
                 model_parameters={...},
                 user_input="What's the weather?",
+                prompt_parameters={"prompt": "You are a weather information assistant."},
                 output_schema=WeatherInfo
             )
             ```
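Taken together, the hunks above make this a breaking change for the async client: callers that relied on the old default must now pass prompt_parameters explicitly, and since it now precedes assistant_name in the signature, any caller passing assistant_name positionally also breaks. A migration sketch, assuming a client object constructed as in the SDK's docstring examples (the {...} placeholder mirrors those examples):

```
# 0.1.32 -- prompt_parameters was optional, defaulting to {"prompt": ""}:
response = await client.pipeline_execution.execute_temporary_assistant(
    model_parameters={...},
    user_input="say double bubble bath ten times fast",
)

# 0.1.33 -- prompt_parameters is required and comes before assistant_name:
response = await client.pipeline_execution.execute_temporary_assistant(
    model_parameters={...},
    user_input="say double bubble bath ten times fast",
    prompt_parameters={"prompt": "You are a helpful assistant."},
)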
@@ -120,8 +120,8 @@ class BasePipelineExecution:
         self,
         model_parameters: Dict[str, Any],
         user_input: str,
+        prompt_parameters: Dict[str, Any],
         assistant_name: str = "",
-        prompt_parameters: Dict[str, Any] = {},
         async_output: bool = False,
         include_tools_response: bool = False,
         save_history: bool = True,
@@ -153,8 +153,9 @@ class BasePipelineExecution:
         Args:
             model_parameters: model parameters
             user_input: Optional user input text
+            prompt_parameters: Parameters for prompt configuration (required). Must include
+                a 'prompt' key with the system prompt text
             assistant_name: Name of the temporary assistant
-            prompt_parameters: Parameters for prompt configuration (required)
             async_output: Whether to stream the response. Default is False
             include_tools_response: Whether to return initial LLM tool result. Default is False
             save_history: Whether to save input and output to conversation history. Default is True
@@ -263,8 +263,8 @@ class PipelineExecution(BasePipelineExecution):
         self,
         model_parameters: Dict[str, Any],
         user_input: str,
+        prompt_parameters: Dict[str, Any],
         assistant_name: str = "",
-        prompt_parameters: Dict[str, Any] = {"prompt": ""},
         async_output: Literal[False] = False,
         include_tools_response: bool = False,
         save_history: bool = True,
@@ -294,8 +294,8 @@ class PipelineExecution(BasePipelineExecution):
         self,
         model_parameters: Dict[str, Any],
         user_input: str,
+        prompt_parameters: Dict[str, Any],
         assistant_name: str = "",
-        prompt_parameters: Dict[str, Any] = {"prompt": ""},
         async_output: Literal[True] = True,
         include_tools_response: bool = False,
         save_history: bool = True,
@@ -324,8 +324,8 @@ class PipelineExecution(BasePipelineExecution):
         self,
         model_parameters: Dict[str, Any],
         user_input: str,
+        prompt_parameters: Dict[str, Any],
         assistant_name: str = "",
-        prompt_parameters: Dict[str, Any] = {"prompt": ""},
         async_output: bool = False,
         include_tools_response: bool = False,
         save_history: bool = True,
@@ -362,8 +362,9 @@ class PipelineExecution(BasePipelineExecution):
             model_parameters: Model parameters (required). Must include libraryModelId,
                 projectModelId, modelIdentifierType, and modelIsAvailableinProject
             user_input: User input text (required)
+            prompt_parameters: Parameters for prompt configuration (required). Must include
+                a 'prompt' key with the system prompt text
             assistant_name: Name of the temporary assistant. Default is ""
-            prompt_parameters: Parameters for prompt configuration. Default is {"prompt": ""}
             async_output: Whether to stream the response. Default is False
             include_tools_response: Whether to return initial LLM tool result. Default is False
             save_history: Whether to save input and output to conversation history. Default is True
@@ -409,6 +410,7 @@ class PipelineExecution(BasePipelineExecution):
                     "modelIsAvailableinProject": True,
                 },
                 user_input="say double bubble bath ten times fast",
+                prompt_parameters={"prompt": "You are a helpful assistant."},
             )
             print(response.result)
             ```
@@ -425,6 +427,7 @@ class PipelineExecution(BasePipelineExecution):
             response = client.pipeline_execution.execute_temporary_assistant(
                 model_parameters={...},
                 user_input="What's the weather like?",
+                prompt_parameters={"prompt": "You are a weather information assistant."},
                 output_schema=WeatherInfo
             )
             # Response will conform to WeatherInfo schema
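The synchronous client changes identically. A sketch of the structured-output example above, with WeatherInfo written out as a hypothetical Pydantic model (the diff references the name but never defines it, and the fields here are invented for illustration):

```
from pydantic import BaseModel

# Hypothetical schema; only the name WeatherInfo appears in the diff.
class WeatherInfo(BaseModel):
    location: str
    forecast: str

response = client.pipeline_execution.execute_temporary_assistant(
    model_parameters={...},  # placeholder, as in the docstring examples
    user_input="What's the weather like?",
    prompt_parameters={"prompt": "You are a weather information assistant."},
    output_schema=WeatherInfo,
)
# The response should conform to the WeatherInfo schema
```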
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airia
-Version: 0.1.32
+Version: 0.1.33
 Summary: Python SDK for Airia API
 Author-email: Airia LLC <support@airia.com>
 License: MIT
@@ -35,9 +35,9 @@ airia/client/models/async_models.py,sha256=gwW3tWwzSZ9_3JmGlwryY48w9KrFXoLET6Jps
 airia/client/models/base_models.py,sha256=FchsA7P-Fc7fxzlN88jJ3BEVXPWDcIHSzci6wtSArqg,3439
 airia/client/models/sync_models.py,sha256=FQLM4xCoxcBk1NuLf6z7jVVejlDKx5KpMRBz2tQm71o,5748
 airia/client/pipeline_execution/__init__.py,sha256=7qEZsPRTLySC71zlwYioBuJs6B4BwRCgFL3TQyFWXmc,175
-airia/client/pipeline_execution/async_pipeline_execution.py,sha256=OqkDBEQRkVfXBrFlAb5-z1oTxIg3htyZ8d0ysqEjYjs,23638
-airia/client/pipeline_execution/base_pipeline_execution.py,sha256=ltOL0RkfdwfAZzGMZsOziW3b27I2LsLL0ezbLOXOB6A,11088
-airia/client/pipeline_execution/sync_pipeline_execution.py,sha256=FcEjKuVhJ-OIS_r0XiYfzAhBMlCkXxTyuu1iMrercSM,23937
+airia/client/pipeline_execution/async_pipeline_execution.py,sha256=EH9EexSYjhSxWAB5kOLQSh5Sfl5Qn5lQGyJrU72wFLw,23824
+airia/client/pipeline_execution/base_pipeline_execution.py,sha256=8x99rm8cPq-i9-UA97-XLNo1qAwdEPkmZ815fRSceFE,11168
+airia/client/pipeline_execution/sync_pipeline_execution.py,sha256=v_Q7Ed9qERr2R_9fvVobsDq1LPFrS44JzLscmYqhq5w,24123
 airia/client/pipeline_import/__init__.py,sha256=ELSVZbekuhTnGDWFZsqE3-ILWsyHUwj9J_-Z75zGz_0,157
 airia/client/pipeline_import/async_pipeline_import.py,sha256=BC6HkkMNiU7_7H8vAhXwehV_Q5781xuNLTik6ehTgiU,7251
 airia/client/pipeline_import/base_pipeline_import.py,sha256=_6AHf_bL3RABDaIQN3-ivL3Z8NR1l0J7A4S0ilJCluY,3844
@@ -95,8 +95,8 @@ airia/types/api/tools/_tools.py,sha256=PSJYFok7yQdE4it55iQmbryFzKN54nT6N161X1Rkp
 airia/types/sse/__init__.py,sha256=KWnNTfsQnthfrU128pUX6ounvSS7DvjC-Y21FE-OdMk,1863
 airia/types/sse/sse_messages.py,sha256=asq9KG5plT2XSgQMz-Nqo0WcKlXvE8UT3E-WLhCegPk,30244
 airia/utils/sse_parser.py,sha256=XCTkuaroYWaVQOgBq8VpbseQYSAVruF69AvKUwZQKTA,4251
-airia-0.1.32.dist-info/licenses/LICENSE,sha256=R3ClUMMKPRItIcZ0svzyj2taZZnFYw568YDNzN9KQ1Q,1066
-airia-0.1.32.dist-info/METADATA,sha256=_4ujk5aQ_dkSRMKTpsVi2TNSsop6OfVX6txQdIe6nN4,4506
-airia-0.1.32.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-airia-0.1.32.dist-info/top_level.txt,sha256=qUQEKfs_hdOYTwjKj1JZbRhS5YeXDNaKQaVTrzabS6w,6
-airia-0.1.32.dist-info/RECORD,,
+airia-0.1.33.dist-info/licenses/LICENSE,sha256=R3ClUMMKPRItIcZ0svzyj2taZZnFYw568YDNzN9KQ1Q,1066
+airia-0.1.33.dist-info/METADATA,sha256=Knaqa6nNBWDQCLehYLvsHAt8YT2FDN4G4uj1VFuRJGM,4506
+airia-0.1.33.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+airia-0.1.33.dist-info/top_level.txt,sha256=qUQEKfs_hdOYTwjKj1JZbRhS5YeXDNaKQaVTrzabS6w,6
+airia-0.1.33.dist-info/RECORD,,