seekrai 0.5.17__py3-none-any.whl → 0.5.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
seekrai/client.py CHANGED
@@ -91,6 +91,7 @@ class SeekrFlow:
         self.deployments = resources.Deployments(self.client)
         self.vector_database = resources.VectorDatabase(self.client)
         self.agents = resources.Agents(self.client)
+        self.tools = resources.Tools(self.client)
         self.observability = resources.AgentObservability(self.client)
         self.explainability = resources.Explainability(self.client)

@@ -176,6 +177,7 @@ class AsyncSeekrFlow:
         self.deployments = resources.AsyncDeployments(self.client)
         self.vector_database = resources.AsyncVectorDatabase(self.client)
         self.agents = resources.AsyncAgents(self.client)
+        self.tools = resources.AsyncTools(self.client)
         self.observability = resources.AsyncAgentObservability(self.client)
         self.explainability = resources.AsyncExplainability(self.client)
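
Both the sync and async clients now expose the new Tools resource alongside the existing ones. A minimal sketch of reaching it, assuming SeekrFlow is importable from the package root and takes an api_key argument (neither assumption is shown in this diff; only the `tools` attribute is):

    from seekrai import SeekrFlow

    client = SeekrFlow(api_key="...")  # assumed constructor signature
    tools = client.tools               # new in 0.5.25: resources.Tools bound to the same HTTP client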
@@ -2,10 +2,16 @@ from seekrai.resources.agents import (
     AgentInference,
     AgentObservability,
     Agents,
+    AsyncAgentInference,
     AsyncAgentObservability,
     AsyncAgents,
 )
-from seekrai.resources.alignment import Alignment, AsyncAlignment
+from seekrai.resources.alignment import (
+    Alignment,
+    AsyncAlignment,
+    AsyncSystemPromptResource,
+    SystemPromptResource,
+)
 from seekrai.resources.chat import AsyncChat, Chat
 from seekrai.resources.completions import AsyncCompletions, Completions
 from seekrai.resources.deployments import AsyncDeployments, Deployments

@@ -17,12 +23,15 @@ from seekrai.resources.images import AsyncImages, Images
 from seekrai.resources.ingestion import AsyncIngestion, Ingestion
 from seekrai.resources.models import AsyncModels, Models
 from seekrai.resources.projects import AsyncProjects, Projects
+from seekrai.resources.tools import AsyncTools, Tools
 from seekrai.resources.vectordb import AsyncVectorDatabase, VectorDatabase


 __all__ = [
     "AsyncAlignment",
     "Alignment",
+    "AsyncSystemPromptResource",
+    "SystemPromptResource",
     "AsyncCompletions",
     "Completions",
     "AsyncChat",

@@ -35,8 +44,8 @@ __all__ = [
     "Files",
     "AsyncImages",
     "Images",
-    "Ingestion",
     "AsyncIngestion",
+    "Ingestion",
     "AsyncModels",
     "Models",
     "AsyncProjects",

@@ -45,11 +54,14 @@ __all__ = [
     "Deployments",
     "AsyncAgents",
     "Agents",
-    "AgentObservability",
     "AsyncAgentObservability",
-    "VectorDatabase",
+    "AgentObservability",
     "AsyncVectorDatabase",
+    "VectorDatabase",
+    "AsyncAgentInference",
     "AgentInference",
     "AsyncExplainability",
     "Explainability",
+    "AsyncTools",
+    "Tools",
 ]
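
Because these hunks also update the re-exports and __all__, the new classes should be importable directly from the resources package. A hedged sketch, assuming this file is the seekrai.resources package __init__ (the file header is not shown in this diff):

    from seekrai.resources import AsyncTools, SystemPromptResource, Tools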
@@ -1,4 +1,4 @@
-from typing import List
+from typing import Any, Dict, List

 from seekrai.abstract import api_requestor
 from seekrai.resources.resource_base import ResourceBase

@@ -7,14 +7,23 @@ from seekrai.types import (
     AlignmentEstimationRequest,
     AlignmentEstimationResponse,
     AlignmentList,
+    AlignmentOutput,
     AlignmentRequest,
     AlignmentResponse,
     AlignmentType,
     SeekrFlowRequest,
+    SystemPrompt,
+    SystemPromptCreateRequest,
+    SystemPromptUpdateRequest,
 )
+from seekrai.types.abstract import SeekrFlowClient


 class Alignment(ResourceBase):
+    def __init__(self, client: SeekrFlowClient) -> None:
+        super().__init__(client)
+        self.system_prompt = SystemPromptResource(client)
+
     def generate(
         self,
         instructions: str,
@@ -92,6 +101,86 @@ class Alignment(ResourceBase):

         return AlignmentResponse(**response.data)

+    def outputs(self, id: str) -> List[AlignmentOutput]:
+        """
+        Retrieves output files for an alignment job.
+
+        Args:
+            id (str): Alignment job ID whose outputs to fetch.
+
+        Returns:
+            list[AlignmentOutput]: Collection of alignment output metadata.
+        """
+
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"flow/alignment/{id}/outputs",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return [AlignmentOutput(**output) for output in response.data]  # type: ignore[arg-type]
+
+    def delete(self, id: str) -> None:
+        """
+        Deletes an alignment job.
+
+        Args:
+            id (str): Alignment job ID to delete.
+
+        Returns:
+            None
+        """
+
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="DELETE",
+                url=f"flow/alignment/{id}",
+            ),
+            stream=False,
+        )
+
+        # Endpoint returns 204 No Content
+        return None
+
+    def cancel(self, id: str) -> AlignmentResponse:
+        """
+        Method to cancel a running alignment job
+
+        Args:
+            id (str): Alignment job ID to cancel. A string that starts with `al-`.
+
+        Returns:
+            AlignmentResponse: Object containing information about cancelled alignment job.
+        """
+
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"flow/alignment/{id}/cancel",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return AlignmentResponse(**response.data)
+
     def estimate(self, files: List[str]) -> AlignmentEstimationResponse:
         requestor = api_requestor.APIRequestor(
             client=self._client,
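
The three methods added above give alignment jobs a full lifecycle beyond creation. A hedged usage sketch, continuing from the client constructed earlier; the `client.alignment` attribute name follows the SDK's naming pattern but is not shown in this diff, and "al-1234" is a placeholder job ID:

    outputs = client.alignment.outputs("al-1234")    # GET flow/alignment/{id}/outputs
    cancelled = client.alignment.cancel("al-1234")   # POST flow/alignment/{id}/cancel
    client.alignment.delete("al-1234")               # DELETE flow/alignment/{id}, returns None (204 No Content)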
@@ -115,6 +204,10 @@ class Alignment(ResourceBase):


 class AsyncAlignment(ResourceBase):
+    def __init__(self, client: SeekrFlowClient) -> None:
+        super().__init__(client)
+        self.system_prompt = AsyncSystemPromptResource(client)
+
     async def generate(
         self,
         instructions: str,
@@ -192,6 +285,86 @@ class AsyncAlignment(ResourceBase):

         return AlignmentResponse(**response.data)

+    async def outputs(self, id: str) -> List[AlignmentOutput]:
+        """
+        Retrieves output files for an alignment job asynchronously.
+
+        Args:
+            id (str): Alignment job ID whose outputs to fetch.
+
+        Returns:
+            list[AlignmentOutput]: Collection of alignment output metadata.
+        """
+
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"flow/alignment/{id}/outputs",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return [AlignmentOutput(**output) for output in response.data]  # type: ignore[arg-type]
+
+    async def delete(self, id: str) -> None:
+        """
+        Deletes an alignment job asynchronously.
+
+        Args:
+            id (str): Alignment job ID to delete.
+
+        Returns:
+            None
+        """
+
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="DELETE",
+                url=f"flow/alignment/{id}",
+            ),
+            stream=False,
+        )
+
+        # Endpoint returns 204 No Content
+        return None
+
+    async def cancel(self, id: str) -> AlignmentResponse:
+        """
+        Async method to cancel a running alignment job
+
+        Args:
+            id (str): Alignment job ID to cancel. A string that starts with `al-`.
+
+        Returns:
+            AlignmentResponse: Object containing information about cancelled alignment job.
+        """
+
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"flow/alignment/{id}/cancel",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return AlignmentResponse(**response.data)
+
     async def estimate(self, files: List[str]) -> AlignmentEstimationResponse:
         requestor = api_requestor.APIRequestor(
             client=self._client,
@@ -212,3 +385,289 @@ class AsyncAlignment(ResourceBase):

         assert isinstance(response, SeekrFlowResponse)
         return AlignmentEstimationResponse(**response.data)
+
+
+class SystemPromptResource(ResourceBase):
+    def create(self, source_id: str, instructions: str) -> SystemPrompt:
+        """
+        Creates a new AI-generated system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to create the system prompt for
+            instructions (str): Instructions for generating the system prompt
+
+        Returns:
+            SystemPrompt: The created system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        parameter_payload = SystemPromptCreateRequest(
+            instructions=instructions
+        ).model_dump()
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"flow/alignment/system_prompt/{source_id}",
+                params=parameter_payload,
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    def get(self, source_id: str) -> SystemPrompt:
+        """
+        Retrieves the system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to retrieve the system prompt for
+
+        Returns:
+            SystemPrompt: The retrieved system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"flow/alignment/system_prompt/{source_id}",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    def regenerate(self, source_id: str, instructions: str) -> SystemPrompt:
+        """
+        Regenerates the AI-generated system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to regenerate the system prompt for
+            instructions (str): Instructions for regenerating the system prompt
+
+        Returns:
+            SystemPrompt: The regenerated system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        parameter_payload = SystemPromptCreateRequest(
+            instructions=instructions
+        ).model_dump()
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"flow/alignment/system_prompt/{source_id}/regenerate",
+                params=parameter_payload,
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    def update(self, source_id: str, content: str) -> SystemPrompt:
+        """
+        Updates the system prompt for the given source_id with custom content.
+
+        Args:
+            source_id (str): The ID of the source to update the system prompt for
+            content (str): The custom content for the system prompt
+
+        Returns:
+            SystemPrompt: The updated system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        parameter_payload = SystemPromptUpdateRequest(content=content).model_dump()
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="PUT",
+                url=f"flow/alignment/system_prompt/{source_id}",
+                params=parameter_payload,
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    def delete(self, source_id: str) -> Dict[str, Any]:
+        """
+        Deletes the system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to delete the system prompt for
+
+        Returns:
+            dict: A dictionary with the deletion result
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = requestor.request(
+            options=SeekrFlowRequest(
+                method="DELETE",
+                url=f"flow/alignment/system_prompt/{source_id}",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return response.data
+
+
+class AsyncSystemPromptResource(ResourceBase):
+    async def create(self, source_id: str, instructions: str) -> SystemPrompt:
+        """
+        Asynchronously creates a new AI-generated system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to create the system prompt for
+            instructions (str): Instructions for generating the system prompt
+
+        Returns:
+            SystemPrompt: The created system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        parameter_payload = SystemPromptCreateRequest(
+            instructions=instructions
+        ).model_dump()
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"flow/alignment/system_prompt/{source_id}",
+                params=parameter_payload,
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    async def get(self, source_id: str) -> SystemPrompt:
+        """
+        Asynchronously retrieves the system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to retrieve the system prompt for
+
+        Returns:
+            SystemPrompt: The retrieved system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"flow/alignment/system_prompt/{source_id}",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    async def regenerate(self, source_id: str, instructions: str) -> SystemPrompt:
+        """
+        Asynchronously regenerates the AI-generated system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to regenerate the system prompt for
+            instructions (str): Instructions for regenerating the system prompt
+
+        Returns:
+            SystemPrompt: The regenerated system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        parameter_payload = SystemPromptCreateRequest(
+            instructions=instructions
+        ).model_dump()
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"flow/alignment/system_prompt/{source_id}/regenerate",
+                params=parameter_payload,
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    async def update(self, source_id: str, content: str) -> SystemPrompt:
+        """
+        Asynchronously updates the system prompt for the given source_id with custom content.
+
+        Args:
+            source_id (str): The ID of the source to update the system prompt for
+            content (str): The custom content for the system prompt
+
+        Returns:
+            SystemPrompt: The updated system prompt
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        parameter_payload = SystemPromptUpdateRequest(content=content).model_dump()
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="PUT",
+                url=f"flow/alignment/system_prompt/{source_id}",
+                params=parameter_payload,
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return SystemPrompt(**response.data)
+
+    async def delete(self, source_id: str) -> Dict[str, Any]:
+        """
+        Asynchronously deletes the system prompt for the given source_id.
+
+        Args:
+            source_id (str): The ID of the source to delete the system prompt for
+
+        Returns:
+            dict: A dictionary with the deletion result
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        response, _, _ = await requestor.arequest(
+            options=SeekrFlowRequest(
+                method="DELETE",
+                url=f"flow/alignment/system_prompt/{source_id}",
+            ),
+            stream=False,
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return response.data
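
With Alignment.__init__ wiring in SystemPromptResource, the new endpoints are reachable as a sub-resource of the alignment client. A hedged sketch of the round trip, continuing from the client constructed earlier; `client.alignment` is an assumed attribute name and "file-abc" is a placeholder source_id:

    sp = client.alignment.system_prompt.create("file-abc", instructions="Answer as a support agent.")
    sp = client.alignment.system_prompt.regenerate("file-abc", instructions="Be more concise.")
    sp = client.alignment.system_prompt.update("file-abc", content="You are a helpful assistant.")
    sp = client.alignment.system_prompt.get("file-abc")
    result = client.alignment.system_prompt.delete("file-abc")  # plain dict describing the deletion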
@@ -3,6 +3,7 @@ from __future__ import annotations
 from pathlib import Path

 from seekrai.abstract import api_requestor
+from seekrai.error import InvalidRequestError
 from seekrai.resources.resource_base import ResourceBase
 from seekrai.seekrflow_response import SeekrFlowResponse
 from seekrai.types import (
@@ -17,6 +18,29 @@ from seekrai.types import (
 )


+def validate_lora_support(
+    models_response: SeekrFlowResponse, training_config: TrainingConfig
+) -> None:
+    assert isinstance(models_response, SeekrFlowResponse)
+    model_entry = None
+    for model in models_response.data.get("data", []):
+        model_id = str(model.get("id")) if model.get("id") is not None else None
+        if (
+            model_id == training_config.model
+            or model.get("name") == training_config.model
+        ):
+            model_entry = model
+            break
+    if not model_entry:
+        raise InvalidRequestError(
+            f"Model '{training_config.model}' not found; cannot enable LoRA."
+        )
+    if not model_entry.get("supports_lora", False):
+        raise InvalidRequestError(
+            f"Model '{training_config.model}' does not support LoRA fine-tuning."
+        )
+
+
 class FineTuning(ResourceBase):
     def create(
         self,
@@ -39,6 +63,16 @@ class FineTuning(ResourceBase):
             client=self._client,
         )

+        if training_config.lora_config is not None:
+            models_response, _, _ = requestor.request(
+                options=SeekrFlowRequest(
+                    method="GET",
+                    url="flow/models",
+                ),
+                stream=False,
+            )
+            validate_lora_support(models_response, training_config)
+
         parameter_payload = FinetuneRequest(
             project_id=project_id,
             training_config=training_config,
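
With this change, create() performs a pre-flight check whenever lora_config is set: it lists flow/models and raises InvalidRequestError if the requested model is unknown or lacks supports_lora. A hedged sketch of what a caller sees; the `client.fine_tuning` attribute name, the keyword-argument call style, and any TrainingConfig fields beyond `model` and `lora_config` are assumptions, not taken from this diff:

    from seekrai.error import InvalidRequestError

    try:
        job = client.fine_tuning.create(
            project_id="proj-123",            # placeholder project ID
            training_config=training_config,  # assumed: a seekrai.types.TrainingConfig with .model and .lora_config set
        )
    except InvalidRequestError as err:
        # e.g. "Model '...' does not support LoRA fine-tuning." or "... not found; cannot enable LoRA."
        print(err)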
@@ -263,6 +297,16 @@ class AsyncFineTuning(ResourceBase):
             client=self._client,
         )

+        if training_config.lora_config is not None:
+            models_response, _, _ = await requestor.arequest(
+                options=SeekrFlowRequest(
+                    method="GET",
+                    url="flow/models",
+                ),
+                stream=False,
+            )
+            validate_lora_support(models_response, training_config)
+
         parameter_payload = FinetuneRequest(
             project_id=project_id,
             training_config=training_config,
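
The async client gets the same guard: AsyncFineTuning.create awaits the flow/models lookup before submitting a LoRA job. A hedged async sketch, assuming AsyncSeekrFlow is importable from the package root, takes an api_key argument, and exposes the resource as `fine_tuning` (none of which is shown in this diff):

    import asyncio

    from seekrai import AsyncSeekrFlow

    async def main() -> None:
        client = AsyncSeekrFlow(api_key="...")  # assumed constructor signature
        await client.fine_tuning.create(
            project_id="proj-123",
            training_config=training_config,  # same assumed TrainingConfig as above; same pre-flight LoRA check
        )

    asyncio.run(main())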