groundx 2.6.3__py3-none-any.whl → 2.7.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
groundx/__init__.py CHANGED
@@ -52,6 +52,7 @@ from .types import (
     WebsiteSource,
     WorkflowApplyRequest,
     WorkflowDetail,
+    WorkflowDetailChunkStrategy,
     WorkflowDetailRelationships,
     WorkflowEngine,
     WorkflowEngineReasoningEffort,
@@ -59,16 +60,13 @@ from .types import (
     WorkflowPrompt,
     WorkflowPromptGroup,
     WorkflowPromptRole,
+    WorkflowRequest,
+    WorkflowRequestChunkStrategy,
     WorkflowResponse,
     WorkflowStep,
+    WorkflowStepConfig,
+    WorkflowStepConfigField,
     WorkflowSteps,
-    WorkflowStepsChunkInstruct,
-    WorkflowStepsChunkSummary,
-    WorkflowStepsDocKeys,
-    WorkflowStepsDocSummary,
-    WorkflowStepsSearchQuery,
-    WorkflowStepsSectInstruct,
-    WorkflowStepsSectSummary,
     WorkflowsResponse,
 )
 from .errors import BadRequestError, UnauthorizedError
@@ -135,6 +133,7 @@ __all__ = [
     "WebsiteSource",
     "WorkflowApplyRequest",
     "WorkflowDetail",
+    "WorkflowDetailChunkStrategy",
     "WorkflowDetailRelationships",
     "WorkflowEngine",
     "WorkflowEngineReasoningEffort",
@@ -142,16 +141,13 @@ __all__ = [
     "WorkflowPrompt",
     "WorkflowPromptGroup",
     "WorkflowPromptRole",
+    "WorkflowRequest",
+    "WorkflowRequestChunkStrategy",
     "WorkflowResponse",
     "WorkflowStep",
+    "WorkflowStepConfig",
+    "WorkflowStepConfigField",
     "WorkflowSteps",
-    "WorkflowStepsChunkInstruct",
-    "WorkflowStepsChunkSummary",
-    "WorkflowStepsDocKeys",
-    "WorkflowStepsDocSummary",
-    "WorkflowStepsSearchQuery",
-    "WorkflowStepsSectInstruct",
-    "WorkflowStepsSectSummary",
     "WorkflowsGetRequestId",
     "WorkflowsResponse",
     "__version__",
@@ -14,10 +14,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "groundx/2.6.3",
+            "User-Agent": "groundx/2.7.2",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "groundx",
-            "X-Fern-SDK-Version": "2.6.3",
+            "X-Fern-SDK-Version": "2.7.2",
         }
         headers["X-API-Key"] = self.api_key
         return headers
@@ -2,8 +2,6 @@ import typing
 
 
 def logging_config(name: str, level: str) -> typing.Dict[str, typing.Any]:
-    print(level)
-
     return {
         "version": 1,
         "disable_existing_loggers": False,
@@ -17,6 +17,8 @@ GX_DEFAULT_REGION: str = "GROUNDX_DEFAULT_REGION"
 GX_SECRET: str = "GROUNDX_SECRET_ACCESS_KEY"
 GX_TOKEN: str = "GROUNDX_SESSION_TOKEN"
 VALID_KEYS: str = "GROUNDX_VALID_API_KEYS"
+GX_ADMIN_API_KEY: str = "GROUNDX_ADMIN_API_KEY"
+GX_ADMIN_USERNAME: str = "GROUNDX_ADMIN_USERNAME"
 
 
 class AgentSettings(BaseModel):
@@ -77,22 +79,54 @@ class ContainerSettings(BaseModel):
         if key:
             return key
 
+        key = os.environ.get(GX_ADMIN_API_KEY)
+        if key:
+            return key
+
+        key = os.environ.get(GX_ADMIN_USERNAME)
+        if key:
+            return key
+
+        key = os.environ.get(GX_API_KEY)
+        if key:
+            return key
+
         raise Exception(f"you must set a callback_api_key")
 
     def get_valid_api_keys(self) -> typing.List[str]:
+        keys: typing.List[str] = []
+
         if self.valid_api_keys:
-            return self.valid_api_keys
+            keys = self.valid_api_keys
 
-        keys: typing.Optional[str] = os.environ.get(VALID_KEYS)
-        if not keys:
-            raise Exception(f"you must set an array of valid_api_keys")
+        env_keys: typing.Optional[str] = os.environ.get(VALID_KEYS)
+        if env_keys:
+            try:
+                data: typing.List[str] = json.loads(env_keys)
+                keys.extend(data)
+            except Exception as e:
+                raise Exception(f"you must set an array of valid_api_keys: {e}")
 
-        try:
-            data: typing.List[str] = json.loads(keys)
-        except Exception as e:
-            raise Exception(f"you must set an array of valid_api_keys: {e}")
+        key = os.environ.get(CALLBACK_KEY)
+        if key:
+            keys.append(key)
 
-        return data
+        key = os.environ.get(GX_ADMIN_API_KEY)
+        if key:
+            keys.append(key)
+
+        key = os.environ.get(GX_ADMIN_USERNAME)
+        if key:
+            keys.append(key)
+
+        key = os.environ.get(GX_API_KEY)
+        if key:
+            keys.append(key)
+
+        if len(keys) < 1:
+            raise Exception(f"you must set an array of valid_api_keys")
+
+        return keys
 
     def loglevel(self) -> str:
         return self.log_level.upper()
@@ -167,4 +201,12 @@ class GroundXSettings(BaseModel):
         if key:
             return key
 
+        key = os.environ.get(GX_ADMIN_API_KEY)
+        if key:
+            return key
+
+        key = os.environ.get(GX_ADMIN_USERNAME)
+        if key:
+            return key
+
         raise Exception(f"you must set a valid GroundX api_key")
@@ -45,7 +45,6 @@ class TestAgentSettings(unittest.TestCase):
             "expect": {
                 "api_base": "http://test.com",
                 "api_key": "mykey",
-                "api_key_env": "myenv",
                 "max_steps": 4,
                 "model_id": "gpt-5",
             },
@@ -452,10 +451,8 @@ class TestGroundXSettings(unittest.TestCase):
     def test(self) -> None:
         tsts: typing.List[typing.Dict[str, typing.Any]] = [
             {
-                "api_key_env": "",
                 "expect": {
                     "api_key": Exception,
-                    "api_key_env": "",
                     "base_url": None,
                     "upload_url": "https://upload.eyelevel.ai",
                 },
groundx/ingest.py CHANGED
@@ -57,6 +57,7 @@ MAX_BATCH_SIZE = 50
 MIN_BATCH_SIZE = 1
 MAX_BATCH_SIZE_BYTES = 50 * 1024 * 1024
 
+
 def get_presigned_url(
     endpoint: str,
     file_name: str,
@@ -68,6 +69,7 @@ def get_presigned_url(
 
     return response.json()
 
+
 def strip_query_params(
     url: str,
 ) -> str:
@@ -76,6 +78,7 @@ def strip_query_params(
 
     return clean_url
 
+
 def prep_documents(
     documents: typing.Sequence[Document],
 ) -> typing.Tuple[
@@ -127,16 +130,16 @@ def prep_documents(
 
 def split_doc(file: Path) -> typing.List[Path]:
     if file.is_file() and (
-        file.suffix.lower() in ALLOWED_SUFFIXES
-        or file.suffix.lower() in SUFFIX_ALIASES
+        file.suffix.lower() in ALLOWED_SUFFIXES or file.suffix.lower() in SUFFIX_ALIASES
     ):
         if file.suffix.lower() in CSV_SPLITS:
             return CSVSplitter(filepath=file).split()
         elif file.suffix.lower() in TSV_SPLITS:
-            return CSVSplitter(filepath=file, delimiter='\t').split()
+            return CSVSplitter(filepath=file, delimiter="\t").split()
         return [file]
     return []
 
+
 class GroundX(GroundXBase):
     def ingest(
         self,
@@ -207,11 +210,19 @@ class GroundX(GroundXBase):
             raise ValueError("No valid documents were provided")
 
         if wait_for_complete:
-            with tqdm(total=len(remote_documents) + len(local_documents), desc="Ingesting Files", unit="file") as pbar:
-                n = max(MIN_BATCH_SIZE, min(batch_size or MIN_BATCH_SIZE, MAX_BATCH_SIZE))
+            with tqdm(
+                total=len(remote_documents) + len(local_documents),
+                desc="Ingesting Files",
+                unit="file",
+            ) as pbar:
+                n = max(
+                    MIN_BATCH_SIZE, min(batch_size or MIN_BATCH_SIZE, MAX_BATCH_SIZE)
+                )
 
                 remote_batch: typing.List[IngestRemoteDocument] = []
-                ingest = IngestResponse(ingest=IngestStatus(process_id="",status="queued"))
+                ingest = IngestResponse(
+                    ingest=IngestStatus(process_id="", status="queued")
+                )
 
                 progress = float(len(remote_documents))
                 for rd in remote_documents:
@@ -239,7 +250,6 @@
                     )
                     ingest, progress = self._monitor_batch(ingest, progress, pbar)
 
-
                 if progress > 0:
                     pbar.update(progress)
 
@@ -251,8 +261,12 @@
                     fp = Path(os.path.expanduser(ld.file_path))
                     file_size = fp.stat().st_size
 
-                    if (current_batch_size + file_size > MAX_BATCH_SIZE_BYTES) or (len(local_batch) >= n):
-                        up_docs, progress = self._process_local(local_batch, upload_api, progress, pbar)
+                    if (current_batch_size + file_size > MAX_BATCH_SIZE_BYTES) or (
+                        len(local_batch) >= n
+                    ):
+                        up_docs, progress = self._process_local(
+                            local_batch, upload_api, progress, pbar
+                        )
 
                         ingest = self.documents.ingest_remote(
                             documents=up_docs,
@@ -269,7 +283,9 @@
                     current_batch_size += file_size
 
                 if local_batch:
-                    up_docs, progress = self._process_local(local_batch, upload_api, progress, pbar)
+                    up_docs, progress = self._process_local(
+                        local_batch, upload_api, progress, pbar
+                    )
 
                     ingest = self.documents.ingest_remote(
                         documents=up_docs,
@@ -286,7 +302,6 @@
         elif len(remote_documents) + len(local_documents) > MAX_BATCH_SIZE:
             raise ValueError("You have sent too many documents in this request")
 
-
         up_docs, _ = self._process_local(local_documents, upload_api, 0, None)
         remote_documents.extend(up_docs)
 
@@ -360,9 +375,9 @@
             matched_files: typing.List[Path] = []
             for file in dir_path.rglob("*"):
                 for sd in split_doc(file):
-                    matched_files.append(sd)
+                    matched_files.append(sd)
 
-            return matched_files
+            return matched_files
 
         if bucket_id < 1:
             raise ValueError(f"Invalid bucket_id: {bucket_id}")
@@ -384,8 +399,18 @@
             for file in files:
                 file_size = file.stat().st_size
 
-                if (current_batch_size + file_size > MAX_BATCH_SIZE_BYTES) or (len(current_batch) >= n):
-                    self._upload_file_batch(bucket_id, current_batch, upload_api, callback_url, callback_data, request_options, pbar)
+                if (current_batch_size + file_size > MAX_BATCH_SIZE_BYTES) or (
+                    len(current_batch) >= n
+                ):
+                    self._upload_file_batch(
+                        bucket_id,
+                        current_batch,
+                        upload_api,
+                        callback_url,
+                        callback_data,
+                        request_options,
+                        pbar,
+                    )
                     current_batch = []
                     current_batch_size = 0
 
@@ -393,7 +418,15 @@
                 current_batch_size += file_size
 
             if current_batch:
-                self._upload_file_batch(bucket_id, current_batch, upload_api, callback_url, callback_data, request_options, pbar)
+                self._upload_file_batch(
+                    bucket_id,
+                    current_batch,
+                    upload_api,
+                    callback_url,
+                    callback_data,
+                    request_options,
+                    pbar,
+                )
 
     def _upload_file(
         self,
@@ -408,12 +441,13 @@
         presigned_info = get_presigned_url(endpoint, file_name, file_extension)
 
         upload_url = presigned_info["URL"]
-        headers = presigned_info.get("Header", {})
+        hd = presigned_info.get("Header", {})
         method = presigned_info.get("Method", "PUT").upper()
 
-        for key, value in headers.items():
+        headers: typing.Dict[str, typing.Any] = {}
+        for key, value in hd.items():
            if isinstance(value, list):
-                headers[key] = value[0]
+                headers[key.upper()] = value[0]
 
         try:
             with open(file_path, "rb") as f:
@@ -431,6 +465,9 @@
                     f"Upload failed: {upload_response.status_code} - {upload_response.text}"
                 )
 
+        if "GX-HOSTED-URL" in headers:
+            return headers["GX-HOSTED-URL"]
+
         return strip_query_params(upload_url)
 
     def _process_local(
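Both the synchronous and asynchronous _upload_file paths now copy the presigned response headers into a new dict with upper-cased keys and, when a GX-HOSTED-URL header is present, return that hosted URL instead of the presigned upload URL with its query string stripped. A reduced sketch of just that decision (the upload itself is omitted; the presigned_info shape follows the keys used above):

import typing


def resolve_document_url(presigned_info: typing.Dict[str, typing.Any]) -> str:
    # Illustrative reduction of the new return logic in _upload_file.
    upload_url: str = presigned_info["URL"]

    headers: typing.Dict[str, typing.Any] = {}
    for key, value in presigned_info.get("Header", {}).items():
        if isinstance(value, list):
            headers[key.upper()] = value[0]  # header keys are normalized to upper case in 2.7.2

    if "GX-HOSTED-URL" in headers:
        return headers["GX-HOSTED-URL"]

    return upload_url.split("?", 1)[0]  # stand-in for strip_query_params()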
@@ -481,39 +518,62 @@ class GroundX(GroundXBase):
     ) -> typing.Tuple[IngestResponse, float]:
         completed_files: typing.Set[str] = set()
 
-        while (
-            ingest.ingest.status not in ["complete", "error", "cancelled"]
-        ):
+        while ingest.ingest.status not in ["complete", "error", "cancelled"]:
             time.sleep(3)
-            ingest = self.documents.get_processing_status_by_id(ingest.ingest.process_id)
+            ingest = self.documents.get_processing_status_by_id(
+                ingest.ingest.process_id
+            )
 
             if ingest.ingest.progress:
-                if ingest.ingest.progress.processing and ingest.ingest.progress.processing.documents:
+                if (
+                    ingest.ingest.progress.processing
+                    and ingest.ingest.progress.processing.documents
+                ):
                     for doc in ingest.ingest.progress.processing.documents:
-                        if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
+                        if (
+                            doc.status in ["complete", "error", "cancelled"]
+                            and doc.document_id not in completed_files
+                        ):
                             pbar.update(0.75)
                             progress -= 0.75
                             completed_files.add(doc.document_id)
-                if ingest.ingest.progress.complete and ingest.ingest.progress.complete.documents:
+                if (
+                    ingest.ingest.progress.complete
+                    and ingest.ingest.progress.complete.documents
+                ):
                     for doc in ingest.ingest.progress.complete.documents:
-                        if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
+                        if (
+                            doc.status in ["complete", "error", "cancelled"]
+                            and doc.document_id not in completed_files
+                        ):
                             pbar.update(0.75)
                             progress -= 0.75
                             completed_files.add(doc.document_id)
-                if ingest.ingest.progress.cancelled and ingest.ingest.progress.cancelled.documents:
+                if (
+                    ingest.ingest.progress.cancelled
+                    and ingest.ingest.progress.cancelled.documents
+                ):
                     for doc in ingest.ingest.progress.cancelled.documents:
-                        if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
+                        if (
+                            doc.status in ["complete", "error", "cancelled"]
+                            and doc.document_id not in completed_files
+                        ):
                             pbar.update(0.75)
                             progress -= 0.75
                             completed_files.add(doc.document_id)
-                if ingest.ingest.progress.errors and ingest.ingest.progress.errors.documents:
+                if (
+                    ingest.ingest.progress.errors
+                    and ingest.ingest.progress.errors.documents
+                ):
                     for doc in ingest.ingest.progress.errors.documents:
-                        if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
+                        if (
+                            doc.status in ["complete", "error", "cancelled"]
+                            and doc.document_id not in completed_files
+                        ):
                             pbar.update(0.75)
                             progress -= 0.75
                             completed_files.add(doc.document_id)
 
-
         if ingest.ingest.status in ["error", "cancelled"]:
             raise ValueError(f"Ingest failed with status: {ingest.ingest.status}")
 
@@ -531,7 +591,7 @@
     ) -> None:
         docs: typing.List[Document] = []
 
-        progress = float(len(batch))
+        progress = float(len(batch))
         for file in batch:
             url = self._upload_file(upload_api, file)
             if file.suffix.lower() in SUFFIX_ALIASES:
@@ -567,7 +627,6 @@
             pbar.update(progress)
 
 
-
 class AsyncGroundX(AsyncGroundXBase):
     async def ingest(
         self,
@@ -682,12 +741,13 @@ class AsyncGroundX(AsyncGroundXBase):
         presigned_info = get_presigned_url(endpoint, file_name, file_extension)
 
         upload_url = presigned_info["URL"]
-        headers = presigned_info.get("Header", {})
+        hd = presigned_info.get("Header", {})
         method = presigned_info.get("Method", "PUT").upper()
 
-        for key, value in headers.items():
+        headers: typing.Dict[str, typing.Any] = {}
+        for key, value in hd.items():
             if isinstance(value, list):
-                headers[key] = value[0]
+                headers[key.upper()] = value[0]
 
         try:
             with open(file_path, "rb") as f:
@@ -705,4 +765,7 @@ class AsyncGroundX(AsyncGroundXBase):
                     f"Upload failed: {upload_response.status_code} - {upload_response.text}"
                 )
 
+        if "GX-HOSTED-URL" in headers:
+            return headers["GX-HOSTED-URL"]
+
         return strip_query_params(upload_url)
groundx/types/__init__.py CHANGED
@@ -51,6 +51,7 @@ from .subscription_detail_meters import SubscriptionDetailMeters
 from .website_source import WebsiteSource
 from .workflow_apply_request import WorkflowApplyRequest
 from .workflow_detail import WorkflowDetail
+from .workflow_detail_chunk_strategy import WorkflowDetailChunkStrategy
 from .workflow_detail_relationships import WorkflowDetailRelationships
 from .workflow_engine import WorkflowEngine
 from .workflow_engine_reasoning_effort import WorkflowEngineReasoningEffort
@@ -58,16 +59,13 @@ from .workflow_engine_service import WorkflowEngineService
 from .workflow_prompt import WorkflowPrompt
 from .workflow_prompt_group import WorkflowPromptGroup
 from .workflow_prompt_role import WorkflowPromptRole
+from .workflow_request import WorkflowRequest
+from .workflow_request_chunk_strategy import WorkflowRequestChunkStrategy
 from .workflow_response import WorkflowResponse
 from .workflow_step import WorkflowStep
+from .workflow_step_config import WorkflowStepConfig
+from .workflow_step_config_field import WorkflowStepConfigField
 from .workflow_steps import WorkflowSteps
-from .workflow_steps_chunk_instruct import WorkflowStepsChunkInstruct
-from .workflow_steps_chunk_summary import WorkflowStepsChunkSummary
-from .workflow_steps_doc_keys import WorkflowStepsDocKeys
-from .workflow_steps_doc_summary import WorkflowStepsDocSummary
-from .workflow_steps_search_query import WorkflowStepsSearchQuery
-from .workflow_steps_sect_instruct import WorkflowStepsSectInstruct
-from .workflow_steps_sect_summary import WorkflowStepsSectSummary
 from .workflows_response import WorkflowsResponse
 
 __all__ = [
@@ -120,6 +118,7 @@ __all__ = [
     "WebsiteSource",
     "WorkflowApplyRequest",
     "WorkflowDetail",
+    "WorkflowDetailChunkStrategy",
     "WorkflowDetailRelationships",
     "WorkflowEngine",
     "WorkflowEngineReasoningEffort",
@@ -127,15 +126,12 @@ __all__ = [
     "WorkflowPrompt",
     "WorkflowPromptGroup",
     "WorkflowPromptRole",
+    "WorkflowRequest",
+    "WorkflowRequestChunkStrategy",
     "WorkflowResponse",
     "WorkflowStep",
+    "WorkflowStepConfig",
+    "WorkflowStepConfigField",
     "WorkflowSteps",
-    "WorkflowStepsChunkInstruct",
-    "WorkflowStepsChunkSummary",
-    "WorkflowStepsDocKeys",
-    "WorkflowStepsDocSummary",
-    "WorkflowStepsSearchQuery",
-    "WorkflowStepsSectInstruct",
-    "WorkflowStepsSectSummary",
     "WorkflowsResponse",
 ]
@@ -6,6 +6,7 @@ import pydantic
 import typing_extensions
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from ..core.serialization import FieldMetadata
+from .workflow_detail_chunk_strategy import WorkflowDetailChunkStrategy
 from .workflow_detail_relationships import WorkflowDetailRelationships
 from .workflow_steps import WorkflowSteps
 
@@ -15,6 +16,9 @@ class WorkflowDetail(UniversalBaseModel):
     Workflow information
     """
 
+    chunk_strategy: typing_extensions.Annotated[
+        typing.Optional[WorkflowDetailChunkStrategy], FieldMetadata(alias="chunkStrategy")
+    ] = None
     document_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="documentId")] = pydantic.Field(
         default=None
     )
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+WorkflowDetailChunkStrategy = typing.Union[typing.Literal["element", "size"], typing.Any]
@@ -3,9 +3,7 @@
 import typing
 
 import pydantic
-import typing_extensions
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from ..core.serialization import FieldMetadata
 from .workflow_prompt_role import WorkflowPromptRole
 
 
@@ -19,7 +17,7 @@ class WorkflowPrompt(UniversalBaseModel):
     A short version of the prompt that is included in historical chat transcripts as part of the prompt context
     """
 
-    long_: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="long")] = pydantic.Field(default=None)
+    prompt: typing.Optional[str] = pydantic.Field(default=None)
     """
     The prompt that is sent to the LLM
     """
@@ -2,4 +2,4 @@
 
 import typing
 
-WorkflowPromptRole = typing.Union[typing.Literal["assistant", "system", "user"], typing.Any]
+WorkflowPromptRole = typing.Union[typing.Literal["assistant", "developer", "system", "user"], typing.Any]
@@ -6,12 +6,20 @@ import pydantic
 import typing_extensions
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from ..core.serialization import FieldMetadata
-from .workflow_step import WorkflowStep
+from .workflow_request_chunk_strategy import WorkflowRequestChunkStrategy
+from .workflow_steps import WorkflowSteps
 
 
-class WorkflowStepsDocSummary(UniversalBaseModel):
-    all_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="all")] = None
-    json_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="json")] = None
+class WorkflowRequest(UniversalBaseModel):
+    chunk_strategy: typing_extensions.Annotated[
+        typing.Optional[WorkflowRequestChunkStrategy], FieldMetadata(alias="chunkStrategy")
+    ] = None
+    name: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    The name of the workflow being created.
+    """
+
+    steps: typing.Optional[WorkflowSteps] = None
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+WorkflowRequestChunkStrategy = typing.Union[typing.Literal["element", "size"], typing.Any]
@@ -3,9 +3,10 @@
 import typing
 
 import pydantic
+import typing_extensions
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from .workflow_engine import WorkflowEngine
-from .workflow_prompt_group import WorkflowPromptGroup
+from ..core.serialization import FieldMetadata
+from .workflow_step_config import WorkflowStepConfig
 
 
 class WorkflowStep(UniversalBaseModel):
@@ -13,8 +14,14 @@ class WorkflowStep(UniversalBaseModel):
     Configurations for an agent, including LLM information and prompts
     """
 
-    engine: typing.Optional[WorkflowEngine] = None
-    prompt: typing.Optional[WorkflowPromptGroup] = None
+    all_: typing_extensions.Annotated[typing.Optional[WorkflowStepConfig], FieldMetadata(alias="all")] = None
+    figure: typing.Optional[WorkflowStepConfig] = None
+    json_: typing_extensions.Annotated[typing.Optional[WorkflowStepConfig], FieldMetadata(alias="json")] = None
+    paragraph: typing.Optional[WorkflowStepConfig] = None
+    table: typing.Optional[WorkflowStepConfig] = None
+    table_figure: typing_extensions.Annotated[
+        typing.Optional[WorkflowStepConfig], FieldMetadata(alias="table-figure")
+    ] = None
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .workflow_engine import WorkflowEngine
+from .workflow_prompt_group import WorkflowPromptGroup
+from .workflow_step_config_field import WorkflowStepConfigField
+
+
+class WorkflowStepConfig(UniversalBaseModel):
+    """
+    Configurations for an agent, including LLM information and prompts
+    """
+
+    engine: typing.Optional[WorkflowEngine] = None
+    field: typing.Optional[WorkflowStepConfigField] = pydantic.Field(default=None)
+    """
+    The field where agent output will be saved
+    """
+
+    includes: typing.Optional[typing.Dict[str, bool]] = None
+    prompt: typing.Optional[WorkflowPromptGroup] = None
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
@@ -0,0 +1,8 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+WorkflowStepConfigField = typing.Union[
+    typing.Literal["doc-sum", "doc-keys", "sect-sum", "sect-keys", "chunk-sum", "chunk-keys", "chunk-instruct", "text"],
+    typing.Any,
+]
@@ -6,13 +6,7 @@ import pydantic
 import typing_extensions
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from ..core.serialization import FieldMetadata
-from .workflow_steps_chunk_instruct import WorkflowStepsChunkInstruct
-from .workflow_steps_chunk_summary import WorkflowStepsChunkSummary
-from .workflow_steps_doc_keys import WorkflowStepsDocKeys
-from .workflow_steps_doc_summary import WorkflowStepsDocSummary
-from .workflow_steps_search_query import WorkflowStepsSearchQuery
-from .workflow_steps_sect_instruct import WorkflowStepsSectInstruct
-from .workflow_steps_sect_summary import WorkflowStepsSectSummary
+from .workflow_step import WorkflowStep
 
 
 class WorkflowSteps(UniversalBaseModel):
@@ -21,24 +15,18 @@ class WorkflowSteps(UniversalBaseModel):
     """
 
     chunk_instruct: typing_extensions.Annotated[
-        typing.Optional[WorkflowStepsChunkInstruct], FieldMetadata(alias="chunk-instruct")
-    ] = None
-    chunk_summary: typing_extensions.Annotated[
-        typing.Optional[WorkflowStepsChunkSummary], FieldMetadata(alias="chunk-summary")
-    ] = None
-    doc_keys: typing_extensions.Annotated[typing.Optional[WorkflowStepsDocKeys], FieldMetadata(alias="doc-keys")] = None
-    doc_summary: typing_extensions.Annotated[
-        typing.Optional[WorkflowStepsDocSummary], FieldMetadata(alias="doc-summary")
-    ] = None
-    search_query: typing_extensions.Annotated[
-        typing.Optional[WorkflowStepsSearchQuery], FieldMetadata(alias="search-query")
-    ] = None
-    sect_instruct: typing_extensions.Annotated[
-        typing.Optional[WorkflowStepsSectInstruct], FieldMetadata(alias="sect-instruct")
-    ] = None
-    sect_summary: typing_extensions.Annotated[
-        typing.Optional[WorkflowStepsSectSummary], FieldMetadata(alias="sect-summary")
+        typing.Optional[WorkflowStep], FieldMetadata(alias="chunk-instruct")
     ] = None
+    chunk_summary: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="chunk-summary")] = (
+        None
+    )
+    doc_keys: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="doc-keys")] = None
+    doc_summary: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="doc-summary")] = None
+    search_query: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="search-query")] = None
+    sect_instruct: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="sect-instruct")] = (
+        None
+    )
+    sect_summary: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="sect-summary")] = None
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
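Taken together, these type changes invert the old hierarchy: WorkflowSteps now maps each pipeline step (chunk-instruct, chunk-summary, doc-keys, doc-summary, search-query, sect-instruct, sect-summary) to a WorkflowStep, and WorkflowStep maps element types (all, figure, json, paragraph, table, table-figure) to a WorkflowStepConfig carrying the engine, prompt group, output field, and includes flags. A hedged construction sketch using only fields visible in these hunks (keyword-by-field-name construction and the includes keys are assumptions):

from groundx import WorkflowStep, WorkflowStepConfig, WorkflowSteps

# Configure the chunk-summary step for every element type ("all"),
# writing agent output to the "chunk-sum" field.
chunk_summary = WorkflowStep(
    all_=WorkflowStepConfig(
        field="chunk-sum",          # one of the WorkflowStepConfigField literals
        includes={"figure": True},  # assumption: includes is a per-feature boolean map
    ),
)

steps = WorkflowSteps(chunk_summary=chunk_summary)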
@@ -5,6 +5,7 @@ import typing
 from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from ..core.request_options import RequestOptions
 from ..types.message_response import MessageResponse
+from ..types.workflow_request_chunk_strategy import WorkflowRequestChunkStrategy
 from ..types.workflow_response import WorkflowResponse
 from ..types.workflow_steps import WorkflowSteps
 from ..types.workflows_response import WorkflowsResponse
@@ -59,6 +60,7 @@ class WorkflowsClient:
     def create(
         self,
         *,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -68,6 +70,8 @@ class WorkflowsClient:
 
         Parameters
         ----------
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
+
         name : typing.Optional[str]
             The name of the workflow being created.
 
@@ -90,7 +94,9 @@ class WorkflowsClient:
         )
         client.workflows.create()
         """
-        _response = self._raw_client.create(name=name, steps=steps, request_options=request_options)
+        _response = self._raw_client.create(
+            chunk_strategy=chunk_strategy, name=name, steps=steps, request_options=request_options
+        )
         return _response.data
 
     def add_to_account(
@@ -257,7 +263,7 @@ class WorkflowsClient:
         self,
         id: str,
         *,
-        workflow_id: str,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -270,8 +276,7 @@ class WorkflowsClient:
         id : str
             The workflowId of the workflow being updated.
 
-        workflow_id : str
-            The id of the workflow that is being updated.
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
 
         name : typing.Optional[str]
             The name of the workflow being created.
@@ -295,11 +300,10 @@ class WorkflowsClient:
         )
         client.workflows.update(
             id="id",
-            workflow_id="workflowId",
         )
         """
         _response = self._raw_client.update(
-            id, workflow_id=workflow_id, name=name, steps=steps, request_options=request_options
+            id, chunk_strategy=chunk_strategy, name=name, steps=steps, request_options=request_options
         )
         return _response.data
 
@@ -387,6 +391,7 @@ class AsyncWorkflowsClient:
     async def create(
         self,
         *,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -396,6 +401,8 @@ class AsyncWorkflowsClient:
 
         Parameters
         ----------
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
+
         name : typing.Optional[str]
             The name of the workflow being created.
 
@@ -426,7 +433,9 @@ class AsyncWorkflowsClient:
 
         asyncio.run(main())
         """
-        _response = await self._raw_client.create(name=name, steps=steps, request_options=request_options)
+        _response = await self._raw_client.create(
+            chunk_strategy=chunk_strategy, name=name, steps=steps, request_options=request_options
+        )
         return _response.data
 
     async def add_to_account(
@@ -635,7 +644,7 @@ class AsyncWorkflowsClient:
         self,
         id: str,
         *,
-        workflow_id: str,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -648,8 +657,7 @@ class AsyncWorkflowsClient:
         id : str
             The workflowId of the workflow being updated.
 
-        workflow_id : str
-            The id of the workflow that is being updated.
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
 
         name : typing.Optional[str]
             The name of the workflow being created.
@@ -678,14 +686,13 @@ class AsyncWorkflowsClient:
         async def main() -> None:
             await client.workflows.update(
                 id="id",
-                workflow_id="workflowId",
             )
 
 
         asyncio.run(main())
         """
         _response = await self._raw_client.update(
-            id, workflow_id=workflow_id, name=name, steps=steps, request_options=request_options
+            id, chunk_strategy=chunk_strategy, name=name, steps=steps, request_options=request_options
         )
         return _response.data
 
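On the client surface the same change appears as a new optional chunk_strategy keyword on workflows.create and workflows.update, while update no longer takes workflow_id (the path id alone identifies the workflow). A usage sketch extrapolated from the docstring examples in these hunks (the api_key value and step contents are placeholders, and the top-level GroundX import is assumed to follow the SDK's usual pattern):

from groundx import GroundX, WorkflowStep, WorkflowStepConfig, WorkflowSteps

client = GroundX(api_key="YOUR_API_KEY")

# chunk_strategy accepts the WorkflowRequestChunkStrategy literals "element" or "size".
created = client.workflows.create(
    name="my-workflow",
    chunk_strategy="element",
    steps=WorkflowSteps(
        doc_summary=WorkflowStep(all_=WorkflowStepConfig(field="doc-sum")),
    ),
)

# update() now identifies the workflow solely by the path parameter.
client.workflows.update(id="id", chunk_strategy="size")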
@@ -11,6 +11,7 @@ from ..core.pydantic_utilities import parse_obj_as
 from ..core.request_options import RequestOptions
 from ..core.serialization import convert_and_respect_annotation_metadata
 from ..types.message_response import MessageResponse
+from ..types.workflow_request_chunk_strategy import WorkflowRequestChunkStrategy
 from ..types.workflow_response import WorkflowResponse
 from ..types.workflow_steps import WorkflowSteps
 from ..types.workflows_response import WorkflowsResponse
@@ -61,6 +62,7 @@ class RawWorkflowsClient:
     def create(
         self,
         *,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -70,6 +72,8 @@
 
         Parameters
         ----------
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
+
         name : typing.Optional[str]
             The name of the workflow being created.
 
@@ -87,6 +91,7 @@
             "v1/workflow",
             method="POST",
             json={
+                "chunkStrategy": chunk_strategy,
                 "name": name,
                 "steps": convert_and_respect_annotation_metadata(
                     object_=steps, annotation=WorkflowSteps, direction="write"
@@ -326,7 +331,7 @@
         self,
         id: str,
         *,
-        workflow_id: str,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -339,8 +344,7 @@
         id : str
             The workflowId of the workflow being updated.
 
-        workflow_id : str
-            The id of the workflow that is being updated.
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
 
         name : typing.Optional[str]
             The name of the workflow being created.
@@ -359,11 +363,11 @@
             f"v1/workflow/{jsonable_encoder(id)}",
             method="PUT",
             json={
+                "chunkStrategy": chunk_strategy,
                 "name": name,
                 "steps": convert_and_respect_annotation_metadata(
                     object_=steps, annotation=WorkflowSteps, direction="write"
                 ),
-                "workflowId": workflow_id,
             },
             headers={
                 "content-type": "application/json",
@@ -469,6 +473,7 @@ class AsyncRawWorkflowsClient:
     async def create(
         self,
         *,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -478,6 +483,8 @@
 
         Parameters
         ----------
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
+
         name : typing.Optional[str]
             The name of the workflow being created.
 
@@ -495,6 +502,7 @@
             "v1/workflow",
             method="POST",
             json={
+                "chunkStrategy": chunk_strategy,
                 "name": name,
                 "steps": convert_and_respect_annotation_metadata(
                     object_=steps, annotation=WorkflowSteps, direction="write"
@@ -734,7 +742,7 @@
         self,
         id: str,
         *,
-        workflow_id: str,
+        chunk_strategy: typing.Optional[WorkflowRequestChunkStrategy] = OMIT,
         name: typing.Optional[str] = OMIT,
         steps: typing.Optional[WorkflowSteps] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
@@ -747,8 +755,7 @@
         id : str
             The workflowId of the workflow being updated.
 
-        workflow_id : str
-            The id of the workflow that is being updated.
+        chunk_strategy : typing.Optional[WorkflowRequestChunkStrategy]
 
         name : typing.Optional[str]
             The name of the workflow being created.
@@ -767,11 +774,11 @@
             f"v1/workflow/{jsonable_encoder(id)}",
             method="PUT",
             json={
+                "chunkStrategy": chunk_strategy,
                 "name": name,
                 "steps": convert_and_respect_annotation_metadata(
                     object_=steps, annotation=WorkflowSteps, direction="write"
                 ),
-                "workflowId": workflow_id,
             },
             headers={
                 "content-type": "application/json",
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: groundx
-Version: 2.6.3
+Version: 2.7.2
 Summary:
 License: MIT
 Requires-Python: >=3.8,<4.0
@@ -1,11 +1,11 @@
-groundx/__init__.py,sha256=4J8NWDCfslhhQE1OKHrYQ98bP-EmfPqJlOo6CtGeO1c,4129
+groundx/__init__.py,sha256=4bcjoYc2ZZw1k364bhk_b95byRwk-epto1rggWlljUo,3989
 groundx/buckets/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 groundx/buckets/client.py,sha256=F1tcqQoqmrC8lQtRZvmXldIdVxIp1LWfdbAfY8SB5sM,11460
 groundx/buckets/raw_client.py,sha256=T2Ty5obN7eHbaxHGAimzjM8MGOmSOQEckhciyZkzcjE,23873
 groundx/client.py,sha256=PksVIgU2pXup9Ewkl7NcLPvQOIhg_Do3cJVGgXqqQjE,6641
 groundx/core/__init__.py,sha256=lTcqUPXcx4112yLDd70RAPeqq6tu3eFMe1pKOqkW9JQ,1562
 groundx/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
-groundx/core/client_wrapper.py,sha256=k60nUQUg22U-y88dW1rgM5sq-Y7zKrBPR8BZoDO8gS4,1822
+groundx/core/client_wrapper.py,sha256=SMtUbBSxFAaKECURzzg47mANqsc3Pn64oEM4Utzqlzw,1822
 groundx/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 groundx/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 groundx/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -48,7 +48,7 @@ groundx/extract/services/.DS_Store,sha256=1lFlJ5EFymdzGAUAaI30vcaaLHt3F1LwpG7xIL
 groundx/extract/services/__init__.py,sha256=Zf-PjjmUo5Nv1BaEQOjaFhI3QtOn7xSZo0Mccf9DOg0,249
 groundx/extract/services/csv.py,sha256=9ugPXvJYioM42wE2o4uXWyIlHMqkK_F6dCDpL_kyCzU,2132
 groundx/extract/services/logger.py,sha256=yuQ4eBs5eiHVZhlWlrROKNLuJ72G9hlJCcbT93lQO0o,3201
-groundx/extract/services/logging_cfg.py,sha256=krwfflUgYTGobPXC341cKbpZZFiKawSfK0J429lybq8,1619
+groundx/extract/services/logging_cfg.py,sha256=WHlV4_ThUC5-dqAyicGpLmSId34RoCFf9xfGLw-HCYQ,1601
 groundx/extract/services/ratelimit.py,sha256=PU-9YV9dhzyci0A5knlGEJvi4Jdyl9-hguI0v_DFnYA,3206
 groundx/extract/services/sheets_client.py,sha256=0Sgy6dKYnjwdZ1He3m489D3mQ1038k5xBzbvgKO4BSs,4859
 groundx/extract/services/status.py,sha256=n4_cP-1ZfH8KShlif16bwaSBhtI-7lI_5ecYgTvcKRc,6700
@@ -57,8 +57,8 @@ groundx/extract/services/upload_minio.py,sha256=i4i5-_ER9_WvEKhYPIuqsg6oZckZdbA4
 groundx/extract/services/upload_s3.py,sha256=0jUC0V2qz4W-cavt4HaAxkOKAj5SdeGt1L-Dv4A5e50,2310
 groundx/extract/services/utility.py,sha256=nlAVgSFpzo0LPrm5dqexn2dmDa3cFmAmJpVHFE2rgnM,1321
 groundx/extract/settings/__init__.py,sha256=1YJcL6whtsHNVd9AuOzdIx3vM5xeu5m6e4U5V39McmA,277
-groundx/extract/settings/settings.py,sha256=5J0H8Dvw_6XYLIoQhoj9zRxz9lU2lixBkVPgDeuOGLo,4318
-groundx/extract/settings/test_settings.py,sha256=n56UMaIcK7_rN5dUx1CNXsn0Yy7CI4g9jWDwLdl9NKE,18861
+groundx/extract/settings/settings.py,sha256=QenatSQyWIB1Log7jdai-YzkaGMhVP-c1k-nIP_mZgo,5331
+groundx/extract/settings/test_settings.py,sha256=9pEa3_MTY2CFlAnv5YqGojplm3Z8eFpRhclQlIem0Vo,18743
 groundx/extract/tasks/__init__.py,sha256=fEtUoLXI2vNlbcogE5FmRk2t0ZRuM4xjFK7S4BF1Rws,115
 groundx/extract/tasks/utility.py,sha256=6pJG0SLsj_zTtdFbMqXIUmbIH3kGLbYpOTQKweIIQcY,736
 groundx/extract/utility/__init__.py,sha256=Wf8-yEKavFfI8VnoKevfuFK-SarD0FxAWYWydo1F-Ug,240
@@ -70,14 +70,14 @@ groundx/groups/raw_client.py,sha256=nP9yFh7MexjDUQU8TtB5j-HAmZJjQWOd78hu-KeMnRs,
 groundx/health/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 groundx/health/client.py,sha256=kcGIlqCEzBl6fuwJaf3x-obOagXxyAlEFaPRH3qgdDs,4566
 groundx/health/raw_client.py,sha256=_TDa-O13PtC0RYCAq4bx5FESz1oLDLp9WExyOKjsIjs,7430
-groundx/ingest.py,sha256=yMX39sDmm0NfdojkBtPxcIBGNXn33djCJO5nlcIKX3c,24780
+groundx/ingest.py,sha256=QTwsLUdzL6CIvwCb0jep4zHGQebpKbS-cwgG4i9QgiQ,26283
 groundx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 groundx/search/__init__.py,sha256=Y1EKHPBEh-ebo1YOikCHTHU9E8kBP2s7K4J_kZGzcOA,165
 groundx/search/client.py,sha256=ArfAbcQGS6eCWuMU5Ld-AQ8nB0Vh-4Jec3tt05QJtuM,13783
 groundx/search/raw_client.py,sha256=_qO5u62e1d0rVbRUeyJFdt85v7WT_bSHcSLf8wJvfgQ,19545
 groundx/search/types/__init__.py,sha256=sy0s9qFdeT4Q3SJxK6hrulnsPirVpKkjxI29OqLTY0s,185
 groundx/search/types/search_content_request_id.py,sha256=us7mYdzR0qPur_wR5I9BhHaLEzC5nLBRna6-xq4M1ec,128
-groundx/types/__init__.py,sha256=da_CncK5LZVjbGFfmgZPiT3GT2gXX0bgMql1rf1EQbQ,5410
+groundx/types/__init__.py,sha256=1TxoLVGJWBx8dS5hL-fZzjTOvUmrsYGyg1USvw6paLQ,5187
 groundx/types/bounding_box_detail.py,sha256=5_l3vFNIs2n-U2VXEpyPRcTcFKpMWrpvzQiIL88XNEs,1796
 groundx/types/bucket_detail.py,sha256=sYKzUCPCAosh2jTFfSDtSn0bEsLL228kLjA51jFEYwY,1475
 groundx/types/bucket_list_response.py,sha256=m1lO4PElbxc5VFCLchPtdWfCTlfrm8enpTe3bg1ng7Y,1060
@@ -126,32 +126,30 @@ groundx/types/subscription_detail.py,sha256=GEEivqyiLsZtd8Ow7mqqwF1y0m0tHD-t9r9d
 groundx/types/subscription_detail_meters.py,sha256=vGqiR2uupVh5177DfOghjoe5mwzVhoWljKzPF-twUc0,794
 groundx/types/website_source.py,sha256=53jWDBtSrJVOsBVtVbZbjhEAsd0QGkXa7IuKO4AooLs,1542
 groundx/types/workflow_apply_request.py,sha256=BooXhqjiXftutycdR4GEARPvOcK-tMEKDRS02zFQH0o,755
-groundx/types/workflow_detail.py,sha256=xWIFGayvsFLPHvzRY1MyvojNIYMv4eIiJBWP7Xj332w,1639
+groundx/types/workflow_detail.py,sha256=St29Dbw7zTAfZtTcrGQcL5Mj6ixyasHA2hRdeinRxS0,1864
+groundx/types/workflow_detail_chunk_strategy.py,sha256=GT5tU7Eh7KaWqtKQmR_xjJA78d74yHRfv2E21u-yGUE,170
 groundx/types/workflow_detail_relationships.py,sha256=lw-7OcI595j_1wjC1Rd4rFoPk4UjwniF3SKydX5mlG4,1072
 groundx/types/workflow_engine.py,sha256=J_PXgGLUrpY7Ci-A6jx1vq7_9h-3OZTMrxdru0DkieI,1977
 groundx/types/workflow_engine_reasoning_effort.py,sha256=xQma7tB5cVPvaj5WqmBe1scPqeDWDDsTe2TlDXXnOVs,189
 groundx/types/workflow_engine_service.py,sha256=8EPL1ffSnv1r547eixSryVqO-X18TYFyHZotqw1qHA4,211
-groundx/types/workflow_prompt.py,sha256=r0ei5qpw_cv-FmpG1Z5lfr9QteOcB0Kh_rGomRqRtTU,1261
+groundx/types/workflow_prompt.py,sha256=uAKLSm57MJcgum5libVrK8d-4-MmidptdQ7IfxmkTx0,1132
 groundx/types/workflow_prompt_group.py,sha256=iT3QibXPAVyA_aAC8Z2aEqDul1xelbFOcsjYnHrPB6Q,724
-groundx/types/workflow_prompt_role.py,sha256=oasiW8V0qVZGMSULH0cPRySvf28SX4ESbzyfECrkIX0,173
+groundx/types/workflow_prompt_role.py,sha256=_O50xjX0RsXYvvg3RlzdhoI4rF8ZG7yDYETjHJnOdaE,186
+groundx/types/workflow_request.py,sha256=BKYlv7_RWYDrDWBITKWy5MO8ZojKW33_b_6j0LV3lY8,1022
+groundx/types/workflow_request_chunk_strategy.py,sha256=36SwJuoQ1-9nB0mWpw9ois3IcLZBYFVzAIB2EZfHyRA,171
 groundx/types/workflow_response.py,sha256=5SFtWKKTQW3L26Tv02Cc0aI4oZN-3NaCwzNk5WD762I,582
-groundx/types/workflow_step.py,sha256=PkwIsR9IRjcrFtUDbjHAvkEhIczUJQtp88WjcbYdZsw,799
-groundx/types/workflow_steps.py,sha256=oGf-zP5p6kigPe_GT4z9xaVlYTuHYxakbvVlC4e39YU,2102
-groundx/types/workflow_steps_chunk_instruct.py,sha256=Gszs6Oh3fGtLaWunxq_wpdsGt3MNOm2TIDH-S2CDojU,907
-groundx/types/workflow_steps_chunk_summary.py,sha256=US2EY9Id5jIo7Naz-2iogfudE2CmxJTTYnr8UPD0g10,1058
-groundx/types/workflow_steps_doc_keys.py,sha256=5GvJWaF3NVK8thxWNtJEEr6jW_2I3T2EE1Chqd8_ZU8,729
-groundx/types/workflow_steps_doc_summary.py,sha256=alzXvPCXJ-XGIhbXE88HbklMcFwc0frSh-aW674SgrM,838
-groundx/types/workflow_steps_search_query.py,sha256=DbsIChD58JB6aF-0tUhf8jwOQ9HF68s8_kviAzr--YI,733
-groundx/types/workflow_steps_sect_instruct.py,sha256=EJhl5__qyC1o1-CQh9HL-bwGmWsXilLYNcdFJhlp7YA,606
-groundx/types/workflow_steps_sect_summary.py,sha256=76MalXb7GO0m4x-UllrYkkvQbSjx9Qu1WROlxvyNCRU,781
+groundx/types/workflow_step.py,sha256=YRTW79pJMb76NckGK3tWke8CCjOPkUfLD1YQ76pA5Zk,1248
+groundx/types/workflow_step_config.py,sha256=yef00gJz-j0we-w-fa68-G9rDOMxH0m-MptO_jvOjxM,1077
+groundx/types/workflow_step_config_field.py,sha256=20Jb6xd7tURZEzRZoWLq3xzbzgofvL_6Tf3FDVbpiTA,259
+groundx/types/workflow_steps.py,sha256=qK4vyjT7nc3FY2dlS_gVGl_p9r9DlWUOh7FnNuFHf9w,1561
 groundx/types/workflows_response.py,sha256=lKy6N4r0jTVK2S3mnmTzAvPjkmOl4BTnU2q0k-TbqjQ,597
 groundx/version.py,sha256=1yVogKaq260fQfckM2RYN2144SEw0QROsZW8ICtkG4U,74
 groundx/workflows/__init__.py,sha256=qS5TOSfeClFC9oVjYFqCMuNlZOHTcU0cNOrNmDpflQs,163
-groundx/workflows/client.py,sha256=Z6Ua0DGLMAK0s2HtrbJjLCQe2fp96gyXu8BHX4z276c,19304
-groundx/workflows/raw_client.py,sha256=SQ3CBWTIwatEdz1igO-1p-dGrnAHXIliK_ksT8ImWkM,30799
+groundx/workflows/client.py,sha256=-aAqa43LQMx62iwcH4QEEHPSOeT2cNj9334SBZMGy4s,19801
+groundx/workflows/raw_client.py,sha256=eoIedOkJtF01d4VC8W0q3eH9TgU5Qi40XwEbQl7MQsA,31368
 groundx/workflows/types/__init__.py,sha256=r-3IiPgf480gPstg62dFXecJQNOoTaJzcqul0_8_8DM,182
 groundx/workflows/types/workflows_get_request_id.py,sha256=pGcBQwEQYDxoxBGpACdy3zf1Qc2rjcN3zv-TZXHu9p0,127
-groundx-2.6.3.dist-info/LICENSE,sha256=dFE6nY1bHnSn6NqmdlghlU1gQqLqYNphrceGVehSa7o,1065
-groundx-2.6.3.dist-info/METADATA,sha256=1rpHjrR0rnvw5E946QjT0xTZDLIbySvcE5VPXGsceaQ,5919
-groundx-2.6.3.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-groundx-2.6.3.dist-info/RECORD,,
+groundx-2.7.2.dist-info/LICENSE,sha256=dFE6nY1bHnSn6NqmdlghlU1gQqLqYNphrceGVehSa7o,1065
+groundx-2.7.2.dist-info/METADATA,sha256=OoCyGJeG-ncAhQw2WMx0cttcd_9-m6cZPXRR6INzRf0,5919
+groundx-2.7.2.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+groundx-2.7.2.dist-info/RECORD,,
@@ -1,24 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-
-import pydantic
-import typing_extensions
-from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from ..core.serialization import FieldMetadata
-from .workflow_step import WorkflowStep
-
-
-class WorkflowStepsChunkInstruct(UniversalBaseModel):
-    figure: typing.Optional[WorkflowStep] = None
-    json_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="json")] = None
-    table_figure: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="table-figure")] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
@@ -1,26 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-
-import pydantic
-import typing_extensions
-from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from ..core.serialization import FieldMetadata
-from .workflow_step import WorkflowStep
-
-
-class WorkflowStepsChunkSummary(UniversalBaseModel):
-    all_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="all")] = None
-    figure: typing.Optional[WorkflowStep] = None
-    json_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="json")] = None
-    table: typing.Optional[WorkflowStep] = None
-    table_figure: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="table-figure")] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
@@ -1,22 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-
-import pydantic
-import typing_extensions
-from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from ..core.serialization import FieldMetadata
-from .workflow_step import WorkflowStep
-
-
-class WorkflowStepsDocKeys(UniversalBaseModel):
-    all_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="all")] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
@@ -1,22 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-
-import pydantic
-import typing_extensions
-from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from ..core.serialization import FieldMetadata
-from .workflow_step import WorkflowStep
-
-
-class WorkflowStepsSearchQuery(UniversalBaseModel):
-    all_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="all")] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
@@ -1,20 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-
-import pydantic
-from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from .workflow_step import WorkflowStep
-
-
-class WorkflowStepsSectInstruct(UniversalBaseModel):
-    table: typing.Optional[WorkflowStep] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
@@ -1,23 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-
-import pydantic
-import typing_extensions
-from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from ..core.serialization import FieldMetadata
-from .workflow_step import WorkflowStep
-
-
-class WorkflowStepsSectSummary(UniversalBaseModel):
-    all_: typing_extensions.Annotated[typing.Optional[WorkflowStep], FieldMetadata(alias="all")] = None
-    table: typing.Optional[WorkflowStep] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow