morphik 0.1.9__tar.gz → 0.2.1__tar.gz

This diff compares the contents of two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
@@ -44,3 +44,8 @@ ui-component/notebook-storage/notebooks.json
  ee/ui-component/package-lock.json
  ee/ee_tokens/gdrive_token_dev_user.pickle
  core/tests/integration/test_data/version_test_1.txt
+ ee/ee_tokens/*
+
+ migrations
+ ugly-onetime-code/*
+ trees
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: morphik
- Version: 0.1.9
+ Version: 0.2.1
  Summary: Morphik Python Client
  Author-email: Morphik <founders@morphik.ai>
  Requires-Python: >=3.8
@@ -12,4 +12,4 @@ __all__ = [
  "Document",
  ]

- __version__ = "0.1.8"
+ __version__ = "0.2.1"
@@ -172,8 +172,14 @@ class _MorphikClientLogic:
  rules: Optional[List[RuleOrDict]],
  folder_name: Optional[str],
  end_user_id: Optional[str],
+ use_colpali: Optional[bool] = None,
  ) -> Dict[str, Any]:
- """Prepare form data for ingest_file endpoint"""
+ """Prepare form data for ingest_file endpoint.
+
+ All parameters are included in the multipart body so that the server
+ never relies on query-string values. *use_colpali* is therefore always
+ embedded here when provided.
+ """
  form_data = {
  "metadata": json.dumps(metadata or {}),
  "rules": json.dumps([self._convert_rule(r) for r in (rules or [])]),
@@ -182,6 +188,12 @@ class _MorphikClientLogic:
  form_data["folder_name"] = folder_name
  if end_user_id:
  form_data["end_user_id"] = end_user_id
+
+ # Only include the flag when caller supplied a specific value to avoid
+ # overriding server defaults unintentionally.
+ if use_colpali is not None:
+ form_data["use_colpali"] = str(use_colpali).lower()
+
  return form_data

  def _prepare_ingest_files_form_data(
@@ -208,10 +220,15 @@ class _MorphikClientLogic:
  data = {
  "metadata": json.dumps(metadata or {}),
  "rules": json.dumps(converted_rules),
- # use_colpali is a query parameter, not a form field
  "parallel": str(parallel).lower(),
  }

+ # Always carry use_colpali in the body for consistency with single-file
+ # ingestion. The API treats missing values as "true" for backward
+ # compatibility, hence we only add it when explicitly provided.
+ if use_colpali is not None:
+ data["use_colpali"] = str(use_colpali).lower()
+
  if folder_name:
  data["folder_name"] = folder_name
  if end_user_id:
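The two hunks above move `use_colpali` out of the query string and into the multipart form body, serialised as a lowercase string and added only when the caller supplies an explicit value. A rough sketch of the resulting form body (every value below is illustrative, not taken from the package):

```python
# Illustrative shape of the form body produced by the helpers above when the
# caller passes use_colpali explicitly.
form_data = {
    "metadata": '{"department": "marketing"}',  # JSON-encoded metadata
    "rules": "[]",                              # JSON-encoded, converted rules
    "parallel": "true",                         # batch ingestion only
    "use_colpali": "true",                      # omitted entirely when the caller passes None
}
```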
@@ -234,6 +251,7 @@ class _MorphikClientLogic:
  prompt_overrides: Optional[Dict],
  folder_name: Optional[Union[str, List[str]]],
  end_user_id: Optional[str],
+ chat_id: Optional[str] = None,
  schema: Optional[Union[Type[BaseModel], Dict[str, Any]]] = None,
  ) -> Dict[str, Any]:
  """Prepare request for query endpoint"""
@@ -254,6 +272,8 @@ class _MorphikClientLogic:
  payload["folder_name"] = folder_name
  if end_user_id:
  payload["end_user_id"] = end_user_id
+ if chat_id:
+ payload["chat_id"] = chat_id

  # Add schema to payload if provided
  if schema:
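These hunks thread the new `chat_id` through the query request builder, again only when a value is supplied, so callers that never pass it build exactly the same payload as before. A minimal mirror of that behaviour (the query text and identifier are illustrative):

```python
# chat_id is copied into the JSON payload only when the caller provided one.
payload = {"query": "What changed between releases?"}  # other query fields omitted
chat_id = "release-notes-thread"                       # illustrative identifier
if chat_id:
    payload["chat_id"] = chat_id
```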
@@ -3,6 +3,7 @@ import logging
  from io import BytesIO, IOBase
  from pathlib import Path
  from typing import Any, BinaryIO, Dict, List, Optional, Type, Union
+ from datetime import datetime

  import httpx
  from pydantic import BaseModel
@@ -163,14 +164,15 @@ class AsyncFolder:
  files = {"file": (filename, file_obj)}

  # Create form data
- form_data = self._client._logic._prepare_ingest_file_form_data(metadata, rules, self._name, None)
+ form_data = self._client._logic._prepare_ingest_file_form_data(
+ metadata, rules, self._name, None, use_colpali
+ )

  response = await self._client._request(
  "POST",
  "ingest/file",
  data=form_data,
  files=files,
- params={"use_colpali": str(use_colpali).lower()},
  )
  doc = self._client._logic._parse_document_response(response)
  doc._client = self._client
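With this change `AsyncFolder.ingest_file` forwards `use_colpali` through the shared form-data helper and drops the separate `params=` query string. A hedged usage sketch; the `folder` object and any parts of the `ingest_file` signature not visible in this diff are assumptions:

```python
# Sketch only: `folder` is assumed to be an AsyncFolder obtained from the client,
# and ingest_file is assumed to accept a file path plus the keywords shown here.
doc = await folder.ingest_file(
    "notes/roadmap.txt",
    metadata={"topic": "roadmap"},
    use_colpali=True,  # now travels inside the multipart body, not the URL
)
print(doc)
```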
@@ -215,7 +217,6 @@ class AsyncFolder:
  "ingest/files",
  data=data,
  files=file_objects,
- params={"use_colpali": str(use_colpali).lower()},
  )

  if response.get("errors"):
@@ -354,6 +355,7 @@ class AsyncFolder:
  prompt_overrides: Optional[Union[QueryPromptOverrides, Dict[str, Any]]] = None,
  additional_folders: Optional[List[str]] = None,
  schema: Optional[Union[Type[BaseModel], Dict[str, Any]]] = None,
+ chat_id: Optional[str] = None,
  ) -> CompletionResponse:
  """
  Generate completion using relevant chunks as context within this folder.
@@ -391,6 +393,7 @@ class AsyncFolder:
  prompt_overrides,
  effective_folder,
  None,
+ chat_id,
  schema,
  )

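The two hunks above add an optional `chat_id` to the folder-scoped completion method (assumed to be `AsyncFolder.query`) and forward it into the shared request builder. A hedged sketch of multi-turn querying; the identifier and question text are illustrative, and whether `chat_id` may be an arbitrary caller-chosen string is not confirmed by this diff:

```python
# Sketch: reuse the same chat_id across turns so the server can thread the
# conversation. `folder` is assumed to be an AsyncFolder instance.
first = await folder.query("Summarise the Q3 planning documents", chat_id="q3-review")
follow_up = await folder.query("Which risks did that summary mention?", chat_id="q3-review")
print(follow_up.completion)  # CompletionResponse field name assumed
```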
@@ -474,9 +477,7 @@ class AsyncFolder:
  List[FinalChunkResult]: List of chunk results
  """
  merged = self._merge_folders(additional_folders)
- request = self._client._logic._prepare_batch_get_chunks_request(
- sources, merged, None, use_colpali
- )
+ request = self._client._logic._prepare_batch_get_chunks_request(sources, merged, None, use_colpali)
  response = await self._client._request("POST", "batch/chunks", data=request)
  return self._client._logic._parse_chunk_result_list_response(response)

@@ -670,14 +671,14 @@ class AsyncUserScope:
  # Prepare multipart form data
  files = {"file": (filename, file_obj)}

- # Add metadata and rules
+ # Add metadata, rules and scoping information
  data = {
  "metadata": json.dumps(metadata or {}),
  "rules": json.dumps([self._client._convert_rule(r) for r in (rules or [])]),
- "end_user_id": self._end_user_id, # Add end user ID here
+ "end_user_id": self._end_user_id,
+ "use_colpali": str(use_colpali).lower(),
  }

- # Add folder name if scoped to a folder
  if self._folder_name:
  data["folder_name"] = self._folder_name

@@ -738,9 +739,9 @@ class AsyncUserScope:
  data = {
  "metadata": json.dumps(metadata or {}),
  "rules": json.dumps(converted_rules),
- "use_colpali": str(use_colpali).lower() if use_colpali is not None else None,
  "parallel": str(parallel).lower(),
- "end_user_id": self._end_user_id, # Add end user ID here
+ "end_user_id": self._end_user_id,
+ "use_colpali": str(use_colpali).lower(),
  }

  # Add folder name if scoped to a folder
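`AsyncUserScope` now writes both `end_user_id` and `use_colpali` straight into the form body for single-file and batch ingestion. A hedged sketch of the batch path; how a user scope is obtained is not shown in this diff, so `user_scope` is assumed to already exist:

```python
# Sketch: batch ingestion under an end-user scope. `user_scope` is assumed to be
# an AsyncUserScope instance; keyword names beyond the fields visible above
# (metadata, rules, use_colpali, parallel) are assumptions.
docs = await user_scope.ingest_files(
    ["contracts/a.pdf", "contracts/b.pdf"],
    metadata={"source": "bulk-upload"},
    use_colpali=True,  # serialised into the form body as "true"
    parallel=True,
)
```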
@@ -752,7 +753,6 @@ class AsyncUserScope:
  "ingest/files",
  data=data,
  files=file_objects,
- params={"use_colpali": str(use_colpali).lower()},
  )

  if response.get("errors"):
@@ -891,6 +891,7 @@ class AsyncUserScope:
  prompt_overrides: Optional[Union[QueryPromptOverrides, Dict[str, Any]]] = None,
  additional_folders: Optional[List[str]] = None,
  schema: Optional[Union[Type[BaseModel], Dict[str, Any]]] = None,
+ chat_id: Optional[str] = None,
  ) -> CompletionResponse:
  """
  Generate completion using relevant chunks as context, scoped to the end user.
@@ -928,6 +929,7 @@ class AsyncUserScope:
  prompt_overrides,
  effective_folder,
  self._end_user_id,
+ chat_id,
  schema,
  )

@@ -1345,14 +1347,13 @@ class AsyncMorphik:
  files = {"file": (filename, file_obj)}

  # Create form data
- form_data = self._logic._prepare_ingest_file_form_data(metadata, rules, None, None)
+ form_data = self._logic._prepare_ingest_file_form_data(metadata, rules, None, None, use_colpali)

  response = await self._request(
  "POST",
  "ingest/file",
  data=form_data,
  files=files,
- params={"use_colpali": str(use_colpali).lower()},
  )
  doc = self._logic._parse_document_response(response)
  doc._client = self
@@ -1398,7 +1399,6 @@ class AsyncMorphik:
  "ingest/files",
  data=data,
  files=file_objects,
- params={"use_colpali": str(use_colpali).lower()},
  )

  if response.get("errors"):
@@ -1554,6 +1554,7 @@ class AsyncMorphik:
  include_paths: bool = False,
  prompt_overrides: Optional[Union[QueryPromptOverrides, Dict[str, Any]]] = None,
  folder_name: Optional[Union[str, List[str]]] = None,
+ chat_id: Optional[str] = None,
  schema: Optional[Union[Type[BaseModel], Dict[str, Any]]] = None,
  ) -> CompletionResponse:
  """
@@ -1659,6 +1660,7 @@ class AsyncMorphik:
  prompt_overrides,
  effective_folder,
  None,
+ chat_id,
  schema,
  )

@@ -1676,6 +1678,45 @@ class AsyncMorphik:
  response = await self._request("POST", "query", data=payload)
  return self._logic._parse_completion_response(response)

+ async def agent_query(self, query: str) -> Dict[str, Any]:
+ """
+ Execute an agentic query with tool access and conversation handling.
+
+ The agent can autonomously use various tools to answer complex queries including:
+ - Searching and retrieving relevant documents
+ - Analyzing document content
+ - Performing calculations and data processing
+ - Creating summaries and reports
+ - Managing knowledge graphs
+
+ Args:
+ query: Natural language query for the Morphik agent
+
+ Returns:
+ Dict[str, Any]: Agent response with potential tool execution results and sources
+
+ Example:
+ ```python
+ # Simple query
+ result = await db.agent_query("What are the main trends in our Q3 sales data?")
+ print(result["response"])
+
+ # Complex analysis request
+ result = await db.agent_query(
+ "Analyze all documents from the marketing department, "
+ "identify key performance metrics, and create a summary "
+ "with actionable insights"
+ )
+ print(result["response"])
+
+ # Tool usage is automatic - the agent will decide which tools to use
+ # based on the query requirements
+ ```
+ """
+ request = {"query": query}
+ response = await self._request("POST", "agent", data=request)
+ return response
+
  async def list_documents(
  self,
  skip: int = 0,
@@ -2225,9 +2266,7 @@ class AsyncMorphik:
  print(f"Chunk from {chunk.document_id}, number {chunk.chunk_number}: {chunk.content[:50]}...")
  ```
  """
- request = self._logic._prepare_batch_get_chunks_request(
- sources, folder_name, None, use_colpali
- )
+ request = self._logic._prepare_batch_get_chunks_request(sources, folder_name, None, use_colpali)
  response = await self._request("POST", "batch/chunks", data=request)
  return self._logic._parse_chunk_result_list_response(response)

@@ -2552,3 +2591,77 @@ class AsyncMorphik:
  raise RuntimeError(graph.error or "Graph processing failed")
  await asyncio.sleep(check_interval_seconds)
  raise TimeoutError("Timed out waiting for graph completion")
+
+ async def ping(self) -> Dict[str, Any]:
+ """Simple health-check call to ``/ping`` endpoint."""
+ return await self._request("GET", "ping")
+
+ # ------------------------------------------------------------------
+ # Chat API ----------------------------------------------------------
+ # ------------------------------------------------------------------
+ async def get_chat_history(self, chat_id: str) -> List[Dict[str, Any]]:
+ """Return the full message history for *chat_id*."""
+ return await self._request("GET", f"chat/{chat_id}")
+
+ async def list_chat_conversations(self, limit: int = 100) -> List[Dict[str, Any]]:
+ """List recent chat conversations for the current user (async)."""
+ limit_capped = max(1, min(limit, 500))
+ return await self._request("GET", "chats", params={"limit": limit_capped})
+
+ # ------------------------------------------------------------------
+ # Usage API ---------------------------------------------------------
+ # ------------------------------------------------------------------
+ async def get_usage_stats(self) -> Dict[str, int]:
+ """Return cumulative token usage statistics (async)."""
+ return await self._request("GET", "usage/stats")
+
+ async def get_recent_usage(
+ self,
+ operation_type: Optional[str] = None,
+ since: Optional["datetime"] = None,
+ status: Optional[str] = None,
+ ) -> List[Dict[str, Any]]:
+ """Return recent usage entries with optional filtering (async)."""
+ from datetime import datetime
+
+ params: Dict[str, Any] = {}
+ if operation_type:
+ params["operation_type"] = operation_type
+ if since:
+ params["since"] = since.isoformat() if isinstance(since, datetime) else str(since)
+ if status:
+ params["status"] = status
+ return await self._request("GET", "usage/recent", params=params)
+
+ # ------------------------------------------------------------------
+ # Graph helpers -----------------------------------------------------
+ # ------------------------------------------------------------------
+ async def get_graph_visualization(
+ self,
+ name: str,
+ folder_name: Optional[Union[str, List[str]]] = None,
+ end_user_id: Optional[str] = None,
+ ) -> Dict[str, Any]:
+ """Fetch nodes & links for visualising *name* graph (async)."""
+ params: Dict[str, Any] = {}
+ if folder_name is not None:
+ params["folder_name"] = folder_name
+ if end_user_id is not None:
+ params["end_user_id"] = end_user_id
+ return await self._request("GET", f"graph/{name}/visualization", params=params)
+
+ async def check_workflow_status(
+ self, workflow_id: str, run_id: Optional[str] = None
+ ) -> Dict[str, Any]:
+ """Poll the status of an async graph build/update workflow."""
+ params = {"run_id": run_id} if run_id else None
+ return await self._request("GET", f"graph/workflow/{workflow_id}/status", params=params)
+
+ # ------------------------------------------------------------------
+ # Document download helpers ----------------------------------------
+ # ------------------------------------------------------------------
+ async def get_document_download_url(self, document_id: str, expires_in: int = 3600) -> Dict[str, Any]:
+ """Generate a presigned download URL for a document (async)."""
+ return await self._request(
+ "GET", f"documents/{document_id}/download_url", params={"expires_in": expires_in}
+ )
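The block above gives the async client thin wrappers over the new `/ping`, `/chat/{chat_id}`, `/chats`, `/usage/stats`, `/usage/recent`, graph-visualization, workflow-status and document-download endpoints. A hedged tour of them: `db` is assumed to be an already-constructed `AsyncMorphik` client, response key names are assumptions, and whether `chat_id` may be an arbitrary caller-chosen string is not confirmed by this diff.

```python
async def inspect_workspace(db) -> None:
    # db: an AsyncMorphik instance; construction is not shown in this diff.
    print(await db.ping())  # typically {"status": "ok", ...}

    # Continue a conversation by reusing a chat_id, then read its history back.
    await db.query("What did we ship in 0.2.1?", chat_id="release-chat")
    history = await db.get_chat_history("release-chat")
    print(len(history), "messages so far")

    for chat in await db.list_chat_conversations(limit=10):
        print(chat.get("chat_id"))  # key name assumed

    print(await db.get_usage_stats())

    viz = await db.get_graph_visualization("knowledge_graph")  # graph name illustrative
    print(len(viz.get("nodes", [])), "nodes")                  # response keys assumed

    link = await db.get_document_download_url("doc_123", expires_in=600)  # id illustrative
    print(link)
```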
@@ -3,6 +3,7 @@ import logging
  from io import BytesIO, IOBase
  from pathlib import Path
  from typing import Any, BinaryIO, Dict, List, Optional, Type, Union
+ from datetime import datetime

  import httpx
  from pydantic import BaseModel
@@ -163,15 +164,16 @@ class Folder:
  files = {"file": (filename, file_obj)}

  # Create form data
- form_data = self._client._logic._prepare_ingest_file_form_data(metadata, rules, self._name, None)
+ form_data = self._client._logic._prepare_ingest_file_form_data(
+ metadata, rules, self._name, None, use_colpali
+ )

- # use_colpali should be a query parameter as defined in the API
+ # use_colpali flag is included in multipart form data for consistency
  response = self._client._request(
  "POST",
  "ingest/file",
  data=form_data,
  files=files,
- params={"use_colpali": str(use_colpali).lower()},
  )
  doc = self._client._logic._parse_document_response(response)
  doc._client = self._client
@@ -216,7 +218,6 @@ class Folder:
  "ingest/files",
  data=data,
  files=file_objects,
- params={"use_colpali": str(use_colpali).lower()},
  )

  if response.get("errors"):
@@ -367,6 +368,7 @@ class Folder:
  prompt_overrides: Optional[Union[QueryPromptOverrides, Dict[str, Any]]] = None,
  additional_folders: Optional[List[str]] = None,
  schema: Optional[Union[Type[BaseModel], Dict[str, Any]]] = None,
+ chat_id: Optional[str] = None,
  ) -> CompletionResponse:
  """
  Generate completion using relevant chunks as context within this folder.
@@ -404,6 +406,7 @@ class Folder:
  prompt_overrides,
  effective_folder,
  None, # end_user_id not supported at this level
+ chat_id,
  schema,
  )

@@ -488,9 +491,7 @@ class Folder:
  List[FinalChunkResult]: List of chunk results
  """
  merged = self._merge_folders(additional_folders)
- request = self._client._logic._prepare_batch_get_chunks_request(
- sources, merged, None, use_colpali
- )
+ request = self._client._logic._prepare_batch_get_chunks_request(sources, merged, None, use_colpali)

  response = self._client._request("POST", "batch/chunks", data=request)
  return self._client._logic._parse_chunk_result_list_response(response)
@@ -703,24 +704,22 @@ class UserScope:
  # Prepare multipart form data
  files = {"file": (filename, file_obj)}

- # Add metadata and rules
+ # Add metadata, rules and scoping information
  form_data = {
  "metadata": json.dumps(metadata or {}),
  "rules": json.dumps([self._client._convert_rule(r) for r in (rules or [])]),
- "end_user_id": self._end_user_id, # Add end user ID here
+ "end_user_id": self._end_user_id,
+ "use_colpali": str(use_colpali).lower(),
  }

- # Add folder name if scoped to a folder
  if self._folder_name:
  form_data["folder_name"] = self._folder_name

- # use_colpali should be a query parameter as defined in the API
  response = self._client._request(
  "POST",
  "ingest/file",
  data=form_data,
  files=files,
- params={"use_colpali": str(use_colpali).lower()},
  )
  doc = self._client._logic._parse_document_response(response)
  doc._client = self._client
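The synchronous `UserScope.ingest_file` mirrors the async change: `end_user_id` and `use_colpali` are both plain form fields now. A hedged sketch; the factory that produces a `UserScope` is not shown in this diff, and signature details beyond the visible parameters are assumptions:

```python
# Sketch: per-user single-file ingestion after this change. `user_scope` is
# assumed to be a UserScope instance; both end_user_id and use_colpali are sent
# as ordinary multipart fields rather than query parameters.
doc = user_scope.ingest_file(
    "tickets/ticket-1234.txt",
    metadata={"channel": "support"},
    use_colpali=True,
)
```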
@@ -778,9 +777,9 @@ class UserScope:
  data = {
  "metadata": json.dumps(metadata or {}),
  "rules": json.dumps(converted_rules),
- # Remove use_colpali from form data - it should be a query param
  "parallel": str(parallel).lower(),
  "end_user_id": self._end_user_id, # Add end user ID here
+ "use_colpali": str(use_colpali).lower(),
  }

  # Add folder name if scoped to a folder
@@ -792,7 +791,6 @@ class UserScope:
  "ingest/files",
  data=data,
  files=file_objects,
- params={"use_colpali": str(use_colpali).lower()},
  )

  if response.get("errors"):
@@ -953,6 +951,7 @@ class UserScope:
  prompt_overrides: Optional[Union[QueryPromptOverrides, Dict[str, Any]]] = None,
  additional_folders: Optional[List[str]] = None,
  schema: Optional[Union[Type[BaseModel], Dict[str, Any]]] = None,
+ chat_id: Optional[str] = None,
  ) -> CompletionResponse:
  """
  Generate completion using relevant chunks as context as this end user.
@@ -990,6 +989,7 @@ class UserScope:
  prompt_overrides,
  effective_folder,
  self._end_user_id,
+ chat_id,
  schema,
  )

@@ -1088,9 +1088,7 @@ class UserScope:
  List[FinalChunkResult]: List of chunk results
  """
  merged = self._merge_folders(additional_folders)
- request = self._client._logic._prepare_batch_get_chunks_request(
- sources, merged, None, use_colpali
- )
+ request = self._client._logic._prepare_batch_get_chunks_request(sources, merged, None, use_colpali)

  response = self._client._request("POST", "batch/chunks", data=request)
  return self._client._logic._parse_chunk_result_list_response(response)
@@ -1493,7 +1491,7 @@ class Morphik:
  files = {"file": (filename, file_obj)}

  # Create form data
- form_data = self._logic._prepare_ingest_file_form_data(metadata, rules, None, None)
+ form_data = self._logic._prepare_ingest_file_form_data(metadata, rules, None, None, use_colpali)

  # use_colpali should be a query parameter as defined in the API
  response = self._request(
@@ -1501,7 +1499,6 @@ class Morphik:
  "ingest/file",
  data=form_data,
  files=files,
- params={"use_colpali": str(use_colpali).lower()},
  )
  doc = self._logic._parse_document_response(response)
  doc._client = self
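The synchronous `Morphik.ingest_file` now passes `use_colpali` into the form-data helper and drops the `params=` query string (note that the stale "query parameter" comment survives as unchanged context in the hunk above). A hedged usage sketch; the parts of the signature not visible in this diff are assumptions:

```python
# Sketch: `db` is assumed to be a Morphik client; ingest_file is assumed to
# accept a file path plus metadata and the use_colpali keyword shown above.
doc = db.ingest_file(
    "reports/q3.pdf",
    metadata={"department": "finance"},
    use_colpali=False,  # sent as the form field "use_colpali": "false"
)
print(doc)
```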
@@ -1548,7 +1545,6 @@ class Morphik:
  "ingest/files",
  data=data,
  files=file_objects,
- params={"use_colpali": str(use_colpali).lower()},
  )

  if response.get("errors"):
@@ -1701,6 +1697,7 @@ class Morphik:
  include_paths: bool = False,
  prompt_overrides: Optional[Union[QueryPromptOverrides, Dict[str, Any]]] = None,
  folder_name: Optional[Union[str, List[str]]] = None,
+ chat_id: Optional[str] = None,
  schema: Optional[Union[Type[BaseModel], Dict[str, Any]]] = None,
  ) -> CompletionResponse:
  """
@@ -1807,6 +1804,7 @@ class Morphik:
  prompt_overrides,
  folder_name,
  None, # end_user_id not supported at this level
+ chat_id,
  schema,
  )

@@ -1824,6 +1822,45 @@ class Morphik:
  response = self._request("POST", "query", data=payload)
  return self._logic._parse_completion_response(response)

+ def agent_query(self, query: str) -> Dict[str, Any]:
+ """
+ Execute an agentic query with tool access and conversation handling.
+
+ The agent can autonomously use various tools to answer complex queries including:
+ - Searching and retrieving relevant documents
+ - Analyzing document content
+ - Performing calculations and data processing
+ - Creating summaries and reports
+ - Managing knowledge graphs
+
+ Args:
+ query: Natural language query for the Morphik agent
+
+ Returns:
+ Dict[str, Any]: Agent response with potential tool execution results and sources
+
+ Example:
+ ```python
+ # Simple query
+ result = db.agent_query("What are the main trends in our Q3 sales data?")
+ print(result["response"])
+
+ # Complex analysis request
+ result = db.agent_query(
+ "Analyze all documents from the marketing department, "
+ "identify key performance metrics, and create a summary "
+ "with actionable insights"
+ )
+ print(result["response"])
+
+ # Tool usage is automatic - the agent will decide which tools to use
+ # based on the query requirements
+ ```
+ """
+ request = {"query": query}
+ response = self._request("POST", "agent", data=request)
+ return response
+
  def list_documents(
  self,
  skip: int = 0,
@@ -2368,9 +2405,7 @@ class Morphik:
  print(f"Chunk from {chunk.document_id}, number {chunk.chunk_number}: {chunk.content[:50]}...")
  ```
  """
- request = self._logic._prepare_batch_get_chunks_request(
- sources, folder_name, None, use_colpali
- )
+ request = self._logic._prepare_batch_get_chunks_request(sources, folder_name, None, use_colpali)
  response = self._request("POST", "batch/chunks", data=request)
  return self._logic._parse_chunk_result_list_response(response)

@@ -2729,3 +2764,96 @@ class Morphik:
  raise RuntimeError(graph.error or "Graph processing failed")
  time.sleep(check_interval_seconds)
  raise TimeoutError("Timed out waiting for graph completion")
+
+ def ping(self) -> Dict[str, Any]:
+ """Simple health-check call to the server (``/ping``).
+
+ Returns
+ -------
+ Dict[str, Any]
+ The JSON payload returned by the server, typically
+ ``{"status": "ok", "message": "Server is running"}``.
+ """
+ return self._request("GET", "ping")
+
+ # ------------------------------------------------------------------
+ # Chat API ----------------------------------------------------------
+ # ------------------------------------------------------------------
+ def get_chat_history(self, chat_id: str) -> List[Dict[str, Any]]:
+ """Return the full message history for the given *chat_id*.
+
+ Parameters
+ ----------
+ chat_id:
+ Identifier of the chat conversation returned by previous
+ calls that used ``chat_id``.
+ """
+ return self._request("GET", f"chat/{chat_id}")
+
+ def list_chat_conversations(self, limit: int = 100) -> List[Dict[str, Any]]:
+ """List recent chat conversations available to the current user.
+
+ Parameters
+ ----------
+ limit:
+ Maximum number of conversations to return (1-500).
+ """
+ limit_capped = max(1, min(limit, 500))
+ return self._request("GET", "chats", params={"limit": limit_capped})
+
+ # ------------------------------------------------------------------
+ # Usage API ---------------------------------------------------------
+ # ------------------------------------------------------------------
+ def get_usage_stats(self) -> Dict[str, int]:
+ """Return cumulative usage statistics for the authenticated user."""
+ return self._request("GET", "usage/stats")
+
+ def get_recent_usage(
+ self,
+ operation_type: Optional[str] = None,
+ since: Optional["datetime"] = None,
+ status: Optional[str] = None,
+ ) -> List[Dict[str, Any]]:
+ """Return recent usage records with optional filtering."""
+ from datetime import datetime # Local import ensures small dependency surface
+
+ params: Dict[str, Any] = {}
+ if operation_type:
+ params["operation_type"] = operation_type
+ if since:
+ # Accept either ``str`` or ``datetime`` for *since*
+ params["since"] = since.isoformat() if isinstance(since, datetime) else str(since)
+ if status:
+ params["status"] = status
+ return self._request("GET", "usage/recent", params=params)
+
+ # ------------------------------------------------------------------
+ # Graph helpers -----------------------------------------------------
+ # ------------------------------------------------------------------
+ def get_graph_visualization(
+ self,
+ name: str,
+ folder_name: Optional[Union[str, List[str]]] = None,
+ end_user_id: Optional[str] = None,
+ ) -> Dict[str, Any]:
+ """Fetch nodes & links for visualising *name* graph."""
+ params: Dict[str, Any] = {}
+ if folder_name is not None:
+ params["folder_name"] = folder_name
+ if end_user_id is not None:
+ params["end_user_id"] = end_user_id
+ return self._request("GET", f"graph/{name}/visualization", params=params)
+
+ def check_workflow_status(self, workflow_id: str, run_id: Optional[str] = None) -> Dict[str, Any]:
+ """Poll the status of an asynchronous graph build/update workflow."""
+ params = {"run_id": run_id} if run_id else None
+ return self._request("GET", f"graph/workflow/{workflow_id}/status", params=params)
+
+ # ------------------------------------------------------------------
+ # Document download helpers ----------------------------------------
+ # ------------------------------------------------------------------
+ def get_document_download_url(self, document_id: str, expires_in: int = 3600) -> Dict[str, Any]:
+ """Generate a presigned download URL for a document stored remotely."""
+ return self._request(
+ "GET", f"documents/{document_id}/download_url", params={"expires_in": expires_in}
+ )
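The synchronous client gains the same helper endpoints. The sketch below exercises the usage and workflow helpers; `db` is assumed to be a `Morphik` client instance, and the accepted `operation_type` and `status` strings are not documented in this diff, so the filter values are illustrative:

```python
from datetime import datetime, timedelta

recent = db.get_recent_usage(
    operation_type="query",                    # accepted values are assumptions
    since=datetime.now() - timedelta(days=7),  # datetime or string both handled above
    status="success",                          # accepted values are assumptions
)
for entry in recent:
    print(entry)

status = db.check_workflow_status("wf_123")                          # workflow id illustrative
url_info = db.get_document_download_url("doc_123", expires_in=900)   # document id illustrative
print(status, url_info)
```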
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

  [project]
  name = "morphik"
- version = "0.1.9"
+ version = "0.2.1"
  authors = [
  { name = "Morphik", email = "founders@morphik.ai" },
  ]
5 files without changes