remdb 0.3.103__py3-none-any.whl → 0.3.118__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of remdb has been flagged as potentially problematic.

Files changed (55)
  1. rem/agentic/context.py +28 -24
  2. rem/agentic/mcp/tool_wrapper.py +29 -3
  3. rem/agentic/otel/setup.py +92 -4
  4. rem/agentic/providers/pydantic_ai.py +88 -18
  5. rem/agentic/schema.py +358 -21
  6. rem/agentic/tools/rem_tools.py +3 -3
  7. rem/api/main.py +85 -16
  8. rem/api/mcp_router/resources.py +1 -1
  9. rem/api/mcp_router/server.py +18 -4
  10. rem/api/mcp_router/tools.py +383 -16
  11. rem/api/routers/admin.py +218 -1
  12. rem/api/routers/chat/completions.py +30 -3
  13. rem/api/routers/chat/streaming.py +143 -3
  14. rem/api/routers/feedback.py +12 -319
  15. rem/api/routers/query.py +360 -0
  16. rem/api/routers/shared_sessions.py +13 -13
  17. rem/cli/commands/README.md +237 -64
  18. rem/cli/commands/cluster.py +1300 -0
  19. rem/cli/commands/configure.py +1 -3
  20. rem/cli/commands/db.py +354 -143
  21. rem/cli/commands/process.py +14 -8
  22. rem/cli/commands/schema.py +92 -45
  23. rem/cli/main.py +27 -6
  24. rem/models/core/rem_query.py +5 -2
  25. rem/models/entities/shared_session.py +2 -28
  26. rem/registry.py +10 -4
  27. rem/services/content/service.py +30 -8
  28. rem/services/embeddings/api.py +4 -4
  29. rem/services/embeddings/worker.py +16 -16
  30. rem/services/postgres/README.md +151 -26
  31. rem/services/postgres/__init__.py +2 -1
  32. rem/services/postgres/diff_service.py +531 -0
  33. rem/services/postgres/pydantic_to_sqlalchemy.py +427 -129
  34. rem/services/postgres/schema_generator.py +205 -4
  35. rem/services/postgres/service.py +6 -6
  36. rem/services/rem/parser.py +44 -9
  37. rem/services/rem/service.py +36 -2
  38. rem/services/session/reload.py +1 -1
  39. rem/settings.py +56 -7
  40. rem/sql/background_indexes.sql +19 -24
  41. rem/sql/migrations/001_install.sql +252 -69
  42. rem/sql/migrations/002_install_models.sql +2171 -593
  43. rem/sql/migrations/003_optional_extensions.sql +326 -0
  44. rem/sql/migrations/004_cache_system.sql +548 -0
  45. rem/utils/__init__.py +18 -0
  46. rem/utils/date_utils.py +2 -2
  47. rem/utils/schema_loader.py +17 -13
  48. rem/utils/sql_paths.py +146 -0
  49. rem/workers/__init__.py +2 -1
  50. rem/workers/unlogged_maintainer.py +463 -0
  51. {remdb-0.3.103.dist-info → remdb-0.3.118.dist-info}/METADATA +149 -76
  52. {remdb-0.3.103.dist-info → remdb-0.3.118.dist-info}/RECORD +54 -48
  53. rem/sql/migrations/003_seed_default_user.sql +0 -48
  54. {remdb-0.3.103.dist-info → remdb-0.3.118.dist-info}/WHEEL +0 -0
  55. {remdb-0.3.103.dist-info → remdb-0.3.118.dist-info}/entry_points.txt +0 -0
rem/api/mcp_router/server.py

@@ -19,10 +19,18 @@ FastMCP Features:
 - Built-in auth that can be disabled for testing
 """
 
+import importlib.metadata
+
 from fastmcp import FastMCP
 
 from ...settings import settings
 
+# Get package version
+try:
+    __version__ = importlib.metadata.version("remdb")
+except importlib.metadata.PackageNotFoundError:
+    __version__ = "0.0.0-dev"
+
 
 def create_mcp_server(is_local: bool = False) -> FastMCP:
     """
@@ -52,7 +60,7 @@ def create_mcp_server(is_local: bool = False) -> FastMCP:
     """
     mcp = FastMCP(
         name=f"REM MCP Server ({settings.team}/{settings.environment})",
-        version="0.1.0",
+        version=__version__,
         instructions=(
             "REM (Resource-Entity-Moment) MCP Server - Unified memory infrastructure for agentic systems.\n\n"
             "═══════════════════════════════════════════════════════════════════════════\n"
@@ -119,10 +127,12 @@ def create_mcp_server(is_local: bool = False) -> FastMCP:
             "AVAILABLE TOOLS\n"
             "═══════════════════════════════════════════════════════════════════════════\n"
             "\n"
-            "• rem_query - Execute REM queries (LOOKUP, FUZZY, SEARCH, SQL, TRAVERSE)\n"
-            "• ask_rem - Natural language to REM query conversion\n"
+            "• search_rem - Execute REM queries (LOOKUP, FUZZY, SEARCH, SQL, TRAVERSE)\n"
+            "• ask_rem_agent - Natural language to REM query conversion\n"
             "  - plan_mode=True: Hints agent to use TRAVERSE with depth=0 for edge analysis\n"
-            "• parse_and_ingest_file - Ingest files from local paths (local server only), s3://, or https://\n"
+            "• ingest_into_rem - Ingest files from local paths (local server only), s3://, or https://\n"
+            "• list_schema - List all database schemas (tables) with row counts\n"
+            "• get_schema - Get detailed schema for a specific table (columns, types, indexes)\n"
             "\n"
             "═══════════════════════════════════════════════════════════════════════════\n"
             "AVAILABLE RESOURCES (Read-Only)\n"
@@ -167,7 +177,9 @@ def create_mcp_server(is_local: bool = False) -> FastMCP:
     # Register REM tools
     from .tools import (
         ask_rem_agent,
+        get_schema,
         ingest_into_rem,
+        list_schema,
         read_resource,
         register_metadata,
         search_rem,
@@ -177,6 +189,8 @@ def create_mcp_server(is_local: bool = False) -> FastMCP:
     mcp.tool()(ask_rem_agent)
     mcp.tool()(read_resource)
     mcp.tool()(register_metadata)
+    mcp.tool()(list_schema)
+    mcp.tool()(get_schema)
 
     # File ingestion tool (with local path support for local servers)
     # Wrap to inject is_local parameter
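A note on the registration style above: FastMCP's mcp.tool() returns a decorator, so applying it directly to an imported function is equivalent to decorating the function where it is defined. A toy sketch of the equivalence (demo server and tools, not remdb code):

    from fastmcp import FastMCP

    mcp = FastMCP(name="demo")

    # Decorator form, at the definition site
    @mcp.tool()
    def ping() -> str:
        """Liveness check."""
        return "pong"

    # Call form, for functions imported from elsewhere
    def echo(text: str) -> str:
        """Echo the input back."""
        return text

    mcp.tool()(echo)  # same effect as decorating echo with @mcp.tool()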
rem/api/mcp_router/tools.py

@@ -15,6 +15,9 @@ Available Tools:
 - ask_rem_agent: Natural language to REM query conversion via agent
 - ingest_into_rem: Full file ingestion pipeline (read + store + parse + chunk)
 - read_resource: Access MCP resources (for Claude Desktop compatibility)
+- register_metadata: Register response metadata for SSE MetadataEvent
+- list_schema: List all schemas (tables, agents) in the database with row counts
+- get_schema: Get detailed schema for a table (columns, types, indexes)
 """
 
 from functools import wraps
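The tools below lean on an mcp_tool_error_handler decorator whose implementation is not part of this hunk; the docstrings indicate it converts exceptions into {"status": "error", ...} results. A minimal sketch of what such a wrapper could look like (assumed behavior, not the actual remdb implementation):

    from functools import wraps
    from typing import Any

    def mcp_tool_error_handler(fn):
        """Wrap an async tool so exceptions surface as structured errors."""
        @wraps(fn)
        async def wrapper(*args: Any, **kwargs: Any) -> dict[str, Any]:
            try:
                return await fn(*args, **kwargs)
            except Exception as exc:  # tools should not raise into the MCP layer
                return {"status": "error", "error": str(exc)}
        return wrapper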
@@ -53,7 +56,7 @@ def init_services(postgres_service: PostgresService, rem_service: RemService):
     """
    _service_cache["postgres"] = postgres_service
    _service_cache["rem"] = rem_service
-    logger.info("MCP tools initialized with service instances")
+    logger.debug("MCP tools initialized with service instances")
 
 
 async def get_rem_service() -> RemService:
@@ -79,7 +82,7 @@ async def get_rem_service() -> RemService:
         _service_cache["postgres"] = postgres_service
         _service_cache["rem"] = rem_service
 
-        logger.info("MCP tools: lazy initialized services")
+        logger.debug("MCP tools: lazy initialized services")
     return rem_service
 
 
@@ -399,14 +402,14 @@ async def ask_rem_agent(
     )
 
     # Run agent (errors handled by decorator)
-    logger.info(f"Running ask_rem agent for query: {query[:100]}...")
+    logger.debug(f"Running ask_rem agent for query: {query[:100]}...")
     result = await agent_runtime.run(query)
 
     # Extract output
     from rem.agentic.serialization import serialize_agent_result
     query_output = serialize_agent_result(result.output)
 
-    logger.info("Agent execution completed successfully")
+    logger.debug("Agent execution completed successfully")
 
     return {
         "response": str(result.output),
@@ -509,7 +512,7 @@ async def ingest_into_rem(
         resource_type=resource_type,
     )
 
-    logger.info(
+    logger.debug(
         f"MCP ingestion complete: {result['file_name']} "
         f"(status: {result['processing_status']}, "
         f"resources: {result['resources_created']})"
@@ -564,7 +567,7 @@ async def read_resource(uri: str) -> dict[str, Any]:
         # Check system status
         read_resource(uri="rem://status")
     """
-    logger.info(f"📖 Reading resource: {uri}")
+    logger.debug(f"Reading resource: {uri}")
 
     # Import here to avoid circular dependency
     from .resources import load_resource
@@ -572,7 +575,7 @@
     # Load resource using the existing resource handler (errors handled by decorator)
     result = await load_resource(uri)
 
-    logger.info(f"Resource loaded successfully: {uri}")
+    logger.debug(f"Resource loaded successfully: {uri}")
 
     # If result is already a dict, return it
     if isinstance(result, dict):
@@ -603,6 +606,13 @@ async def register_metadata(
     references: list[str] | None = None,
     sources: list[str] | None = None,
     flags: list[str] | None = None,
+    # Risk assessment fields (used by mental health agents like Siggy)
+    risk_level: str | None = None,
+    risk_score: int | None = None,
+    risk_reasoning: str | None = None,
+    recommended_action: str | None = None,
+    # Generic extension - any additional key-value pairs
+    extra: dict[str, Any] | None = None,
 ) -> dict[str, Any]:
     """
     Register response metadata to be emitted as an SSE MetadataEvent.
@@ -627,13 +637,23 @@
         sources: List of source descriptions (e.g., "REM database",
             "search results", "user context").
         flags: Optional flags for the response (e.g., "needs_review",
-            "uncertain", "incomplete").
+            "uncertain", "incomplete", "crisis_alert").
+
+        risk_level: Risk level indicator (e.g., "green", "orange", "red").
+            Used by mental health agents for C-SSRS style assessment.
+        risk_score: Numeric risk score (e.g., 0-6 for C-SSRS).
+        risk_reasoning: Brief explanation of risk assessment.
+        recommended_action: Suggested next steps based on assessment.
+
+        extra: Dict of arbitrary additional metadata. Use this for any
+            domain-specific fields not covered by the standard parameters.
+            Example: {"topics_detected": ["anxiety", "sleep"], "session_count": 5}
 
     Returns:
         Dict with:
         - status: "success"
         - _metadata_event: True (marker for streaming layer)
-        - confidence, references, sources, flags: The registered values
+        - All provided fields merged into response
 
     Examples:
         # High confidence answer with references
@@ -643,18 +663,41 @@
             sources=["REM database lookup"]
         )
 
-        # Lower confidence with flags
+        # Mental health risk assessment (Siggy-style)
+        register_metadata(
+            confidence=0.9,
+            risk_level="green",
+            risk_score=0,
+            risk_reasoning="No risk indicators detected in message",
+            sources=["mental_health_resources"]
+        )
+
+        # Orange risk with recommended action
         register_metadata(
-            confidence=0.65,
-            flags=["needs_review", "incomplete_data"]
+            risk_level="orange",
+            risk_score=2,
+            risk_reasoning="Passive ideation detected - 'feeling hopeless'",
+            recommended_action="Schedule care team check-in within 24-48 hours",
+            flags=["care_team_alert"]
+        )
+
+        # Custom domain-specific metadata
+        register_metadata(
+            confidence=0.8,
+            extra={
+                "topics_detected": ["medication", "side_effects"],
+                "drug_mentioned": "sertraline",
+                "sentiment": "concerned"
+            }
         )
     """
-    logger.info(
-        f"📊 Registering metadata: confidence={confidence}, "
-        f"refs={len(references or [])}, sources={len(sources or [])}"
+    logger.debug(
+        f"Registering metadata: confidence={confidence}, "
+        f"risk_level={risk_level}, refs={len(references or [])}, "
+        f"sources={len(sources or [])}"
     )
 
-    return {
+    result = {
         "status": "success",
         "_metadata_event": True,  # Marker for streaming layer
         "confidence": confidence,
@@ -662,3 +705,327 @@
         "sources": sources,
         "flags": flags,
     }
+
+    # Add risk assessment fields if provided
+    if risk_level is not None:
+        result["risk_level"] = risk_level
+    if risk_score is not None:
+        result["risk_score"] = risk_score
+    if risk_reasoning is not None:
+        result["risk_reasoning"] = risk_reasoning
+    if recommended_action is not None:
+        result["recommended_action"] = recommended_action
+
+    # Merge any extra fields
+    if extra:
+        result["extra"] = extra
+
+    return result
+
+
+@mcp_tool_error_handler
+async def list_schema(
+    include_system: bool = False,
+    user_id: str | None = None,
+) -> dict[str, Any]:
+    """
+    List all schemas (tables) in the REM database.
+
+    Returns metadata about all available tables including their names,
+    row counts, and descriptions. Use this to discover what data is
+    available before constructing queries.
+
+    Args:
+        include_system: If True, include PostgreSQL system tables (pg_*, information_schema).
+            Default False shows only REM application tables.
+        user_id: Optional user identifier (defaults to authenticated user or "default")
+
+    Returns:
+        Dict with:
+        - status: "success" or "error"
+        - tables: List of table metadata dicts with:
+            - name: Table name
+            - schema: Schema name (usually "public")
+            - estimated_rows: Approximate row count
+            - description: Table comment if available
+
+    Examples:
+        # List all REM schemas
+        list_schema()
+
+        # Include system tables
+        list_schema(include_system=True)
+    """
+    rem_service = await get_rem_service()
+    user_id = AgentContext.get_user_id_or_default(user_id, source="list_schema")
+
+    # Query information_schema for tables
+    schema_filter = ""
+    if not include_system:
+        schema_filter = """
+            AND table_schema = 'public'
+            AND table_name NOT LIKE 'pg_%'
+            AND table_name NOT LIKE '_pg_%'
+        """
+
+    query = f"""
+        SELECT
+            t.table_schema,
+            t.table_name,
+            pg_catalog.obj_description(
+                (quote_ident(t.table_schema) || '.' || quote_ident(t.table_name))::regclass,
+                'pg_class'
+            ) as description,
+            (
+                SELECT reltuples::bigint
+                FROM pg_class c
+                JOIN pg_namespace n ON n.oid = c.relnamespace
+                WHERE c.relname = t.table_name
+                AND n.nspname = t.table_schema
+            ) as estimated_rows
+        FROM information_schema.tables t
+        WHERE t.table_type = 'BASE TABLE'
+        {schema_filter}
+        ORDER BY t.table_schema, t.table_name
+    """
+
+    # Access postgres service directly from cache
+    postgres_service = _service_cache.get("postgres")
+    if not postgres_service:
+        postgres_service = rem_service._postgres
+
+    rows = await postgres_service.fetch(query)
+
+    tables = []
+    for row in rows:
+        tables.append({
+            "name": row["table_name"],
+            "schema": row["table_schema"],
+            "estimated_rows": int(row["estimated_rows"]) if row["estimated_rows"] else 0,
+            "description": row["description"],
+        })
+
+    logger.info(f"Listed {len(tables)} schemas for user {user_id}")
+
+    return {
+        "tables": tables,
+        "count": len(tables),
+    }
+
+
817
+ async def get_schema(
818
+ table_name: str,
819
+ include_indexes: bool = True,
820
+ include_constraints: bool = True,
821
+ columns: list[str] | None = None,
822
+ user_id: str | None = None,
823
+ ) -> dict[str, Any]:
824
+ """
825
+ Get detailed schema information for a specific table.
826
+
827
+ Returns column definitions, data types, constraints, and indexes.
828
+ Use this to understand table structure before writing SQL queries.
829
+
830
+ Args:
831
+ table_name: Name of the table to inspect (e.g., "resources", "moments")
832
+ include_indexes: Include index information (default True)
833
+ include_constraints: Include constraint information (default True)
834
+ columns: Optional list of specific columns to return. If None, returns all columns.
835
+ user_id: Optional user identifier (defaults to authenticated user or "default")
836
+
837
+ Returns:
838
+ Dict with:
839
+ - status: "success" or "error"
840
+ - table_name: Name of the table
841
+ - columns: List of column definitions with:
842
+ - name: Column name
843
+ - type: PostgreSQL data type
844
+ - nullable: Whether NULL is allowed
845
+ - default: Default value if any
846
+ - description: Column comment if available
847
+ - indexes: List of indexes (if include_indexes=True)
848
+ - constraints: List of constraints (if include_constraints=True)
849
+ - primary_key: Primary key column(s)
850
+
851
+ Examples:
852
+ # Get full schema for resources table
853
+ get_schema(table_name="resources")
854
+
855
+ # Get only specific columns
856
+ get_schema(
857
+ table_name="resources",
858
+ columns=["id", "name", "created_at"]
859
+ )
860
+
861
+ # Get schema without indexes
862
+ get_schema(
863
+ table_name="moments",
864
+ include_indexes=False
865
+ )
866
+ """
867
+ rem_service = await get_rem_service()
868
+ user_id = AgentContext.get_user_id_or_default(user_id, source="get_schema")
869
+
870
+ # Access postgres service
871
+ postgres_service = _service_cache.get("postgres")
872
+ if not postgres_service:
873
+ postgres_service = rem_service._postgres
874
+
875
+ # Verify table exists
876
+ exists_query = """
877
+ SELECT EXISTS (
878
+ SELECT 1 FROM information_schema.tables
879
+ WHERE table_schema = 'public' AND table_name = $1
880
+ )
881
+ """
882
+ exists = await postgres_service.fetchval(exists_query, table_name)
883
+ if not exists:
884
+ return {
885
+ "status": "error",
886
+ "error": f"Table '{table_name}' not found in public schema",
887
+ }
888
+
889
+ # Get columns
890
+ columns_filter = ""
891
+ if columns:
892
+ placeholders = ", ".join(f"${i+2}" for i in range(len(columns)))
893
+ columns_filter = f"AND column_name IN ({placeholders})"
894
+
895
+ columns_query = f"""
896
+ SELECT
897
+ c.column_name,
898
+ c.data_type,
899
+ c.udt_name,
900
+ c.is_nullable,
901
+ c.column_default,
902
+ c.character_maximum_length,
903
+ c.numeric_precision,
904
+ pg_catalog.col_description(
905
+ (quote_ident(c.table_schema) || '.' || quote_ident(c.table_name))::regclass,
906
+ c.ordinal_position
907
+ ) as description
908
+ FROM information_schema.columns c
909
+ WHERE c.table_schema = 'public'
910
+ AND c.table_name = $1
911
+ {columns_filter}
912
+ ORDER BY c.ordinal_position
913
+ """
914
+
915
+ params = [table_name]
916
+ if columns:
917
+ params.extend(columns)
918
+
919
+ column_rows = await postgres_service.fetch(columns_query, *params)
920
+
921
+ column_defs = []
922
+ for row in column_rows:
923
+ # Build a more readable type string
924
+ data_type = row["data_type"]
925
+ if row["character_maximum_length"]:
926
+ data_type = f"{data_type}({row['character_maximum_length']})"
927
+ elif row["udt_name"] in ("int4", "int8", "float4", "float8"):
928
+ # Use common type names
929
+ type_map = {"int4": "integer", "int8": "bigint", "float4": "real", "float8": "double precision"}
930
+ data_type = type_map.get(row["udt_name"], data_type)
931
+ elif row["udt_name"] == "vector":
932
+ data_type = "vector"
933
+
934
+ column_defs.append({
935
+ "name": row["column_name"],
936
+ "type": data_type,
937
+ "nullable": row["is_nullable"] == "YES",
938
+ "default": row["column_default"],
939
+ "description": row["description"],
940
+ })
941
+
942
+ result = {
943
+ "table_name": table_name,
944
+ "columns": column_defs,
945
+ "column_count": len(column_defs),
946
+ }
947
+
948
+ # Get primary key
949
+ pk_query = """
950
+ SELECT a.attname as column_name
951
+ FROM pg_index i
952
+ JOIN pg_attribute a ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey)
953
+ WHERE i.indrelid = $1::regclass
954
+ AND i.indisprimary
955
+ ORDER BY array_position(i.indkey, a.attnum)
956
+ """
957
+ pk_rows = await postgres_service.fetch(pk_query, table_name)
958
+ result["primary_key"] = [row["column_name"] for row in pk_rows]
959
+
960
+ # Get indexes
961
+ if include_indexes:
962
+ indexes_query = """
963
+ SELECT
964
+ i.relname as index_name,
965
+ am.amname as index_type,
966
+ ix.indisunique as is_unique,
967
+ ix.indisprimary as is_primary,
968
+ array_agg(a.attname ORDER BY array_position(ix.indkey, a.attnum)) as columns
969
+ FROM pg_index ix
970
+ JOIN pg_class i ON i.oid = ix.indexrelid
971
+ JOIN pg_class t ON t.oid = ix.indrelid
972
+ JOIN pg_am am ON am.oid = i.relam
973
+ JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
974
+ WHERE t.relname = $1
975
+ GROUP BY i.relname, am.amname, ix.indisunique, ix.indisprimary
976
+ ORDER BY i.relname
977
+ """
978
+ index_rows = await postgres_service.fetch(indexes_query, table_name)
979
+ result["indexes"] = [
980
+ {
981
+ "name": row["index_name"],
982
+ "type": row["index_type"],
983
+ "unique": row["is_unique"],
984
+ "primary": row["is_primary"],
985
+ "columns": row["columns"],
986
+ }
987
+ for row in index_rows
988
+ ]
989
+
990
+ # Get constraints
991
+ if include_constraints:
992
+ constraints_query = """
993
+ SELECT
994
+ con.conname as constraint_name,
995
+ con.contype as constraint_type,
996
+ array_agg(a.attname ORDER BY array_position(con.conkey, a.attnum)) as columns,
997
+ pg_get_constraintdef(con.oid) as definition
998
+ FROM pg_constraint con
999
+ JOIN pg_class t ON t.oid = con.conrelid
1000
+ JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(con.conkey)
1001
+ WHERE t.relname = $1
1002
+ GROUP BY con.conname, con.contype, con.oid
1003
+ ORDER BY con.contype, con.conname
1004
+ """
1005
+ constraint_rows = await postgres_service.fetch(constraints_query, table_name)
1006
+
1007
+ # Map constraint types to readable names
1008
+ type_map = {
1009
+ "p": "PRIMARY KEY",
1010
+ "u": "UNIQUE",
1011
+ "f": "FOREIGN KEY",
1012
+ "c": "CHECK",
1013
+ "x": "EXCLUSION",
1014
+ }
1015
+
1016
+ result["constraints"] = []
1017
+ for row in constraint_rows:
1018
+ # contype is returned as bytes (char type), decode it
1019
+ con_type = row["constraint_type"]
1020
+ if isinstance(con_type, bytes):
1021
+ con_type = con_type.decode("utf-8")
1022
+ result["constraints"].append({
1023
+ "name": row["constraint_name"],
1024
+ "type": type_map.get(con_type, con_type),
1025
+ "columns": row["columns"],
1026
+ "definition": row["definition"],
1027
+ })
1028
+
1029
+ logger.info(f"Retrieved schema for table '{table_name}' with {len(column_defs)} columns")
1030
+
1031
+ return result
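Throughout register_metadata, the "_metadata_event": True marker is what lets the streaming layer divert the tool's return value onto the SSE metadata channel instead of treating it as ordinary tool output. A minimal sketch of how a consumer could branch on that marker (the dispatcher shape is assumed for illustration, not remdb's actual streaming code):

    from typing import Any, Callable

    def route_tool_result(
        result: dict[str, Any],
        emit_metadata: Callable[[dict[str, Any]], None],
        emit_output: Callable[[dict[str, Any]], None],
    ) -> None:
        """Send metadata-marked results to the SSE channel, pass the rest through."""
        if result.get("_metadata_event"):
            # Drop the marker; the remaining keys (confidence, risk_level,
            # recommended_action, extra, ...) form the MetadataEvent payload.
            payload = {k: v for k, v in result.items() if k != "_metadata_event"}
            emit_metadata(payload)
        else:
            emit_output(result)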