sunholo 0.123.1__py3-none-any.whl → 0.123.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sunholo/database/alloydb_client.py

@@ -262,21 +262,38 @@ class AlloyDBClient:
     async def _execute_sql_async_langchain(self, sql_statement):
         return await self.engine._afetch(query = sql_statement)
 
-    async def _execute_sql_async_pg8000(self, sql_statement):
-        """Executes a given SQL statement asynchronously with error handling."""
+    async def _execute_sql_async_pg8000(self, sql_statement, values=None):
+        """Executes a given SQL statement asynchronously with error handling.
+
+        Args:
+            sql_statement (str): The SQL statement to execute
+            values (list, optional): Values for parameterized query
+
+        Returns:
+            Result of SQL execution
+        """
         sql_ = sqlalchemy.text(sql_statement)
         result = None
-        async with self.engine.connect() as conn:
-            try:
-                log.info(f"Executing SQL statement asynchronously: {sql_}")
+
+        # Get connection directly, avoiding async with
+        conn = await self.engine.connect()
+        try:
+            log.info(f"Executing SQL statement asynchronously: {sql_}")
+            if values:
+                result = await conn.execute(sql_, values)
+            else:
                 result = await conn.execute(sql_)
-            except DatabaseError as e:
-                if "already exists" in str(e):
-                    log.warning(f"Error ignored: {str(e)}. Assuming object already exists.")
-                else:
-                    raise
-            finally:
-                await conn.close()
+
+            # Explicitly commit transaction
+            await conn.commit()
+        except DatabaseError as e:
+            if "already exists" in str(e):
+                log.warning(f"Error ignored: {str(e)}. Assuming object already exists.")
+            else:
+                raise
+        finally:
+            # Close connection only here, not inside the context manager
+            await conn.close()
 
         return result
 
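The hunk above replaces the `async with` block with an explicitly managed connection, adds an optional `values` argument, and commits before closing. A minimal usage sketch (not from the package) follows; `client` stands for an already-constructed AlloyDBClient using the pg8000 engine, whose setup is not shown in this diff, and the table, column, and placeholder style in the parameterized call are invented for illustration.

async def demo(client):
    # No parameters: behaves like the pre-0.123.3 version, now with an explicit commit.
    await client._execute_sql_async_pg8000('SELECT 1')

    # With parameters: `values` is forwarded to conn.execute(sql_, values).
    # Statement and values are invented; whether pg8000/SQLAlchemy accepts this
    # exact placeholder style as-is is not verified here.
    await client._execute_sql_async_pg8000(
        'INSERT INTO "demo_table" ("name") VALUES (%s)',
        ["example"],
    )

# Run with e.g. asyncio.run(demo(client)), given an existing client instance.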
@@ -540,7 +557,7 @@ class AlloyDBClient:
         """
         try:
             # Simple query to check connection
-            result = await self.execute_sql_async("SELECT 1")
+            _ = await self.execute_sql_async("SELECT 1")
             return True
         except Exception as e:
             log.warning(f"Database connection check failed: {e}")
@@ -565,7 +582,7 @@ class AlloyDBClient:
             # Re-create the engine
             self.engine = self._create_engine()
 
-            log.info(f"Successfully reconnected to AlloyDB")
+            log.info("Successfully reconnected to AlloyDB")
             return True
         except Exception as e:
             log.error(f"Failed to reconnect to AlloyDB: {e}")
@@ -585,128 +602,109 @@ class AlloyDBClient:
         except Exception as e:
             log.warning(f"Error closing AlloyDB connection: {e}")
 
-    async def create_table_from_schema(self, table_name: str, schema_data: dict, users: list = None):
-        """
-        Creates or ensures a table exists based on the structure of the provided schema data.
-
-        Args:
-            table_name (str): Name of the table to create
-            schema_data (dict): Data structure that matches the expected schema
-            users (list, optional): List of users to grant permissions to
+    async def create_table_from_schema(self, table_name: str, schema_data: dict, users: list = None):
+        """
+        Creates or ensures a table exists based on the structure of the provided schema data.
 
-        Returns:
-            Result of SQL execution
-        """
-        # Generate column definitions from schema data
-        columns = []
-        for key, value in schema_data.items():
-            if isinstance(value, dict):
-                # For nested objects, store as JSONB
-                columns.append(f'"{key}" JSONB')
-            elif isinstance(value, list):
-                # For arrays, store as JSONB
-                columns.append(f'"{key}" JSONB')
-            elif isinstance(value, int):
-                columns.append(f'"{key}" INTEGER')
-            elif isinstance(value, float):
-                columns.append(f'"{key}" NUMERIC')
-            elif isinstance(value, bool):
-                columns.append(f'"{key}" BOOLEAN')
-            else:
-                # Default to TEXT for strings and other types
-                columns.append(f'"{key}" TEXT')
-
-        # Add metadata columns
-        columns.extend([
-            '"source" TEXT',
-            '"extraction_date" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP',
-            '"extraction_backend" TEXT',
-            '"extraction_model" TEXT'
-        ])
-
-        # Create SQL statement for table creation
-        columns_sql = ", ".join(columns)
-        sql = f'''
-        CREATE TABLE IF NOT EXISTS "{table_name}" (
-            id SERIAL PRIMARY KEY,
-            {columns_sql}
-        )
-        '''
-
-        # Execute SQL to create table
-        result = await self.execute_sql_async(sql)
-        log.info(f"Created or ensured table {table_name} exists")
-
-        # Grant permissions if users are provided
-        if users:
-            for user in users:
-                await self.execute_sql_async(f'GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE "{table_name}" TO "{user}";')
-
-        return result
-
-    async def write_data_to_table(self, table_name: str, data: dict, metadata: dict = None):
-        """
-        Writes data to the specified table.
-
-        Args:
-            table_name (str): Name of the table
-            data (dict): Data to write to the table
-            metadata (dict, optional): Additional metadata to include
+        Args:
+            table_name (str): Name of the table to create
+            schema_data (dict): Data structure that matches the expected schema
+            users (list, optional): List of users to grant permissions to
+
+        Returns:
+            Result of SQL execution
+        """
+        # Generate column definitions from schema data
+        columns = []
+        for key, value in schema_data.items():
+            if isinstance(value, dict):
+                # For nested objects, store as JSONB
+                columns.append(f'"{key}" JSONB')
+            elif isinstance(value, list):
+                # For arrays, store as JSONB
+                columns.append(f'"{key}" JSONB')
+            elif isinstance(value, int):
+                columns.append(f'"{key}" INTEGER')
+            elif isinstance(value, float):
+                columns.append(f'"{key}" NUMERIC')
+            elif isinstance(value, bool):
+                columns.append(f'"{key}" BOOLEAN')
+            else:
+                # Default to TEXT for strings and other types
+                columns.append(f'"{key}" TEXT')
 
-        Returns:
-            Result of SQL execution
-        """
-        # Create copies to avoid modifying the original data
-        insert_data = dict(data)
-
-        # Add metadata if provided
-        if metadata:
-            insert_data["source"] = metadata.get("objectId", metadata.get("source", "unknown"))
-            insert_data["extraction_backend"] = metadata.get("extraction_backend", "unknown")
-            insert_data["extraction_model"] = metadata.get("extraction_model", "unknown")
-
-        # Prepare column names and placeholders for values
-        columns = [f'"{key}"' for key in insert_data.keys()]
-        placeholders = []
-        values = []
-
-        # Process values and create properly formatted placeholders
-        for key, value in insert_data.items():
-            values.append(json.dumps(value) if isinstance(value, (dict, list)) else value)
-            placeholders.append("%s")
-
-        # Create SQL statement for insertion
-        columns_str = ", ".join(columns)
-        placeholders_str = ", ".join(placeholders)
-
-        sql = f'''
-        INSERT INTO "{table_name}" ({columns_str})
-        VALUES ({placeholders_str})
-        RETURNING id
-        '''
-
-        # Execute SQL to insert data
-        result = await self.execute_sql_async(sql, values)
-        log.info(f"Inserted data into table {table_name}")
-
-        return result
-
-    async def execute_sql_async(self, sql_statement, values=None):
-        """
-        Executes a given SQL statement asynchronously with optional parameter values.
-
-        Args:
-            sql_statement (str): The SQL statement to execute
-            values (list, optional): Values for parameterized query
+        # Add metadata columns
+        columns.extend([
+            '"source" TEXT',
+            '"extraction_date" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP',
+            '"extraction_backend" TEXT',
+            '"extraction_model" TEXT'
+        ])
 
-        Returns:
-            Result of SQL execution
-        """
-        log.info(f"Executing async SQL statement: {sql_statement}")
-        if self.engine_type == "pg8000":
-            result = await self._execute_sql_async_pg8000(sql_statement, values)
-        elif self.engine_type == "langchain":
-            result = await self._execute_sql_async_langchain(sql_statement, values)
-
-        return result
+        # Create SQL statement for table creation
+        columns_sql = ", ".join(columns)
+        sql = f'''
+        CREATE TABLE IF NOT EXISTS "{table_name}" (
+            id SERIAL PRIMARY KEY,
+            {columns_sql}
+        )
+        '''
+
+        # Execute SQL to create table
+        result = await self.execute_sql_async(sql)
+        log.info(f"Created or ensured table {table_name} exists")
+
+        # Grant permissions if users are provided
+        if users:
+            for user in users:
+                grant_sql = f'GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE "{table_name}" TO "{user}";'
+                await self.execute_sql_async(grant_sql)
+
+        return result
 
+    async def write_data_to_table(self, table_name: str, data: dict, metadata: dict = None):
+        """
+        Writes data to the specified table.
+
+        Args:
+            table_name (str): Name of the table
+            data (dict): Data to write to the table
+            metadata (dict, optional): Additional metadata to include
+
+        Returns:
+            Result of SQL execution
+        """
+        # Create copies to avoid modifying the original data
+        insert_data = dict(data)
+
+        # Add metadata if provided
+        if metadata:
+            insert_data["source"] = metadata.get("objectId", metadata.get("source", "unknown"))
+            insert_data["extraction_backend"] = metadata.get("extraction_backend", "unknown")
+            insert_data["extraction_model"] = metadata.get("extraction_model", "unknown")
+
+        # Prepare column names and placeholders for values
+        columns = [f'"{key}"' for key in insert_data.keys()]
+        placeholders = []
+        values = []
+
+        # Process values and create properly formatted placeholders
+        for key, value in insert_data.items():
+            values.append(json.dumps(value) if isinstance(value, (dict, list)) else value)
+            placeholders.append("%s")
+
+        # Create SQL statement for insertion
+        columns_str = ", ".join(columns)
+        placeholders_str = ", ".join(placeholders)
+
+        sql = f'''
+        INSERT INTO "{table_name}" ({columns_str})
+        VALUES ({placeholders_str})
+        RETURNING id
+        '''
+
+        # Execute SQL to insert data
+        result = await self.execute_sql_async(sql, values)
+        log.info(f"Inserted data into table {table_name}")
+
+        return result
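
To illustrate the two table helpers above, here is a hedged usage sketch. Only the method names, signatures, and the type-to-column mapping come from the diff; the table name, field names, values, and grantee are invented, and `client` is assumed to be an existing AlloyDBClient instance.

schema_data = {
    "title": "A sample document",       # str   -> TEXT column
    "page_count": 3,                     # int   -> INTEGER column
    "confidence": 0.92,                  # float -> NUMERIC column
    "is_final": True,                    # bool is an int subclass, so this hits the INTEGER branch above
    "sections": ["intro", "body"],       # list  -> JSONB column
    "entities": {"people": ["Ada"]},     # dict  -> JSONB column
}

metadata = {
    "objectId": "gs://bucket/doc.pdf",   # becomes the "source" column value
    "extraction_backend": "docai",       # illustrative
    "extraction_model": "example-model", # illustrative
}

async def demo(client):
    # CREATE TABLE IF NOT EXISTS "demo_extracts" with id SERIAL PRIMARY KEY,
    # the inferred columns above, and the extraction metadata columns,
    # then GRANT statements for each listed user.
    await client.create_table_from_schema("demo_extracts", schema_data, users=["app_user"])
    # INSERT ... RETURNING id; dict/list values are json.dumps()-ed,
    # metadata fills the source/extraction_* columns.
    await client.write_data_to_table("demo_extracts", schema_data, metadata)

# Run with e.g. asyncio.run(demo(client)), given an existing client instance.

Note that write_data_to_table builds "%s" placeholders and routes the statement and values through execute_sql_async, which on the pg8000 path now forwards the values list to conn.execute, as shown in the first hunk.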
sunholo-0.123.1.dist-info/METADATA → sunholo-0.123.3.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sunholo
-Version: 0.123.1
+Version: 0.123.3
 Summary: Large Language Model DevOps - a package to help deploy LLMs to the Cloud.
 Author-email: Holosun ApS <multivac@sunholo.com>
 License: Apache License, Version 2.0
sunholo-0.123.1.dist-info/RECORD → sunholo-0.123.3.dist-info/RECORD

@@ -60,7 +60,7 @@ sunholo/components/retriever.py,sha256=Wmchv3huAM4w7DIS-a5Lp9Hi7M8pE6vZdxgseiT9S
 sunholo/components/vectorstore.py,sha256=k7GS1Y5c6ZGXSDAJvyCes6dTjhDAi0fjGbVLqpyfzBc,5918
 sunholo/database/__init__.py,sha256=bpB5Nk21kwqYj-qdVnvNgXjLsbflnH4g-San7OHMqR4,283
 sunholo/database/alloydb.py,sha256=x1zUMB-EVWbE2Zvp4nAs2Z-tB_kOZmS45H2lwVHdYnk,11678
-sunholo/database/alloydb_client.py,sha256=WpkrQmy2hK4148df-6Ys8XRjCGpObZa9Dc9TXLSX_sE,27108
+sunholo/database/alloydb_client.py,sha256=yksz52Qnx8xCsW61TPKGqSGTehSukP8suKwShec4z54,27401
 sunholo/database/database.py,sha256=VqhZdkXUNdvWn8sUcUV3YNby1JDVf7IykPVXWBtxo9U,7361
 sunholo/database/lancedb.py,sha256=DyfZntiFKBlVPaFooNN1Z6Pl-LAs4nxWKKuq8GBqN58,715
 sunholo/database/static_dbs.py,sha256=8cvcMwUK6c32AS2e_WguKXWMkFf5iN3g9WHzsh0C07Q,442
@@ -168,9 +168,9 @@ sunholo/vertex/init.py,sha256=1OQwcPBKZYBTDPdyU7IM4X4OmiXLdsNV30C-fee2scQ,2875
 sunholo/vertex/memory_tools.py,sha256=tBZxqVZ4InTmdBvLlOYwoSEWu4-kGquc-gxDwZCC4FA,7667
 sunholo/vertex/safety.py,sha256=S9PgQT1O_BQAkcqauWncRJaydiP8Q_Jzmu9gxYfy1VA,2482
 sunholo/vertex/type_dict_to_json.py,sha256=uTzL4o9tJRao4u-gJOFcACgWGkBOtqACmb6ihvCErL8,4694
-sunholo-0.123.1.dist-info/licenses/LICENSE.txt,sha256=SdE3QjnD3GEmqqg9EX3TM9f7WmtOzqS1KJve8rhbYmU,11345
-sunholo-0.123.1.dist-info/METADATA,sha256=eMkSTIyICC03RID4bD6yN7ryL8Faglbjjj2EKsnvVf4,10001
-sunholo-0.123.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-sunholo-0.123.1.dist-info/entry_points.txt,sha256=bZuN5AIHingMPt4Ro1b_T-FnQvZ3teBes-3OyO0asl4,49
-sunholo-0.123.1.dist-info/top_level.txt,sha256=wt5tadn5--5JrZsjJz2LceoUvcrIvxjHJe-RxuudxAk,8
-sunholo-0.123.1.dist-info/RECORD,,
+sunholo-0.123.3.dist-info/licenses/LICENSE.txt,sha256=SdE3QjnD3GEmqqg9EX3TM9f7WmtOzqS1KJve8rhbYmU,11345
+sunholo-0.123.3.dist-info/METADATA,sha256=BYdslKNSlXmkUMdV4JExD5JzamsfnwVfzIepQV9PsAA,10001
+sunholo-0.123.3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+sunholo-0.123.3.dist-info/entry_points.txt,sha256=bZuN5AIHingMPt4Ro1b_T-FnQvZ3teBes-3OyO0asl4,49
+sunholo-0.123.3.dist-info/top_level.txt,sha256=wt5tadn5--5JrZsjJz2LceoUvcrIvxjHJe-RxuudxAk,8
+sunholo-0.123.3.dist-info/RECORD,,