pyobvector 0.2.13__py3-none-any.whl → 0.2.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -90,11 +90,19 @@ class ObVecClient:
         )
 
     def refresh_metadata(self, tables: Optional[list[str]] = None):
-        """Reload metadata from the database."""
-        if tables is None:
-            self.metadata_obj.reflect(bind=self.engine, extend_existing=True)
-        else:
+        """Reload metadata from the database.
+
+        Args:
+            tables (Optional[list[str]]): names of the tables to refresh. If None, refresh all tables.
+        """
+        if tables is not None:
+            for table_name in tables:
+                if table_name in self.metadata_obj.tables:
+                    self.metadata_obj.remove(Table(table_name, self.metadata_obj))
             self.metadata_obj.reflect(bind=self.engine, only=tables, extend_existing=True)
+        else:
+            self.metadata_obj.clear()
+            self.metadata_obj.reflect(bind=self.engine, extend_existing=True)
 
     def _insert_partition_hint_for_query_sql(self, sql: str, partition_hint: str):
        from_index = sql.find("FROM")
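The reworked `refresh_metadata` evicts cached `Table` objects before re-reflecting the named tables, and clears the whole `MetaData` cache when no names are given. A minimal usage sketch, assuming a reachable OceanBase instance; the connection arguments and the `items` table are placeholders, not values taken from this diff:

```python
from pyobvector import ObVecClient

# Hypothetical connection parameters.
client = ObVecClient(uri="127.0.0.1:2881", user="root@test", password="", db_name="test")

# Refresh only the tables whose DDL changed; their stale Table objects are
# removed from the MetaData cache before re-reflection.
client.refresh_metadata(tables=["items"])

# Refresh everything: the cache is cleared and all tables are re-reflected.
client.refresh_metadata()
```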
@@ -808,3 +816,47 @@ class ObVecClient:
         with self.engine.connect() as conn:
             with conn.begin():
                 return conn.execute(text(text_sql))
+
+    def add_columns(
+        self,
+        table_name: str,
+        columns: list[Column],
+    ):
+        """Add multiple columns to an existing table.
+
+        Args:
+            table_name (string): table name
+            columns (list[Column]): list of SQLAlchemy Column objects representing the new columns
+        """
+        compiler = self.engine.dialect.ddl_compiler(self.engine.dialect, None)
+        column_specs = [compiler.get_column_specification(column) for column in columns]
+        columns_ddl = ", ".join(f"ADD COLUMN {spec}" for spec in column_specs)
+
+        with self.engine.connect() as conn:
+            with conn.begin():
+                conn.execute(
+                    text(f"ALTER TABLE `{table_name}` {columns_ddl}")
+                )
+
+        self.refresh_metadata([table_name])
+
+    def drop_columns(
+        self,
+        table_name: str,
+        column_names: list[str],
+    ):
+        """Drop multiple columns from an existing table.
+
+        Args:
+            table_name (string): table name
+            column_names (list[str]): names of the columns to drop
+        """
+        columns_ddl = ", ".join(f"DROP COLUMN `{name}`" for name in column_names)
+
+        with self.engine.connect() as conn:
+            with conn.begin():
+                conn.execute(
+                    text(f"ALTER TABLE `{table_name}` {columns_ddl}")
+                )
+
+        self.refresh_metadata([table_name])
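Both new helpers compile the requested columns into a single `ALTER TABLE` statement and then re-reflect the affected table via `refresh_metadata`. A minimal usage sketch; the client arguments, the `items` table, and the column definitions are placeholders:

```python
from sqlalchemy import Column, Integer, String
from pyobvector import ObVecClient

# Hypothetical connection parameters.
client = ObVecClient(uri="127.0.0.1:2881", user="root@test", password="", db_name="test")

# Each Column is compiled into an "ADD COLUMN ..." clause and executed as one ALTER TABLE.
client.add_columns(
    "items",
    [
        Column("price", Integer, nullable=True),
        Column("note", String(64)),
    ],
)

# Drop both columns again in a single ALTER TABLE, then re-reflect the table.
client.drop_columns("items", ["price", "note"])
```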
@@ -236,6 +236,7 @@ class ObVecJsonTableClient(ObVecClient):
             raise ValueError("Table name duplicated")
 
         session = self.session()
+        session.execute(text("SET @@session.autocommit=0"))
         new_meta_cache_items = []
         col_id = 16
         for col_def in ast.find_all(exp.ColumnDef):
@@ -607,6 +608,7 @@ class ObVecJsonTableClient(ObVecClient):
             raise ValueError(f"Table {jtable_name} does not exists")
 
         session = self.session()
+        session.execute(text("SET @@session.autocommit=0"))
         for action in ast.actions:
             if isinstance(action, ChangeColumn):
                 self._handle_alter_jtable_change_column(
@@ -681,6 +683,7 @@ class ObVecJsonTableClient(ObVecClient):
             raise ValueError(f"Invalid ast type {ast.this}")
 
         session = self.session()
+        session.execute(text("SET @@session.autocommit=0"))
         n_new_records = 0
         for tuple in ast.expression.expressions:
             expr_list = tuple.expressions
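All three JSON-table write paths (create, alter, insert) now begin by disabling autocommit on the session, so the statements that follow only become visible at the handler's explicit commit. A minimal sketch of that pattern outside the client, with a hypothetical DSN and table name:

```python
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

# Hypothetical connection string and table; illustrative only.
engine = create_engine("mysql+pymysql://user:pass@127.0.0.1:2881/test")
Session = sessionmaker(bind=engine)

session = Session()
# Turn off MySQL-protocol autocommit for this session so the writes below
# are grouped into one transaction ended by session.commit().
session.execute(text("SET @@session.autocommit=0"))
session.execute(text("INSERT INTO t1 (c1) VALUES (1)"))
session.commit()
```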
@@ -1,6 +1,6 @@
 """ARRAY: An extended data type for SQLAlchemy"""
 import json
-from typing import Any, List, Optional, Sequence, Union, Type
+from typing import Any, List, Optional, Sequence, Union
 
 from sqlalchemy.sql.type_api import TypeEngine
 from sqlalchemy.types import UserDefinedType, String
@@ -120,39 +120,3 @@ class ARRAY(UserDefinedType):
             return json.dumps(processed)
 
         return process
-
-
-def nested_array(dim: int) -> Type[ARRAY]:
-    """Create a nested array type class with specified dimensions.
-
-    Args:
-        dim: The number of dimensions for the array type (1-6)
-
-    Returns:
-        A class type that can be instantiated with an item_type to create a nested array
-
-    Raises:
-        ValueError: If dim is not between 1 and 6
-    """
-    if not 1 <= dim <= 6:
-        raise ValueError("Dimension must be between 1 and 6")
-
-    class NestedArray(ARRAY):
-        cache_ok = True
-        _string = String()
-
-        def __init__(self, item_type: Union[TypeEngine, type]):
-            super(UserDefinedType, self).__init__()
-            if isinstance(item_type, type):
-                item_type = item_type()
-
-            assert not isinstance(item_type, ARRAY), "The item_type of NestedArray should not be an ARRAY type"
-
-            nested_type = item_type
-            for _ in range(dim):
-                nested_type = ARRAY(nested_type)
-
-            self.item_type = nested_type.item_type
-            self.dim = dim
-
-    return NestedArray
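With the `nested_array` factory removed, a multi-dimensional column is expressed by composing `ARRAY` directly, which is also how the reflection change later in this diff rebuilds nested types. A minimal sketch, assuming `ARRAY` accepts another `ARRAY` instance as its item type; the table and column names are made up:

```python
from sqlalchemy import Column, Integer, MetaData, Table
from pyobvector.schema.array import ARRAY

metadata = MetaData()
t = Table(
    "demo_nested_array",  # hypothetical table
    metadata,
    Column("id", Integer, primary_key=True),
    # previously: Column("matrix", nested_array(2)(Integer))
    Column("matrix", ARRAY(ARRAY(Integer()))),
)
```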
@@ -3,7 +3,7 @@ import re
 import logging
 from sqlalchemy.dialects.mysql.reflection import MySQLTableDefinitionParser, _re_compile, cleanup_text
 
-from pyobvector.schema.array import nested_array
+from pyobvector.schema.array import ARRAY
 
 logger = logging.getLogger(__name__)
 
@@ -49,10 +49,12 @@ class OceanBaseTableDefinitionParser(MySQLTableDefinitionParser):
            # r"(?:(?P<type>\S+) )?KEY"
            r"(?: +{iq}(?P<name>(?:{esc_fq}|[^{fq}])+){fq})?"
            r"(?: +USING +(?P<using_pre>\S+))?"
-           r" +\((?P<columns>.+?)\)"
+           r" +\((?P<columns>[^)]+)\)"
            r"(?: +USING +(?P<using_post>\S+))?"
-           r"(?: +(KEY_)?BLOCK_SIZE *[ =]? *(?P<keyblock>\S+) *(LOCAL)?)?"
+           r"(?: +WITH +\((?P<vector_params>[^)]+)\))?"
            r"(?: +WITH PARSER +(?P<parser>\S+))?"
+           r"(?: +PARSER_PROPERTIES=\((?P<parser_properties>[^)]+)\))?"
+           r"(?: +(KEY_)?BLOCK_SIZE *[ =]? *(?P<keyblock>\S+) *(LOCAL)?)?"
            r"(?: +COMMENT +(?P<comment>(\x27\x27|\x27([^\x27])*?\x27)+))?"
            r"(?: +/\*(?P<version_sql>.+)\*/ *)?"
            r",?$".format(iq=quotes["iq"], esc_fq=quotes["esc_fq"], fq=quotes["fq"])
@@ -100,7 +102,9 @@ class OceanBaseTableDefinitionParser(MySQLTableDefinitionParser):
         item_type_args = [int(v) for v in self._re_csv_int.findall(item_type_arg)]
 
         nested_level = coltype_with_args.lower().count('array')
-        type_instance = nested_array(nested_level)(item_type(*item_type_args))
+        type_instance = item_type(*item_type_args)
+        for _ in range(nested_level):
+            type_instance = ARRAY(type_instance)
 
         col_kw = {}
 
@@ -135,16 +139,15 @@ class OceanBaseTableDefinitionParser(MySQLTableDefinitionParser):
         if ret:
             tp, spec = ret
 
-            if tp is None:
-                return ret
-            if tp == "partition":
-                # do not handle partition
+            if tp is None or tp == "partition" or not isinstance(spec, dict):
                 return ret
+
             if tp == "fk_constraint":
-                if len(spec["table"]) == 2 and spec["table"][0] == self.default_schema:
-                    spec["table"] = spec["table"][1:]
-                if spec.get("onupdate", "").lower() == "restrict":
-                    spec["onupdate"] = None
-                if spec.get("ondelete", "").lower() == "restrict":
-                    spec["ondelete"] = None
+                table = spec.get("table", [])
+                if isinstance(table, list) and len(table) == 2 and table[0] == self.default_schema:
+                    spec["table"] = table[1:]
+
+                for action in ["onupdate", "ondelete"]:
+                    if (spec.get(action) or "").lower() == "restrict":
+                        spec[action] = None
         return ret
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: pyobvector
-Version: 0.2.13
+Version: 0.2.15
 Summary: A python SDK for OceanBase Vector Store, based on SQLAlchemy, compatible with Milvus API.
 Author: shanhaikang.shk
 Author-email: shanhaikang.shk@oceanbase.com
@@ -36,7 +36,7 @@ poetry install
 - install with pip:
 
 ```shell
-pip install pyobvector==0.2.13
+pip install pyobvector==0.2.14
 ```
 
 ## Build Doc
@@ -6,8 +6,8 @@ pyobvector/client/exceptions.py,sha256=CAsTHR9juYleRjYIa4bqk_lw14h8daBvChKoU0o19
 pyobvector/client/fts_index_param.py,sha256=hMCjA3Aecnt0uQQT6UQGTIIqdPk1M4gX4-zREDQygLs,1139
 pyobvector/client/index_param.py,sha256=3gXi66Ey1PO9x5_61CrH7DmPb496kviBQI5NT7nfbGc,6309
 pyobvector/client/milvus_like_client.py,sha256=CpPo6mkGE8iNFpKGBFof3h7E1VTzy1DAPGlFM9F_s8g,26373
-pyobvector/client/ob_vec_client.py,sha256=QZAZxtSgsS2Z8SgEtsSSUcCroaML_XS-cq7aK-sSxic,29718
-pyobvector/client/ob_vec_json_table_client.py,sha256=m0Oq41dXEil9S1YCK2_RGbSMziatqAItn8Osk-9rzJI,39066
+pyobvector/client/ob_vec_client.py,sha256=XRbsf9wT6obnbJTBV-xlseXBrkvMhkfmzis-gQKD6Os,31566
+pyobvector/client/ob_vec_json_table_client.py,sha256=rq80AfqAKhosLcrBFROAoINVSkr-48xlRH91Jt4pEwA,39246
 pyobvector/client/partitions.py,sha256=Bxwr5yVNlXwZc7SXBC03NeqL9giy4Fe6S2qZdHD8xGw,15621
 pyobvector/client/schema_type.py,sha256=u1LJsr1o9lxv2b_6KYu77RciFa1R_Qk69k_WT30x6BU,1582
 pyobvector/json_table/__init__.py,sha256=X5MmK3f10oyJleUUFZJFeunMEfzmf6P1f_7094b-FZc,554
@@ -15,14 +15,14 @@ pyobvector/json_table/json_value_returning_func.py,sha256=NWSV2zhe2-1KhIprQaFqOH
 pyobvector/json_table/oceanbase_dialect.py,sha256=lxpbWBQdK18LWXLmGyk_-ODv6VfnwGLHbcpsQMElOUo,4480
 pyobvector/json_table/virtual_data_type.py,sha256=uQh6ZQ0UbwpVO9TFegGeu4E8bXW7rdLHAXFQJdiEjLs,3467
 pyobvector/schema/__init__.py,sha256=EU8NH8Q-L05sFBGKPV6yIBUeh5f3awTkArdBJ7d4CvQ,2271
-pyobvector/schema/array.py,sha256=PoSBc3qCVdMJcLramZp95t69i15ES1J_bqnFANqQoRs,5255
+pyobvector/schema/array.py,sha256=WDWLZbCdu8stK8wlGWfKUjkhWifS8vbsfYUEEJsQOlQ,4163
 pyobvector/schema/dialect.py,sha256=mdRjn3roztCkk6RXbaB0Wn1uhT2BPS2y18MwL6wW-jo,1840
 pyobvector/schema/full_text_index.py,sha256=ohQX8uTPdRswEJONuN5A-bNv203d0N0b2BsJ7etx71g,2071
 pyobvector/schema/geo_srid_point.py,sha256=RwEoCgGTmXDc0le1B2E3mZudtqiFdMf2M0Va1ocmVSY,1210
 pyobvector/schema/gis_func.py,sha256=u7bqaB5qIylW8GvRdglLQL2H1SheQZNnAqgZrOGyrks,3118
 pyobvector/schema/match_against_func.py,sha256=ExTQJvAXHaZwBo1Sjy6IlnF1nF6D9xGUsF4f7zaP8Q0,1336
 pyobvector/schema/ob_table.py,sha256=wlb6Oo9LG-sr8XnG_wbX1Qi5CgnS0XUzNL5qTdsncoY,392
-pyobvector/schema/reflection.py,sha256=aWJrodN9B2NmCOLoagg_v4b-9ABJGLCEWoDb7CIjTPY,5745
+pyobvector/schema/reflection.py,sha256=ae8BYlbOWddyU6ly_bOcudsB1CKcD_OcUpAvLVgOW7o,5890
 pyobvector/schema/replace_stmt.py,sha256=FtGLXHz6DwzD0FOZPn1EZgXdbHZu-K9HIHS02rZqYrE,560
 pyobvector/schema/vec_dist_func.py,sha256=4GAWSrhFNDYooBpbBg604wDrByPrewp46Y4VeoDxV7Y,2986
 pyobvector/schema/vector.py,sha256=dFKfPcTOto0jNxVjhvDmJM7Q4wwp6Z-HcZ3K6oZxUMc,1120
@@ -30,7 +30,7 @@ pyobvector/schema/vector_index.py,sha256=aNtrEBUclc4s6QuqCZpu3Hj3OdjyhbWgtLiJzo6
 pyobvector/util/__init__.py,sha256=D9EgRDlcMSDhY3uI__vnCl45Or75dOXMWSval5P5fqs,251
 pyobvector/util/ob_version.py,sha256=ZIySam8q_MCiwctAiAHPB4GdAzGQiXEo1wVkc9IOTDU,1539
 pyobvector/util/vector.py,sha256=xyM-NuOyd78K7P3kinqyWvLIzEbf9c-4TKn_QVF7qgw,2265
-pyobvector-0.2.13.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
-pyobvector-0.2.13.dist-info/METADATA,sha256=fxRXFWsunoyvBZ3wW5r7FMo8JB6WZbYBj9ijOtF8tnU,6659
-pyobvector-0.2.13.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-pyobvector-0.2.13.dist-info/RECORD,,
+pyobvector-0.2.15.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+pyobvector-0.2.15.dist-info/METADATA,sha256=msAPB_jEYpVDpBtf3Dvj0-4mmy9OwfXpfP1kODTE38A,6659
+pyobvector-0.2.15.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+pyobvector-0.2.15.dist-info/RECORD,,