meerschaum 2.9.0rc2__py3-none-any.whl → 2.9.0rc3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -383,6 +383,10 @@ def get_pipe_data(
383
383
  params: Optional[str] = None,
384
384
  limit: int = MAX_RESPONSE_ROW_LIMIT,
385
385
  order: str = 'asc',
386
+ date_format: str = 'iso',
387
+ date_unit: str = 'us',
388
+ double_precision: int = 15,
389
+ geometry_format: str = 'wkb_hex',
386
390
  as_chunks: bool = False,
387
391
  chunk_interval: Optional[int] = None,
388
392
  curr_user = (
@@ -401,6 +405,21 @@ def get_pipe_data(
401
405
  The connector key to the instance on which the pipe is registered.
402
406
  Defaults to the configured value for `meerschaum:api_instance`.
403
407
 
408
+ date_format: str, default 'iso'
409
+ Serialization format for datetime values.
410
+ Accepted values are `'iso'` (ISO8601) and `'epoch'` (epoch milliseconds).
411
+
412
+ date_unit: str, default 'us'
413
+ Timestamp precision for serialization. Accepted values are `'s'` (seconds),
414
+ `'ms'` (milliseconds), `'us'` (microseconds), and `'ns'`.
415
+
416
+ double_precision: int, default 15
417
+ The number of decimal places to use when encoding floating point values (maximum 15).
418
+
419
+ geometry_format: str, default 'wkb_hex'
420
+ The serialization format for geometry data.
421
+ Accepted values are `geojson`, `wkb_hex`, and `wkt`.
422
+
404
423
  as_chunks: bool, default False
405
424
  If `True`, return a chunk token to be consumed by the `/chunks` endpoint.
406
425
  """
@@ -483,7 +502,7 @@ def get_pipe_data(
483
502
  )
484
503
  return fastapi.Response(
485
504
  json.dumps({
486
- 'chunks_cursor': chunks_cursor,
505
+ 'chunks_cursor': chunks_cursor_token,
487
506
  }),
488
507
  media_type='application/json',
489
508
  )
@@ -504,7 +523,13 @@ def get_pipe_data(
504
523
  detail="Could not fetch data with the given parameters.",
505
524
  )
506
525
 
507
- json_content = to_json(df)
526
+ json_content = to_json(
527
+ df,
528
+ date_format=date_format,
529
+ date_unit=date_unit,
530
+ geometry_format=geometry_format,
531
+ double_precision=double_precision,
532
+ )
508
533
  return fastapi.Response(
509
534
  json_content,
510
535
  media_type='application/json',
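Taken together, the route changes above simply forward the new query parameters into to_json. A minimal sketch of the equivalent direct call, assuming to_json is the helper defined in meerschaum/utils/dataframe.py (as the later hunks suggest) and using an illustrative DataFrame:

    import pandas as pd
    from meerschaum.utils.dataframe import to_json

    df = pd.DataFrame({
        'dt': pd.to_datetime(['2025-01-01T00:00:00Z']),
        'value': [1.23456789],
    })

    # Mirrors the keyword arguments the route forwards above.
    json_content = to_json(
        df,
        date_format='iso',          # or 'epoch'
        date_unit='us',             # 's', 'ms', 'us', or 'ns'
        double_precision=15,        # maximum supported precision
        geometry_format='wkb_hex',  # 'geojson', 'wkb_hex', or 'wkt'
    )
    print(json_content)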
@@ -2,4 +2,4 @@
2
2
  Specify the Meerschaum release version.
3
3
  """
4
4
 
5
- __version__ = "2.9.0rc2"
5
+ __version__ = "2.9.0rc3"
@@ -50,6 +50,8 @@ class ValkeyConnector(Connector):
50
50
  get_sync_time,
51
51
  get_pipe_rowcount,
52
52
  fetch_pipes_keys,
53
+ get_document_key,
54
+ get_table_quoted_doc_key,
53
55
  )
54
56
  from ._fetch import (
55
57
  fetch,
@@ -10,8 +10,9 @@ from datetime import datetime, timezone
10
10
 
11
11
  import meerschaum as mrsm
12
12
  from meerschaum.utils.typing import SuccessTuple, Any, Union, Optional, Dict, List, Tuple
13
- from meerschaum.utils.misc import json_serialize_datetime, string_to_dict
14
- from meerschaum.utils.warnings import warn
13
+ from meerschaum.utils.misc import string_to_dict
14
+ from meerschaum.utils.dtypes import json_serialize_value
15
+ from meerschaum.utils.warnings import warn, dprint
15
16
  from meerschaum.config.static import STATIC_CONFIG
16
17
 
17
18
  PIPES_TABLE: str = 'mrsm_pipes'
@@ -46,25 +47,15 @@ def serialize_document(doc: Dict[str, Any]) -> str:
46
47
  -------
47
48
  A serialized string for the document.
48
49
  """
49
- from meerschaum.utils.dtypes import serialize_bytes
50
50
  return json.dumps(
51
51
  doc,
52
- default=(
53
- lambda x: (
54
- json_serialize_datetime(x)
55
- if hasattr(x, 'tzinfo')
56
- else (
57
- serialize_bytes(x)
58
- if isinstance(x, bytes)
59
- else str(x)
60
- )
61
- )
62
- ),
52
+ default=json_serialize_value,
63
53
  separators=(',', ':'),
64
54
  sort_keys=True,
65
55
  )
66
56
 
67
57
 
58
+ @staticmethod
68
59
  def get_document_key(
69
60
  doc: Dict[str, Any],
70
61
  indices: List[str],
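The hand-rolled default= lambda removed above (timezone-aware datetimes via json_serialize_datetime, bytes via serialize_bytes, str() otherwise) is replaced by the shared json_serialize_value helper. A rough sketch of what such a catch-all serializer must cover, based only on the branches visible in the removed lambda; the real helper in meerschaum.utils.dtypes may handle additional types:

    import base64
    import json
    from datetime import datetime, timezone

    # Approximates the removed lambda: ISO-format tz-aware datetimes,
    # base64-encode bytes (matching serialize_bytes later in this diff), str() for the rest.
    def fallback_serializer(value):
        if hasattr(value, 'tzinfo'):
            return value.isoformat()
        if isinstance(value, bytes):
            return base64.b64encode(value).decode('utf-8')
        return str(value)

    doc = {'dt': datetime(2025, 1, 1, tzinfo=timezone.utc), 'blob': b'\x00\x01', 'id': 1}
    print(json.dumps(doc, default=fallback_serializer, separators=(',', ':'), sort_keys=True))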
@@ -91,25 +82,39 @@ def get_document_key(
91
82
  from meerschaum.utils.dtypes import coerce_timezone
92
83
  index_vals = {
93
84
  key: (
94
- str(val)
85
+ str(val).replace(':', COLON)
95
86
  if not isinstance(val, datetime)
96
87
  else str(int(coerce_timezone(val).replace(tzinfo=timezone.utc).timestamp()))
97
88
  )
98
89
  for key, val in doc.items()
99
- if key in indices
100
- } if indices else {}
101
- indices_str = ((table_name + ':indices:') if table_name else '') + ','.join(
102
- sorted(
103
- [
104
- f'{key}{COLON}{val}'
105
- for key, val in index_vals.items()
106
- ]
90
+ if ((key in indices) if indices else True)
91
+ }
92
+ indices_str = (
93
+ (
94
+ (
95
+ (
96
+ table_name
97
+ + ':'
98
+ + ('indices:' if True else '')
99
+ )
100
+ )
101
+ if table_name
102
+ else ''
103
+ ) + ','.join(
104
+ sorted(
105
+ [
106
+ f'{key}{COLON}{val}'
107
+ for key, val in index_vals.items()
108
+ ]
109
+ )
107
110
  )
108
- ) if indices else serialize_document(doc)
111
+ )
109
112
  return indices_str
110
113
 
111
114
 
115
+ @classmethod
112
116
  def get_table_quoted_doc_key(
117
+ cls,
113
118
  table_name: str,
114
119
  doc: Dict[str, Any],
115
120
  indices: List[str],
@@ -120,7 +125,7 @@ def get_table_quoted_doc_key(
120
125
  """
121
126
  return json.dumps(
122
127
  {
123
- get_document_key(doc, indices, table_name): serialize_document(doc),
128
+ cls.get_document_key(doc, indices, table_name): serialize_document(doc),
124
129
  **(
125
130
  {datetime_column: doc.get(datetime_column, 0)}
126
131
  if datetime_column
@@ -129,7 +134,7 @@ def get_table_quoted_doc_key(
129
134
  },
130
135
  sort_keys=True,
131
136
  separators=(',', ':'),
132
- default=(lambda x: json_serialize_datetime(x) if hasattr(x, 'tzinfo') else str(x)),
137
+ default=json_serialize_value,
133
138
  )
134
139
 
135
140
 
@@ -377,7 +382,7 @@ def delete_pipe(
377
382
  doc = docs[0]
378
383
  doc_str = json.dumps(
379
384
  doc,
380
- default=(lambda x: json_serialize_datetime(x) if hasattr(x, 'tzinfo') else str(x)),
385
+ default=json_serialize_value,
381
386
  separators=(',', ':'),
382
387
  sort_keys=True,
383
388
  )
@@ -445,9 +450,13 @@ def get_pipe_data(
445
450
  ]
446
451
  try:
447
452
  docs_strings = [
448
- self.get(get_document_key(
449
- doc, indices, table_name
450
- ))
453
+ self.get(
454
+ self.get_document_key(
455
+ doc,
456
+ indices,
457
+ table_name,
458
+ )
459
+ )
451
460
  for doc in ix_docs
452
461
  ]
453
462
  except Exception as e:
@@ -535,7 +544,7 @@ def sync_pipe(
535
544
  def _serialize_indices_docs(_docs):
536
545
  return [
537
546
  {
538
- 'ix': get_document_key(doc, indices),
547
+ 'ix': self.get_document_key(doc, indices),
539
548
  **(
540
549
  {
541
550
  dt_col: doc.get(dt_col, 0)
@@ -594,7 +603,7 @@ def sync_pipe(
594
603
  unseen_docs = unseen_df.to_dict(orient='records') if unseen_df is not None else []
595
604
  unseen_indices_docs = _serialize_indices_docs(unseen_docs)
596
605
  unseen_ix_vals = {
597
- get_document_key(doc, indices, table_name): serialize_document(doc)
606
+ self.get_document_key(doc, indices, table_name): serialize_document(doc)
598
607
  for doc in unseen_docs
599
608
  }
600
609
  for key, val in unseen_ix_vals.items():
@@ -615,7 +624,7 @@ def sync_pipe(
615
624
 
616
625
  update_docs = update_df.to_dict(orient='records') if update_df is not None else []
617
626
  update_ix_docs = {
618
- get_document_key(doc, indices, table_name): doc
627
+ self.get_document_key(doc, indices, table_name): doc
619
628
  for doc in update_docs
620
629
  }
621
630
  existing_docs_data = {
@@ -633,7 +642,7 @@ def sync_pipe(
633
642
  if key not in existing_docs
634
643
  }
635
644
  new_ix_vals = {
636
- get_document_key(doc, indices, table_name): serialize_document(doc)
645
+ self.get_document_key(doc, indices, table_name): serialize_document(doc)
637
646
  for doc in new_update_docs.values()
638
647
  }
639
648
  for key, val in new_ix_vals.items():
@@ -743,8 +752,8 @@ def clear_pipe(
743
752
  table_name = self.quote_table(pipe.target)
744
753
  indices = [col for col in pipe.columns.values() if col]
745
754
  for doc in docs:
746
- set_doc_key = get_document_key(doc, indices)
747
- table_doc_key = get_document_key(doc, indices, table_name)
755
+ set_doc_key = self.get_document_key(doc, indices)
756
+ table_doc_key = self.get_document_key(doc, indices, table_name)
748
757
  try:
749
758
  if dt_col:
750
759
  self.client.zrem(table_name, set_doc_key)
@@ -826,13 +835,15 @@ def get_pipe_rowcount(
826
835
  return 0
827
836
 
828
837
  try:
829
- if begin is None and end is None and params is None:
838
+ if begin is None and end is None and not params:
830
839
  return (
831
840
  self.client.zcard(table_name)
832
841
  if dt_col
833
- else self.client.llen(table_name)
842
+ else self.client.scard(table_name)
834
843
  )
835
- except Exception:
844
+ except Exception as e:
845
+ if debug:
846
+ dprint(f"Failed to get rowcount for {pipe}:\n{e}")
836
847
  return None
837
848
 
838
849
  df = pipe.get_data(begin=begin, end=end, params=params, debug=debug)
@@ -137,6 +137,7 @@ class Pipe:
137
137
  _persist_new_numeric_columns,
138
138
  _persist_new_uuid_columns,
139
139
  _persist_new_bytes_columns,
140
+ _persist_new_geometry_columns,
140
141
  )
141
142
  from ._verify import (
142
143
  verify,
@@ -158,6 +158,7 @@ def sync(
158
158
  'error_callback': error_callback,
159
159
  'sync_chunks': sync_chunks,
160
160
  'chunksize': chunksize,
161
+ 'safe_copy': True,
161
162
  })
162
163
 
163
164
  ### NOTE: Invalidate `_exists` cache before and after syncing.
@@ -268,6 +269,7 @@ def sync(
268
269
  **kw
269
270
  )
270
271
  )
272
+ kw['safe_copy'] = False
271
273
  except Exception as e:
272
274
  get_console().print_exception(
273
275
  suppress=[
@@ -402,6 +404,7 @@ def sync(
402
404
  self._persist_new_numeric_columns(df, debug=debug)
403
405
  self._persist_new_uuid_columns(df, debug=debug)
404
406
  self._persist_new_bytes_columns(df, debug=debug)
407
+ self._persist_new_geometry_columns(df, debug=debug)
405
408
 
406
409
  if debug:
407
410
  dprint(
@@ -1009,7 +1012,7 @@ def _persist_new_numeric_columns(self, df, debug: bool = False) -> SuccessTuple:
1009
1012
 
1010
1013
  self._attributes_sync_time = None
1011
1014
  dtypes = self.parameters.get('dtypes', {})
1012
- dtypes.update({col: 'numeric' for col in numeric_cols})
1015
+ dtypes.update({col: 'numeric' for col in new_numeric_cols})
1013
1016
  self.parameters['dtypes'] = dtypes
1014
1017
  if not self.temporary:
1015
1018
  edit_success, edit_msg = self.edit(interactive=False, debug=debug)
@@ -1034,7 +1037,7 @@ def _persist_new_uuid_columns(self, df, debug: bool = False) -> SuccessTuple:
1034
1037
 
1035
1038
  self._attributes_sync_time = None
1036
1039
  dtypes = self.parameters.get('dtypes', {})
1037
- dtypes.update({col: 'uuid' for col in uuid_cols})
1040
+ dtypes.update({col: 'uuid' for col in new_uuid_cols})
1038
1041
  self.parameters['dtypes'] = dtypes
1039
1042
  if not self.temporary:
1040
1043
  edit_success, edit_msg = self.edit(interactive=False, debug=debug)
@@ -1059,7 +1062,7 @@ def _persist_new_json_columns(self, df, debug: bool = False) -> SuccessTuple:
1059
1062
 
1060
1063
  self._attributes_sync_time = None
1061
1064
  dtypes = self.parameters.get('dtypes', {})
1062
- dtypes.update({col: 'json' for col in json_cols})
1065
+ dtypes.update({col: 'json' for col in new_json_cols})
1063
1066
  self.parameters['dtypes'] = dtypes
1064
1067
 
1065
1068
  if not self.temporary:
@@ -1085,7 +1088,64 @@ def _persist_new_bytes_columns(self, df, debug: bool = False) -> SuccessTuple:
1085
1088
 
1086
1089
  self._attributes_sync_time = None
1087
1090
  dtypes = self.parameters.get('dtypes', {})
1088
- dtypes.update({col: 'bytes' for col in bytes_cols})
1091
+ dtypes.update({col: 'bytes' for col in new_bytes_cols})
1092
+ self.parameters['dtypes'] = dtypes
1093
+
1094
+ if not self.temporary:
1095
+ edit_success, edit_msg = self.edit(interactive=False, debug=debug)
1096
+ if not edit_success:
1097
+ warn(f"Unable to update bytes dtypes for {self}:\n{edit_msg}")
1098
+
1099
+ return edit_success, edit_msg
1100
+
1101
+ return True, "Success"
1102
+
1103
+
1104
+ def _persist_new_geometry_columns(self, df, debug: bool = False) -> SuccessTuple:
1105
+ """
1106
+ Check for new `geometry` columns and update the parameters.
1107
+ """
1108
+ from meerschaum.utils.dataframe import get_geometry_cols
1109
+ geometry_cols_types_srids = get_geometry_cols(df, with_types_srids=True)
1110
+ existing_geometry_cols = [
1111
+ col
1112
+ for col, typ in self.dtypes.items()
1113
+ if typ.startswith('geometry') or typ.startswith('geography')
1114
+ ]
1115
+ new_geometry_cols = [
1116
+ col
1117
+ for col in geometry_cols_types_srids
1118
+ if col not in existing_geometry_cols
1119
+ ]
1120
+ if not new_geometry_cols:
1121
+ return True, "Success"
1122
+
1123
+ self._attributes_sync_time = None
1124
+ dtypes = self.parameters.get('dtypes', {})
1125
+
1126
+ new_cols_types = {}
1127
+ for col, (geometry_type, srid) in geometry_cols_types_srids.items():
1128
+ if col not in new_geometry_cols:
1129
+ continue
1130
+
1131
+ new_dtype = "geometry"
1132
+ modifier = ""
1133
+ if not srid and geometry_type.lower() == 'geometry':
1134
+ new_cols_types[col] = new_dtype
1135
+ continue
1136
+
1137
+ modifier = "["
1138
+ if geometry_type.lower() != 'geometry':
1139
+ modifier += f"{geometry_type}"
1140
+
1141
+ if srid:
1142
+ if modifier != '[':
1143
+ modifier += ", "
1144
+ modifier += f"{srid}"
1145
+ modifier += "]"
1146
+ new_cols_types[col] = f"{new_dtype}{modifier}"
1147
+
1148
+ dtypes.update(new_cols_types)
1089
1149
  self.parameters['dtypes'] = dtypes
1090
1150
 
1091
1151
  if not self.temporary:
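The modifier-building loop above yields dtype strings such as geometry, geometry[Point], geometry[4326], or geometry[Point, 4326]. A standalone sketch of the same string construction (build_geometry_dtype is a hypothetical name used only for illustration):

    def build_geometry_dtype(geometry_type: str, srid: int) -> str:
        # Reproduces the dtype-string logic from _persist_new_geometry_columns above.
        if not srid and geometry_type.lower() == 'geometry':
            return 'geometry'
        modifier = '['
        if geometry_type.lower() != 'geometry':
            modifier += f"{geometry_type}"
        if srid:
            if modifier != '[':
                modifier += ', '
            modifier += f"{srid}"
        modifier += ']'
        return f"geometry{modifier}"

    assert build_geometry_dtype('geometry', 0) == 'geometry'
    assert build_geometry_dtype('Point', 0) == 'geometry[Point]'
    assert build_geometry_dtype('geometry', 4326) == 'geometry[4326]'
    assert build_geometry_dtype('Point', 4326) == 'geometry[Point, 4326]'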
@@ -871,7 +871,10 @@ def get_bytes_cols(df: 'pd.DataFrame') -> List[str]:
871
871
  ]
872
872
 
873
873
 
874
- def get_geometry_cols(df: 'pd.DataFrame') -> List[str]:
874
+ def get_geometry_cols(
875
+ df: 'pd.DataFrame',
876
+ with_types_srids: bool = False,
877
+ ) -> Union[List[str], Dict[str, Any]]:
875
878
  """
876
879
  Get the columns which contain shapely objects from a Pandas DataFrame.
877
880
 
@@ -880,9 +883,13 @@ def get_geometry_cols(df: 'pd.DataFrame') -> List[str]:
880
883
  df: pd.DataFrame
881
884
  The DataFrame which may contain geometry data.
882
885
 
886
+ with_types_srids: bool, default False
887
+ If `True`, return a dictionary mapping columns to geometry types and SRIDs.
888
+
883
889
  Returns
884
890
  -------
885
891
  A list of columns to treat as `geometry`.
892
+ If `with_types_srids`, return a dictionary mapping columns to tuples in the form (type, SRID).
886
893
  """
887
894
  if df is None:
888
895
  return []
@@ -898,7 +905,7 @@ def get_geometry_cols(df: 'pd.DataFrame') -> List[str]:
898
905
  col: df[col].first_valid_index()
899
906
  for col in df.columns
900
907
  }
901
- return [
908
+ geo_cols = [
902
909
  col
903
910
  for col, ix in cols_indices.items()
904
911
  if (
@@ -907,6 +914,31 @@ def get_geometry_cols(df: 'pd.DataFrame') -> List[str]:
907
914
  'shapely' in str(type(df.loc[ix][col]))
908
915
  )
909
916
  ]
917
+ if not with_types_srids:
918
+ return geo_cols
919
+
920
+ gpd = mrsm.attempt_import('geopandas', lazy=False)
921
+ geo_cols_types_srids = {}
922
+ for col in geo_cols:
923
+ try:
924
+ sample_geo_series = gpd.GeoSeries(df[col], crs=None)
925
+ geometry_types = {geom.geom_type for geom in sample_geo_series}
926
+ srid = (
927
+ (
928
+ sample_geo_series.crs.sub_crs_list[0].to_epsg()
929
+ if sample_geo_series.crs.is_compound
930
+ else sample_geo_series.crs.to_epsg()
931
+ )
932
+ if sample_geo_series.crs
933
+ else 0
934
+ )
935
+ geometry_type = list(geometry_types)[0] if len(geometry_types) == 1 else 'geometry'
936
+ except Exception:
937
+ srid = 0
938
+ geometry_type = 'geometry'
939
+ geo_cols_types_srids[col] = (geometry_type, srid)
940
+
941
+ return geo_cols_types_srids
910
942
 
911
943
 
912
944
  def enforce_dtypes(
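With with_types_srids=True, get_geometry_cols returns a mapping from column name to a (geometry type, SRID) tuple instead of a plain list. A hedged usage sketch; it requires shapely and geopandas (now provided by the new 'gis' extra), and SRID detection depends on whether a CRS is attached to the series:

    import pandas as pd
    import shapely
    from meerschaum.utils.dataframe import get_geometry_cols

    df = pd.DataFrame({
        'id': [1, 2],
        'geom': [shapely.Point(0, 0), shapely.Point(1, 1)],
    })

    print(get_geometry_cols(df))
    # ['geom']

    print(get_geometry_cols(df, with_types_srids=True))
    # Something like {'geom': ('Point', 0)} -- the SRID falls back to 0 when no CRS is set.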
@@ -1658,6 +1690,8 @@ def to_json(
1658
1690
  orient: str = 'records',
1659
1691
  date_format: str = 'iso',
1660
1692
  date_unit: str = 'us',
1693
+ double_precision: int = 15,
1694
+ geometry_format: str = 'geojson',
1661
1695
  **kwargs: Any
1662
1696
  ) -> str:
1663
1697
  """
@@ -1677,11 +1711,19 @@ def to_json(
1677
1711
  date_unit: str, default 'us'
1678
1712
  The precision of the timestamps.
1679
1713
 
1714
+ double_precision: int, default 15
1715
+ The number of decimal places to use when encoding floating point values (maximum 15).
1716
+
1717
+ geometry_format: str, default 'geojson'
1718
+ The serialization format for geometry data.
1719
+ Accepted values are `geojson`, `wkb_hex`, and `wkt`.
1720
+
1680
1721
  Returns
1681
1722
  -------
1682
1723
  A JSON string.
1683
1724
  """
1684
1725
  import warnings
1726
+ import functools
1685
1727
  from meerschaum.utils.packages import import_pandas
1686
1728
  from meerschaum.utils.dtypes import (
1687
1729
  serialize_bytes,
@@ -1704,10 +1746,16 @@ def to_json(
1704
1746
  with warnings.catch_warnings():
1705
1747
  warnings.simplefilter("ignore")
1706
1748
  for col in geometry_cols:
1707
- df[col] = df[col].apply(serialize_geometry)
1749
+ df[col] = df[col].apply(
1750
+ functools.partial(
1751
+ serialize_geometry,
1752
+ geometry_format=geometry_format,
1753
+ )
1754
+ )
1708
1755
  return df.infer_objects(copy=False).fillna(pd.NA).to_json(
1709
1756
  date_format=date_format,
1710
1757
  date_unit=date_unit,
1758
+ double_precision=double_precision,
1711
1759
  orient=orient,
1712
1760
  **kwargs
1713
1761
  )
@@ -7,6 +7,7 @@ Utility functions for working with data types.
7
7
  """
8
8
 
9
9
  import traceback
10
+ import json
10
11
  import uuid
11
12
  from datetime import timezone, datetime
12
13
  from decimal import Decimal, Context, InvalidOperation, ROUND_HALF_UP
@@ -28,6 +29,7 @@ MRSM_ALIAS_DTYPES: Dict[str, str] = {
28
29
  'guid': 'uuid',
29
30
  'UUID': 'uuid',
30
31
  'geom': 'geometry',
32
+ 'geog': 'geography',
31
33
  }
32
34
  MRSM_PD_DTYPES: Dict[Union[str, None], str] = {
33
35
  'json': 'object',
@@ -76,6 +78,7 @@ def to_pandas_dtype(dtype: str) -> str:
76
78
  return get_pd_type_from_db_type(dtype)
77
79
 
78
80
  from meerschaum.utils.packages import attempt_import
81
+ _ = attempt_import('pyarrow', lazy=False)
79
82
  pandas = attempt_import('pandas', lazy=False)
80
83
 
81
84
  try:
@@ -294,10 +297,21 @@ def attempt_cast_to_geometry(value: Any) -> Any:
294
297
  """
295
298
  Given a value, attempt to coerce it into a `shapely` (`geometry`) object.
296
299
  """
297
- shapely_wkt, shapely_wkb = mrsm.attempt_import('shapely.wkt', 'shapely.wkb', lazy=False)
300
+ shapely, shapely_wkt, shapely_wkb = mrsm.attempt_import(
301
+ 'shapely',
302
+ 'shapely.wkt',
303
+ 'shapely.wkb',
304
+ lazy=False,
305
+ )
298
306
  if 'shapely' in str(type(value)):
299
307
  return value
300
308
 
309
+ if isinstance(value, (dict, list)):
310
+ try:
311
+ return shapely.from_geojson(json.dumps(value))
312
+ except Exception as e:
313
+ return value
314
+
301
315
  value_is_wkt = geometry_is_wkt(value)
302
316
  if value_is_wkt is None:
303
317
  return value
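attempt_cast_to_geometry now also accepts GeoJSON-like dicts and lists by round-tripping them through shapely.from_geojson. The same conversion in isolation:

    import json
    import shapely

    geojson_doc = {'type': 'Point', 'coordinates': [10.0, 20.0]}

    # Same round-trip used by the new dict/list branch above.
    geom = shapely.from_geojson(json.dumps(geojson_doc))
    print(geom)  # POINT (10 20)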
@@ -327,6 +341,9 @@ def geometry_is_wkt(value: Union[str, bytes]) -> Union[bool, None]:
327
341
  Return `None` if `value` should be parsed as neither.
328
342
  """
329
343
  import re
344
+ if not isinstance(value, (str, bytes)):
345
+ return None
346
+
330
347
  if isinstance(value, bytes):
331
348
  return False
332
349
 
@@ -521,7 +538,11 @@ def serialize_bytes(data: bytes) -> str:
521
538
  return base64.b64encode(data).decode('utf-8')
522
539
 
523
540
 
524
- def serialize_geometry(geom: Any, as_wkt: bool = False) -> str:
541
+ def serialize_geometry(
542
+ geom: Any,
543
+ geometry_format: str = 'wkb_hex',
544
+ as_wkt: bool = False,
545
+ ) -> Union[str, Dict[str, Any]]:
525
546
  """
526
547
  Serialize geometry data as a hex-encoded well-known-binary string.
527
548
 
@@ -530,16 +551,22 @@ def serialize_geometry(geom: Any, as_wkt: bool = False) -> str:
530
551
  geom: Any
531
552
  The potential geometry data to be serialized.
532
553
 
533
- as_wkt, bool, default False
534
- If `True`, serialize geometry data as well-known text (WKT)
535
- instead of well-known binary (WKB).
554
+ geometry_format: str, default 'wkb_hex'
555
+ The serialization format for geometry data.
556
+ Accepted formats are `wkb_hex` (well-known binary hex string),
557
+ `wkt` (well-known text), and `geojson`.
536
558
 
537
559
  Returns
538
560
  -------
539
561
  A string containing the geometry data.
540
562
  """
563
+ shapely = mrsm.attempt_import('shapely', lazy=False)
564
+ if geometry_format == 'geojson':
565
+ geojson_str = shapely.to_geojson(geom)
566
+ return json.loads(geojson_str)
567
+
541
568
  if hasattr(geom, 'wkb_hex'):
542
- return geom.wkb_hex if not as_wkt else geom.wkt
569
+ return geom.wkb_hex if geometry_format == 'wkb_hex' else geom.wkt
543
570
 
544
571
  return str(geom)
545
572
 
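serialize_geometry now dispatches on geometry_format rather than the old as_wkt flag. A hedged round-trip sketch, assuming serialize_geometry is importable from meerschaum.utils.dtypes alongside serialize_bytes:

    import shapely
    from meerschaum.utils.dtypes import serialize_geometry

    point = shapely.Point(1.0, 2.0)

    print(serialize_geometry(point, geometry_format='wkt'))      # POINT (1 2)
    print(serialize_geometry(point, geometry_format='wkb_hex'))  # hex-encoded WKB string
    print(serialize_geometry(point, geometry_format='geojson'))  # {'type': 'Point', 'coordinates': [1.0, 2.0]}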
@@ -711,6 +738,8 @@ def get_geometry_type_srid(
711
738
  ('Point', 4376)
712
739
  """
713
740
  from meerschaum.utils.misc import is_int
741
+ ### NOTE: PostGIS syntax must also be parsed.
742
+ dtype = dtype.replace('(', '[').replace(')', ']')
714
743
  bare_dtype = dtype.split('[', maxsplit=1)[0]
715
744
  modifier = dtype.split(bare_dtype, maxsplit=1)[-1].lstrip('[').rstrip(']')
716
745
  if not modifier:
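Normalizing parentheses to brackets lets one parser accept both Meerschaum-style and PostGIS-style modifiers. A hedged example of the calls this enables; per the surrounding docstring, the return value is a (geometry type, SRID) tuple:

    from meerschaum.utils.dtypes import get_geometry_type_srid

    # Meerschaum-style dtype modifier.
    print(get_geometry_type_srid('geometry[Point, 4326]'))

    # PostGIS-style syntax is rewritten to the bracket form before parsing.
    print(get_geometry_type_srid('GEOMETRY(POINT, 4326)'))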
@@ -74,10 +74,12 @@ DB_TO_PD_DTYPES: Dict[str, Union[str, Dict[str, str]]] = {
74
74
  'DOUBLE': 'float64[pyarrow]',
75
75
  'DECIMAL': 'numeric',
76
76
  'BIGINT': 'int64[pyarrow]',
77
- 'INT': 'int64[pyarrow]',
78
- 'INTEGER': 'int64[pyarrow]',
77
+ 'INT': 'int32[pyarrow]',
78
+ 'INTEGER': 'int32[pyarrow]',
79
79
  'NUMBER': 'numeric',
80
80
  'NUMERIC': 'numeric',
81
+ 'GEOMETRY': 'geometry',
82
+ 'GEOMETRY(GEOMETRY)': 'geometry',
81
83
  'TIMESTAMP': 'datetime64[ns]',
82
84
  'TIMESTAMP WITHOUT TIMEZONE': 'datetime64[ns]',
83
85
  'TIMESTAMP WITH TIMEZONE': 'datetime64[ns, UTC]',
@@ -120,6 +122,8 @@ DB_TO_PD_DTYPES: Dict[str, Union[str, Dict[str, str]]] = {
120
122
  'BYTE': 'bytes',
121
123
  'LOB': 'bytes',
122
124
  'BINARY': 'bytes',
125
+ 'GEOMETRY': 'geometry',
126
+ 'GEOGRAPHY': 'geography',
123
127
  },
124
128
  'default': 'object',
125
129
  }
@@ -139,6 +143,90 @@ PD_TO_DB_DTYPES_FLAVORS: Dict[str, Dict[str, str]] = {
139
143
  'cockroachdb': 'BIGINT',
140
144
  'default': 'INT',
141
145
  },
146
+ 'uint': {
147
+ 'timescaledb': 'BIGINT',
148
+ 'postgresql': 'BIGINT',
149
+ 'postgis': 'BIGINT',
150
+ 'mariadb': 'BIGINT',
151
+ 'mysql': 'BIGINT',
152
+ 'mssql': 'BIGINT',
153
+ 'oracle': 'INT',
154
+ 'sqlite': 'BIGINT',
155
+ 'duckdb': 'BIGINT',
156
+ 'citus': 'BIGINT',
157
+ 'cockroachdb': 'BIGINT',
158
+ 'default': 'INT',
159
+ },
160
+ 'int8': {
161
+ 'timescaledb': 'SMALLINT',
162
+ 'postgresql': 'SMALLINT',
163
+ 'postgis': 'SMALLINT',
164
+ 'mariadb': 'SMALLINT',
165
+ 'mysql': 'SMALLINT',
166
+ 'mssql': 'SMALLINT',
167
+ 'oracle': 'INT',
168
+ 'sqlite': 'INT',
169
+ 'duckdb': 'SMALLINT',
170
+ 'citus': 'SMALLINT',
171
+ 'cockroachdb': 'SMALLINT',
172
+ 'default': 'INT',
173
+ },
174
+ 'uint8': {
175
+ 'timescaledb': 'SMALLINT',
176
+ 'postgresql': 'SMALLINT',
177
+ 'postgis': 'SMALLINT',
178
+ 'mariadb': 'SMALLINT',
179
+ 'mysql': 'SMALLINT',
180
+ 'mssql': 'SMALLINT',
181
+ 'oracle': 'INT',
182
+ 'sqlite': 'INT',
183
+ 'duckdb': 'SMALLINT',
184
+ 'citus': 'SMALLINT',
185
+ 'cockroachdb': 'SMALLINT',
186
+ 'default': 'INT',
187
+ },
188
+ 'int16': {
189
+ 'timescaledb': 'SMALLINT',
190
+ 'postgresql': 'SMALLINT',
191
+ 'postgis': 'SMALLINT',
192
+ 'mariadb': 'SMALLINT',
193
+ 'mysql': 'SMALLINT',
194
+ 'mssql': 'SMALLINT',
195
+ 'oracle': 'INT',
196
+ 'sqlite': 'INT',
197
+ 'duckdb': 'SMALLINT',
198
+ 'citus': 'SMALLINT',
199
+ 'cockroachdb': 'SMALLINT',
200
+ 'default': 'INT',
201
+ },
202
+ 'int32': {
203
+ 'timescaledb': 'INT',
204
+ 'postgresql': 'INT',
205
+ 'postgis': 'INT',
206
+ 'mariadb': 'INT',
207
+ 'mysql': 'INT',
208
+ 'mssql': 'INT',
209
+ 'oracle': 'INT',
210
+ 'sqlite': 'INT',
211
+ 'duckdb': 'INT',
212
+ 'citus': 'INT',
213
+ 'cockroachdb': 'INT',
214
+ 'default': 'INT',
215
+ },
216
+ 'int64': {
217
+ 'timescaledb': 'BIGINT',
218
+ 'postgresql': 'BIGINT',
219
+ 'postgis': 'BIGINT',
220
+ 'mariadb': 'BIGINT',
221
+ 'mysql': 'BIGINT',
222
+ 'mssql': 'BIGINT',
223
+ 'oracle': 'INT',
224
+ 'sqlite': 'BIGINT',
225
+ 'duckdb': 'BIGINT',
226
+ 'citus': 'BIGINT',
227
+ 'cockroachdb': 'BIGINT',
228
+ 'default': 'INT',
229
+ },
142
230
  'float': {
143
231
  'timescaledb': 'DOUBLE PRECISION',
144
232
  'postgresql': 'DOUBLE PRECISION',
@@ -380,6 +468,90 @@ PD_TO_SQLALCHEMY_DTYPES_FLAVORS: Dict[str, Dict[str, str]] = {
380
468
  'cockroachdb': 'BigInteger',
381
469
  'default': 'BigInteger',
382
470
  },
471
+ 'uint': {
472
+ 'timescaledb': 'BigInteger',
473
+ 'postgresql': 'BigInteger',
474
+ 'postgis': 'BigInteger',
475
+ 'mariadb': 'BigInteger',
476
+ 'mysql': 'BigInteger',
477
+ 'mssql': 'BigInteger',
478
+ 'oracle': 'BigInteger',
479
+ 'sqlite': 'BigInteger',
480
+ 'duckdb': 'BigInteger',
481
+ 'citus': 'BigInteger',
482
+ 'cockroachdb': 'BigInteger',
483
+ 'default': 'BigInteger',
484
+ },
485
+ 'int8': {
486
+ 'timescaledb': 'SmallInteger',
487
+ 'postgresql': 'SmallInteger',
488
+ 'postgis': 'SmallInteger',
489
+ 'mariadb': 'SmallInteger',
490
+ 'mysql': 'SmallInteger',
491
+ 'mssql': 'SmallInteger',
492
+ 'oracle': 'SmallInteger',
493
+ 'sqlite': 'SmallInteger',
494
+ 'duckdb': 'SmallInteger',
495
+ 'citus': 'SmallInteger',
496
+ 'cockroachdb': 'SmallInteger',
497
+ 'default': 'SmallInteger',
498
+ },
499
+ 'uint8': {
500
+ 'timescaledb': 'SmallInteger',
501
+ 'postgresql': 'SmallInteger',
502
+ 'postgis': 'SmallInteger',
503
+ 'mariadb': 'SmallInteger',
504
+ 'mysql': 'SmallInteger',
505
+ 'mssql': 'SmallInteger',
506
+ 'oracle': 'SmallInteger',
507
+ 'sqlite': 'SmallInteger',
508
+ 'duckdb': 'SmallInteger',
509
+ 'citus': 'SmallInteger',
510
+ 'cockroachdb': 'SmallInteger',
511
+ 'default': 'SmallInteger',
512
+ },
513
+ 'int16': {
514
+ 'timescaledb': 'SmallInteger',
515
+ 'postgresql': 'SmallInteger',
516
+ 'postgis': 'SmallInteger',
517
+ 'mariadb': 'SmallInteger',
518
+ 'mysql': 'SmallInteger',
519
+ 'mssql': 'SmallInteger',
520
+ 'oracle': 'SmallInteger',
521
+ 'sqlite': 'SmallInteger',
522
+ 'duckdb': 'SmallInteger',
523
+ 'citus': 'SmallInteger',
524
+ 'cockroachdb': 'SmallInteger',
525
+ 'default': 'SmallInteger',
526
+ },
527
+ 'int32': {
528
+ 'timescaledb': 'Integer',
529
+ 'postgresql': 'Integer',
530
+ 'postgis': 'Integer',
531
+ 'mariadb': 'Integer',
532
+ 'mysql': 'Integer',
533
+ 'mssql': 'Integer',
534
+ 'oracle': 'Integer',
535
+ 'sqlite': 'Integer',
536
+ 'duckdb': 'Integer',
537
+ 'citus': 'Integer',
538
+ 'cockroachdb': 'Integer',
539
+ 'default': 'Integer',
540
+ },
541
+ 'int64': {
542
+ 'timescaledb': 'BigInteger',
543
+ 'postgresql': 'BigInteger',
544
+ 'postgis': 'BigInteger',
545
+ 'mariadb': 'BigInteger',
546
+ 'mysql': 'BigInteger',
547
+ 'mssql': 'BigInteger',
548
+ 'oracle': 'BigInteger',
549
+ 'sqlite': 'BigInteger',
550
+ 'duckdb': 'BigInteger',
551
+ 'citus': 'BigInteger',
552
+ 'cockroachdb': 'BigInteger',
553
+ 'default': 'BigInteger',
554
+ },
383
555
  'float': {
384
556
  'timescaledb': 'Float',
385
557
  'postgresql': 'Float',
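The new fixed-width integer entries make the per-flavor lookups explicit. A few spot checks with values copied directly from the tables above (module path taken from the RECORD listing at the end of this diff):

    from meerschaum.utils.dtypes.sql import (
        PD_TO_DB_DTYPES_FLAVORS,
        PD_TO_SQLALCHEMY_DTYPES_FLAVORS,
    )

    assert PD_TO_DB_DTYPES_FLAVORS['int16']['postgresql'] == 'SMALLINT'
    assert PD_TO_DB_DTYPES_FLAVORS['int32']['duckdb'] == 'INT'
    assert PD_TO_SQLALCHEMY_DTYPES_FLAVORS['int64']['mssql'] == 'BigInteger'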
@@ -596,7 +768,7 @@ def get_pd_type_from_db_type(db_type: str, allow_custom_dtypes: bool = True) ->
596
768
  -------
597
769
  The equivalent datatype for a pandas DataFrame.
598
770
  """
599
- from meerschaum.utils.dtypes import are_dtypes_equal
771
+ from meerschaum.utils.dtypes import are_dtypes_equal, get_geometry_type_srid
600
772
  def parse_custom(_pd_type: str, _db_type: str) -> str:
601
773
  if 'json' in _db_type.lower():
602
774
  return 'json'
@@ -604,6 +776,13 @@ def get_pd_type_from_db_type(db_type: str, allow_custom_dtypes: bool = True) ->
604
776
  precision, scale = get_numeric_precision_scale(None, dtype=_db_type.upper())
605
777
  if precision and scale:
606
778
  return f"numeric[{precision},{scale}]"
779
+ if are_dtypes_equal(_pd_type, 'geometry') and _pd_type != 'object':
780
+ geometry_type, srid = get_geometry_type_srid(_db_type.upper())
781
+ modifiers = [str(modifier) for modifier in (geometry_type, srid) if modifier]
782
+ typ = "geometry" if 'geography' not in _pd_type.lower() else 'geography'
783
+ if not modifiers:
784
+ return typ
785
+ return f"{typ}[{', '.join(modifiers)}]"
607
786
  return _pd_type
608
787
 
609
788
  pd_type = DB_TO_PD_DTYPES.get(db_type.upper().split('(', maxsplit=1)[0].strip(), None)
@@ -88,6 +88,11 @@ packages: Dict[str, Dict[str, str]] = {
88
88
  'mssqlcli' : 'mssql-cli>=1.0.0',
89
89
  'gadwall' : 'gadwall>=0.2.0',
90
90
  },
91
+ 'gis' : {
92
+ 'pyproj' : 'pyproj>=3.7.1',
93
+ 'geopandas' : 'geopandas>=1.0.1',
94
+ 'shapely' : 'shapely>=2.0.7',
95
+ },
91
96
  'stack': {
92
97
  'compose' : 'docker-compose>=1.29.2',
93
98
  },
@@ -135,8 +140,6 @@ packages: Dict[str, Dict[str, str]] = {
135
140
  packages['sql'] = {
136
141
  'numpy' : 'numpy>=1.18.5',
137
142
  'pandas' : 'pandas[parquet]>=2.0.1',
138
- 'geopandas' : 'geopandas>=1.0.1',
139
- 'shapely' : 'shapely>=2.0.7',
140
143
  'pyarrow' : 'pyarrow>=16.1.0',
141
144
  'dask' : 'dask[complete]>=2024.12.1',
142
145
  'partd' : 'partd>=1.4.2',
@@ -150,6 +153,7 @@ packages['sql'] = {
150
153
  }
151
154
  packages['sql'].update(packages['drivers'])
152
155
  packages['sql'].update(packages['core'])
156
+ packages['sql'].update(packages['gis'])
153
157
  packages['dash'] = {
154
158
  'flask_compress' : 'Flask-Compress>=1.10.1',
155
159
  'dash' : 'dash>=2.6.2',
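The GIS dependencies (pyproj, geopandas, shapely) now live in their own 'gis' extra, which is also folded into the sql extra. A hedged sketch of how this release pulls those packages in at runtime, mirroring the attempt_import calls seen earlier in this diff:

    import meerschaum as mrsm

    # Assumes the 'gis' extra (or 'sql'/'api'/'full', which now include it) is installed.
    gpd = mrsm.attempt_import('geopandas', lazy=False)
    shapely = mrsm.attempt_import('shapely', lazy=False)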
meerschaum/utils/sql.py CHANGED
@@ -528,7 +528,7 @@ NO_CTE_FLAVORS = {'mysql', 'mariadb'}
528
528
  NO_SELECT_INTO_FLAVORS = {'sqlite', 'oracle', 'mysql', 'mariadb', 'duckdb'}
529
529
 
530
530
 
531
- def clean(substring: str) -> str:
531
+ def clean(substring: str) -> None:
532
532
  """
533
533
  Ensure a substring is clean enough to be inserted into a SQL query.
534
534
  Raises an exception when banned words are used.
@@ -932,6 +932,7 @@ def build_where(
932
932
  params: Dict[str, Any],
933
933
  connector: Optional[mrsm.connectors.sql.SQLConnector] = None,
934
934
  with_where: bool = True,
935
+ flavor: str = 'postgresql',
935
936
  ) -> str:
936
937
  """
937
938
  Build the `WHERE` clause based on the input criteria.
@@ -951,6 +952,9 @@ def build_where(
951
952
  with_where: bool, default True:
952
953
  If `True`, include the leading `'WHERE'` string.
953
954
 
955
+ flavor: str, default 'postgresql'
956
+ If `connector` is `None`, fall back to this flavor.
957
+
954
958
  Returns
955
959
  -------
956
960
  A `str` of the `WHERE` clause from the input `params` dictionary for the connector's flavor.
@@ -979,13 +983,11 @@ def build_where(
979
983
  warn(f"Aborting build_where() due to possible SQL injection.")
980
984
  return ''
981
985
 
982
- if connector is None:
983
- from meerschaum import get_connector
984
- connector = get_connector('sql')
986
+ query_flavor = getattr(connector, 'flavor', flavor) if connector is not None else flavor
985
987
  where = ""
986
988
  leading_and = "\n AND "
987
989
  for key, value in params.items():
988
- _key = sql_item_name(key, connector.flavor, None)
990
+ _key = sql_item_name(key, query_flavor, None)
989
991
  ### search across a list (i.e. IN syntax)
990
992
  if isinstance(value, Iterable) and not isinstance(value, (dict, str)):
991
993
  includes = [
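With the implicit get_connector('sql') fallback removed, build_where can run without any configured connector by honoring the new flavor argument. A hedged usage sketch; the exact quoting depends on sql_item_name for the chosen flavor:

    from meerschaum.utils.sql import build_where

    params = {'color': 'red', 'size': [1, 2, 3]}

    where_clause = build_where(params, flavor='sqlite')
    print(where_clause)
    # Roughly: WHERE "color" = 'red' AND "size" IN ('1', '2', '3')  (exact quoting may differ)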
@@ -1296,7 +1298,24 @@ def get_table_cols_types(
1296
1298
  if cols_types_docs and not cols_types_docs_filtered:
1297
1299
  cols_types_docs_filtered = cols_types_docs
1298
1300
 
1299
- return {
1301
+ ### NOTE: Check for PostGIS GEOMETRY columns.
1302
+ geometry_cols_types = {}
1303
+ user_defined_cols = [
1304
+ doc
1305
+ for doc in cols_types_docs_filtered
1306
+ if str(doc.get('type', None)).upper() == 'USER-DEFINED'
1307
+ ]
1308
+ if user_defined_cols:
1309
+ geometry_cols_types.update(
1310
+ get_postgis_geo_columns_types(
1311
+ connectable,
1312
+ table,
1313
+ schema=schema,
1314
+ debug=debug,
1315
+ )
1316
+ )
1317
+
1318
+ cols_types = {
1300
1319
  (
1301
1320
  doc['column']
1302
1321
  if flavor != 'oracle' else (
@@ -1317,6 +1336,8 @@ def get_table_cols_types(
1317
1336
  )
1318
1337
  for doc in cols_types_docs_filtered
1319
1338
  }
1339
+ cols_types.update(geometry_cols_types)
1340
+ return cols_types
1320
1341
  except Exception as e:
1321
1342
  warn(f"Failed to fetch columns for table '{table}':\n{e}")
1322
1343
  return {}
@@ -2505,3 +2526,64 @@ def get_reset_autoincrement_queries(
2505
2526
  )
2506
2527
  for query in reset_queries
2507
2528
  ]
2529
+
2530
+
2531
+ def get_postgis_geo_columns_types(
2532
+ connectable: Union[
2533
+ 'mrsm.connectors.sql.SQLConnector',
2534
+ 'sqlalchemy.orm.session.Session',
2535
+ 'sqlalchemy.engine.base.Engine'
2536
+ ],
2537
+ table: str,
2538
+ schema: Optional[str] = 'public',
2539
+ debug: bool = False,
2540
+ ) -> Dict[str, str]:
2541
+ """
2542
+ Return the
2543
+ """
2544
+ from meerschaum.utils.dtypes import get_geometry_type_srid
2545
+ default_type, default_srid = get_geometry_type_srid()
2546
+ default_type = default_type.upper()
2547
+
2548
+ clean(table)
2549
+ clean(str(schema))
2550
+ schema = schema or 'public'
2551
+ truncated_schema_name = truncate_item_name(schema, flavor='postgis')
2552
+ truncated_table_name = truncate_item_name(table, flavor='postgis')
2553
+ query = (
2554
+ "SELECT \"f_geometry_column\" AS \"column\", 'GEOMETRY' AS \"func\", \"type\", \"srid\"\n"
2555
+ "FROM \"geometry_columns\"\n"
2556
+ f"WHERE \"f_table_schema\" = '{truncated_schema_name}'\n"
2557
+ f" AND \"f_table_name\" = '{truncated_table_name}'\n"
2558
+ "UNION ALL\n"
2559
+ "SELECT \"f_geography_column\" AS \"column\", 'GEOGRAPHY' AS \"func\", \"type\", \"srid\"\n"
2560
+ "FROM \"geography_columns\"\n"
2561
+ f"WHERE \"f_table_schema\" = '{truncated_schema_name}'\n"
2562
+ f" AND \"f_table_name\" = '{truncated_table_name}'\n"
2563
+ )
2564
+ debug_kwargs = {'debug': debug} if isinstance(connectable, mrsm.connectors.SQLConnector) else {}
2565
+ result_rows = [
2566
+ row
2567
+ for row in connectable.execute(query, **debug_kwargs).fetchall()
2568
+ ]
2569
+ cols_type_tuples = {
2570
+ row[0]: (row[1], row[2], row[3])
2571
+ for row in result_rows
2572
+ }
2573
+
2574
+ geometry_cols_types = {
2575
+ col: (
2576
+ f"{func}({typ.upper()}, {srid})"
2577
+ if srid
2578
+ else (
2579
+ func
2580
+ + (
2581
+ f'({typ.upper()})'
2582
+ if typ.upper() not in ('GEOMETRY', 'GEOGRAPHY')
2583
+ else ''
2584
+ )
2585
+ )
2586
+ )
2587
+ for col, (func, typ, srid) in cols_type_tuples.items()
2588
+ }
2589
+ return geometry_cols_types
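The new helper queries PostGIS's geometry_columns and geography_columns views and renders each column back into PostGIS DDL form. A hedged sketch of the expected shape of its output; the connector name and table are illustrative only:

    import meerschaum as mrsm
    from meerschaum.utils.sql import get_postgis_geo_columns_types

    conn = mrsm.get_connector('sql:postgis_demo')  # hypothetical PostGIS connector
    cols_types = get_postgis_geo_columns_types(conn, 'weather_stations', schema='public')
    print(cols_types)
    # e.g. {'location': 'GEOMETRY(POINT, 4326)'}, or 'GEOGRAPHY(...)' for geography columns.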
@@ -86,7 +86,10 @@ def activate_venv(
86
86
  target = target_path.as_posix()
87
87
 
88
88
  if venv in active_venvs_order:
89
- sys.path.remove(target)
89
+ try:
90
+ sys.path.remove(target)
91
+ except Exception:
92
+ pass
90
93
  try:
91
94
  active_venvs_order.remove(venv)
92
95
  except Exception:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: meerschaum
3
- Version: 2.9.0rc2
3
+ Version: 2.9.0rc3
4
4
  Summary: Sync Time-Series Pipes with Meerschaum
5
5
  Home-page: https://meerschaum.io
6
6
  Author: Bennett Meares
@@ -87,6 +87,10 @@ Requires-Dist: mycli>=1.23.2; extra == "cli"
87
87
  Requires-Dist: litecli>=1.5.0; extra == "cli"
88
88
  Requires-Dist: mssql-cli>=1.0.0; extra == "cli"
89
89
  Requires-Dist: gadwall>=0.2.0; extra == "cli"
90
+ Provides-Extra: gis
91
+ Requires-Dist: pyproj>=3.7.1; extra == "gis"
92
+ Requires-Dist: geopandas>=1.0.1; extra == "gis"
93
+ Requires-Dist: shapely>=2.0.7; extra == "gis"
90
94
  Provides-Extra: stack
91
95
  Requires-Dist: docker-compose>=1.29.2; extra == "stack"
92
96
  Provides-Extra: build
@@ -126,8 +130,6 @@ Requires-Dist: importlib-metadata>=4.12.0; extra == "extras"
126
130
  Provides-Extra: sql
127
131
  Requires-Dist: numpy>=1.18.5; extra == "sql"
128
132
  Requires-Dist: pandas[parquet]>=2.0.1; extra == "sql"
129
- Requires-Dist: geopandas>=1.0.1; extra == "sql"
130
- Requires-Dist: shapely>=2.0.7; extra == "sql"
131
133
  Requires-Dist: pyarrow>=16.1.0; extra == "sql"
132
134
  Requires-Dist: dask[complete]>=2024.12.1; extra == "sql"
133
135
  Requires-Dist: partd>=1.4.2; extra == "sql"
@@ -167,6 +169,9 @@ Requires-Dist: fasteners>=0.19.0; extra == "sql"
167
169
  Requires-Dist: virtualenv>=20.1.0; extra == "sql"
168
170
  Requires-Dist: attrs>=24.2.0; extra == "sql"
169
171
  Requires-Dist: uv>=0.2.11; extra == "sql"
172
+ Requires-Dist: pyproj>=3.7.1; extra == "sql"
173
+ Requires-Dist: geopandas>=1.0.1; extra == "sql"
174
+ Requires-Dist: shapely>=2.0.7; extra == "sql"
170
175
  Provides-Extra: dash
171
176
  Requires-Dist: Flask-Compress>=1.10.1; extra == "dash"
172
177
  Requires-Dist: dash>=2.6.2; extra == "dash"
@@ -189,8 +194,6 @@ Requires-Dist: httpcore>=1.0.6; extra == "api"
189
194
  Requires-Dist: valkey>=6.0.0; extra == "api"
190
195
  Requires-Dist: numpy>=1.18.5; extra == "api"
191
196
  Requires-Dist: pandas[parquet]>=2.0.1; extra == "api"
192
- Requires-Dist: geopandas>=1.0.1; extra == "api"
193
- Requires-Dist: shapely>=2.0.7; extra == "api"
194
197
  Requires-Dist: pyarrow>=16.1.0; extra == "api"
195
198
  Requires-Dist: dask[complete]>=2024.12.1; extra == "api"
196
199
  Requires-Dist: partd>=1.4.2; extra == "api"
@@ -230,6 +233,9 @@ Requires-Dist: fasteners>=0.19.0; extra == "api"
230
233
  Requires-Dist: virtualenv>=20.1.0; extra == "api"
231
234
  Requires-Dist: attrs>=24.2.0; extra == "api"
232
235
  Requires-Dist: uv>=0.2.11; extra == "api"
236
+ Requires-Dist: pyproj>=3.7.1; extra == "api"
237
+ Requires-Dist: geopandas>=1.0.1; extra == "api"
238
+ Requires-Dist: shapely>=2.0.7; extra == "api"
233
239
  Requires-Dist: pprintpp>=0.4.0; extra == "api"
234
240
  Requires-Dist: asciitree>=0.3.3; extra == "api"
235
241
  Requires-Dist: typing-extensions>=4.7.1; extra == "api"
@@ -292,13 +298,14 @@ Requires-Dist: aiomysql>=0.0.21; extra == "full"
292
298
  Requires-Dist: sqlalchemy-cockroachdb>=2.0.0; extra == "full"
293
299
  Requires-Dist: duckdb>=1.0.0; extra == "full"
294
300
  Requires-Dist: duckdb-engine>=0.13.0; extra == "full"
301
+ Requires-Dist: pyproj>=3.7.1; extra == "full"
302
+ Requires-Dist: geopandas>=1.0.1; extra == "full"
303
+ Requires-Dist: shapely>=2.0.7; extra == "full"
295
304
  Requires-Dist: toga>=0.3.0-dev29; extra == "full"
296
305
  Requires-Dist: pywebview>=3.6.3; extra == "full"
297
306
  Requires-Dist: pycparser>=2.21.0; extra == "full"
298
307
  Requires-Dist: numpy>=1.18.5; extra == "full"
299
308
  Requires-Dist: pandas[parquet]>=2.0.1; extra == "full"
300
- Requires-Dist: geopandas>=1.0.1; extra == "full"
301
- Requires-Dist: shapely>=2.0.7; extra == "full"
302
309
  Requires-Dist: pyarrow>=16.1.0; extra == "full"
303
310
  Requires-Dist: dask[complete]>=2024.12.1; extra == "full"
304
311
  Requires-Dist: partd>=1.4.2; extra == "full"
@@ -127,7 +127,7 @@ meerschaum/api/routes/_index.py,sha256=Z8kuyqm3vmJadw8iIYyswYI4-3IOJ7KXdkhDTv1oU
127
127
  meerschaum/api/routes/_jobs.py,sha256=sEt-UtVd5pN-hJgikTvj1oTKJQ2hhNe8XhjkclwOXOE,12568
128
128
  meerschaum/api/routes/_login.py,sha256=tygEp50EVOMgvTG6CEASlShflbtEK8viJ9O07o0lnnE,2434
129
129
  meerschaum/api/routes/_misc.py,sha256=XxfSvXNGAm8rdvXePXWxX8wc5tjeAdBOSZwYveL3oAM,2591
130
- meerschaum/api/routes/_pipes.py,sha256=1NiL6xKPdFh4LNm5OioedZ_bHUlDBRM7uJU4frjXcDE,28111
130
+ meerschaum/api/routes/_pipes.py,sha256=S5g1ah_2aYh6SqfzMUlkGZ65aaXdjdWcfNG1Wj-vt_M,29048
131
131
  meerschaum/api/routes/_plugins.py,sha256=okstNlv9Bhoiy6JvQWgwjxEi4kQ8adPUcir6k3Y7hH8,6329
132
132
  meerschaum/api/routes/_users.py,sha256=i55LuLTQ2cuzIyWz0PxkWji6aQQUIBPf_FEryKwXI50,7197
133
133
  meerschaum/api/routes/_version.py,sha256=-3A0i4Gk54netFOOwjI_x3YQik9vgHjtq7G_VYbzIJo,750
@@ -146,7 +146,7 @@ meerschaum/config/_preprocess.py,sha256=-AEA8m_--KivZwTQ1sWN6LTn5sio_fUr2XZ51BO6
146
146
  meerschaum/config/_read_config.py,sha256=RLC3HHi_1ndj7ITVDKLD9_uULY3caGRwSz3ATYE-ixA,15014
147
147
  meerschaum/config/_shell.py,sha256=46_m49Txc5q1rGfCgO49ca48BODx45DQJi8D0zz1R18,4245
148
148
  meerschaum/config/_sync.py,sha256=jHcWRkxd82_BgX8Xo8agsWvf7BSbv3qHLWmYl6ehp_0,4242
149
- meerschaum/config/_version.py,sha256=qSHUj8MxnJ9gRyXkAhkZxz2mL83tODyagATrnfHLw2c,74
149
+ meerschaum/config/_version.py,sha256=jDTOyzG0q-CmFfLtorLaX250Zfp1SgHThpYDlP458N0,74
150
150
  meerschaum/config/paths.py,sha256=JjibeGN3YAdSNceRwsd42aNmeUrIgM6ndzC8qZAmNI0,621
151
151
  meerschaum/config/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
152
152
  meerschaum/config/stack/__init__.py,sha256=2UukC0Lmk-aVL1o1qXzumqmuIrw3vu9fD7iCuz4XD4I,10544
@@ -187,14 +187,14 @@ meerschaum/connectors/sql/_users.py,sha256=mRyjsUCfPV52nfTQUbpu9gMXfV_DHXNqEhw4N
187
187
  meerschaum/connectors/sql/tools.py,sha256=jz8huOaRCwGlYdtGfAqAh7SoK8uydYBrasKQba9FT38,187
188
188
  meerschaum/connectors/sql/tables/__init__.py,sha256=53EeJsvmGjj68SpSShdt6kyLuk5Md5O8DnvWC1ra3u8,8876
189
189
  meerschaum/connectors/sql/tables/types.py,sha256=Jc_MTHIBM-KHpQt__Lckp39CeOo7tGOiAk5faDx-znY,1573
190
- meerschaum/connectors/valkey/_ValkeyConnector.py,sha256=_KjJSgUcZhNJZuPfn0vhDBjnOzjtk_jZrjScfhio0Ww,15794
190
+ meerschaum/connectors/valkey/_ValkeyConnector.py,sha256=IJgFXHXH89J2uDP_WaejIzr1L8gSNQtSXGxjXeYm2gw,15854
191
191
  meerschaum/connectors/valkey/__init__.py,sha256=jkVutsygQCvGPLN17cP6wHAjHajxVycnQJbm2eVMuY0,187
192
192
  meerschaum/connectors/valkey/_fetch.py,sha256=MjeE0h3YI4M3LCzy7axQAc_fX_l82vUqX4WXcYoppxE,1920
193
- meerschaum/connectors/valkey/_pipes.py,sha256=sjjO1vehNCkN_2bsqQKy81WWWrOg8a8ElupqzCjZ_7c,24594
193
+ meerschaum/connectors/valkey/_pipes.py,sha256=bnbcDhvmENLtjxZVSXYWc-W3pbtJWVvw815EH5zPwlU,24648
194
194
  meerschaum/connectors/valkey/_plugins.py,sha256=ZqiEW4XZCOpw4G8DUK2IKY6Qrph4mYfTjgXWimgakYY,6267
195
195
  meerschaum/connectors/valkey/_users.py,sha256=AS1vLarrkDA9yPK644GWwRiQiTZVa9x3nlLpyntq40g,7730
196
196
  meerschaum/core/__init__.py,sha256=tjASW10n9uLV6bYhcwP4rggh-ESXSJzgxpSBbVsuISs,251
197
- meerschaum/core/Pipe/__init__.py,sha256=0HWR0SbBXRlnO7YEe6WCK3rWzZ7kqx86E6B8-cbenjI,19354
197
+ meerschaum/core/Pipe/__init__.py,sha256=rHlKky1AZI6kg9J_9_yOVCJHinMB9JN6hy0VMZBQmdk,19393
198
198
  meerschaum/core/Pipe/_attributes.py,sha256=wZQBGspZHmqmab_DNEUrvYsJSZtwaDsj0zeXDHoEBdQ,22419
199
199
  meerschaum/core/Pipe/_bootstrap.py,sha256=gTNGh5e2LmTMrgIpHqrVaL60uPKWCphhsuz8j-lJ2HI,7348
200
200
  meerschaum/core/Pipe/_clear.py,sha256=LghXabgyyc1tD7FNQrh9ExT71ipcg2poM9FDA3k9e4M,2230
@@ -209,7 +209,7 @@ meerschaum/core/Pipe/_fetch.py,sha256=IojFSA_EXBSm0I8BmlDgmUh3M85FFtXjmDJhdxZ8Ll
209
209
  meerschaum/core/Pipe/_index.py,sha256=cYgaVwBVfAYxJBZ6j6MXDqOxnOrD_QnYi33_kIwy_FQ,1944
210
210
  meerschaum/core/Pipe/_register.py,sha256=Sd5xaAW8H7uLTIoommcKb-6kHPRuHJLWNSbPnt2UbvA,2240
211
211
  meerschaum/core/Pipe/_show.py,sha256=nG50y8eBT9TVuKkRgAKtNDNIxysJvMNxfu__lkL1F9k,1352
212
- meerschaum/core/Pipe/_sync.py,sha256=NGiGhBI7M_nyIvVvyIA9_4jUKis1YpK4YL_pnceYGjg,38077
212
+ meerschaum/core/Pipe/_sync.py,sha256=YsNlWepIPHVxfoBsYtPkdl5jMdTIEEd0kHtMllUyAoI,39968
213
213
  meerschaum/core/Pipe/_verify.py,sha256=aXRpD6azrN9A9Z3AXkKQw6NJFqNWqZVpgidjh5BsfLE,22565
214
214
  meerschaum/core/Plugin/__init__.py,sha256=UXg64EvJPgI1PCxkY_KM02-ZmBm4FZpLPIQR_uSJJDc,137
215
215
  meerschaum/core/User/_User.py,sha256=qbI0GIkr3G0PI4d9S49uatbJQ2kH_-z5-GoVJ0fuEtA,6624
@@ -223,7 +223,7 @@ meerschaum/plugins/__init__.py,sha256=Tl5B0Q4rIfgkPpgknJH3UKKB3fS_cAWI9TspKosvBP
223
223
  meerschaum/plugins/bootstrap.py,sha256=VwjpZAuYdqPJW0YoVgAoM_taHkdQHqP902-8T7OWWCI,11339
224
224
  meerschaum/utils/__init__.py,sha256=QrK1K9hIbPCRCM5k2nZGFqGnrqhA0Eh-iSmCU7FG6Cs,612
225
225
  meerschaum/utils/_get_pipes.py,sha256=tu4xKPoDn79Dz2kWM13cXTP4DSCkn-3G9M8KiLftopw,11073
226
- meerschaum/utils/dataframe.py,sha256=SfwFGqBTGVcA5ulWEXOV2GZ1633JPcRXxF_l6oQ7L5o,52020
226
+ meerschaum/utils/dataframe.py,sha256=Rc9B6oc8hQfhNKzHqemzf_l16Zb4xksPMf6XOut6Ozs,53790
227
227
  meerschaum/utils/debug.py,sha256=GyIzJmunkoPnOcZNYVQdT4Sgd-aOb5MI2VbIgATOjIQ,3695
228
228
  meerschaum/utils/interactive.py,sha256=t-6jWozXSqL7lYGDHuwiOjTgr-UKhdcg61q_eR5mikI,3196
229
229
  meerschaum/utils/misc.py,sha256=8TOQQlFyF_aYnc8tnx98lccXr9tFrdlS-ngXeOQjHHY,47407
@@ -232,7 +232,7 @@ meerschaum/utils/pool.py,sha256=vkE42af4fjrTEJTxf6Ek3xGucm1MtEkpsSEiaVzNKHs,2655
232
232
  meerschaum/utils/process.py,sha256=as0-CjG4mqFP0TydVvmAmgki6er4thS5BqUopeiq98Q,8216
233
233
  meerschaum/utils/prompt.py,sha256=qj1As1tuiL0GZTku_YOC6I5DmOU6L5otDR7DW7LA5fM,19397
234
234
  meerschaum/utils/schedule.py,sha256=Vrcd2Qs-UPVn6xBayNUIgludf0Mlb6Wrgq6ATdyhV8c,11451
235
- meerschaum/utils/sql.py,sha256=wOm_9bA2HRn-TGYqyiqwU3ILk3JBOH08X8e5k0TEDes,80786
235
+ meerschaum/utils/sql.py,sha256=jd0YEtocTgSL5fCCPRkD_gIpzSM6gY1yMYbqsgLxpsE,83557
236
236
  meerschaum/utils/threading.py,sha256=awjbVL_QR6G-o_9Qk85utac9cSdqkiC8tQSdERCdrG8,2814
237
237
  meerschaum/utils/typing.py,sha256=U3MC347sh1umpa3Xr1k71eADyDmk4LB6TnVCpq8dVzI,2830
238
238
  meerschaum/utils/warnings.py,sha256=n-phr3BftNNgyPnvnXC_VMSjtCvjiCZ-ewmVfcROhkc,6611
@@ -243,23 +243,23 @@ meerschaum/utils/daemon/RotatingFile.py,sha256=8_bXegBjjzNRlNEjFZ_EHU4pSaDfjXZTw
243
243
  meerschaum/utils/daemon/StdinFile.py,sha256=qdZ8E_RSOkURypwnS50mWeyWyRig1bAY9tKWMTVKajc,3307
244
244
  meerschaum/utils/daemon/__init__.py,sha256=ziRPyu_IM3l7Xd58y3Uvt0fZLoirJ9nuboFIxxult6c,8741
245
245
  meerschaum/utils/daemon/_names.py,sha256=d2ZwTxBoTAqXZkCfZ5LuX2XrkQmLNUq1OTlUqfoH5dA,4515
246
- meerschaum/utils/dtypes/__init__.py,sha256=nFlslv8aL_UFscWNJOwY19F5KFvXsXAR0_UX0rjF6D8,20669
247
- meerschaum/utils/dtypes/sql.py,sha256=MhuFT3wY8cx_o556a-oa3sKVKmAus0_O8ETbIIRlXik,26074
246
+ meerschaum/utils/dtypes/__init__.py,sha256=BCu-a66OdkEn6EwuBN182EDWjXrMIbRQYjuQrXMZ9jc,21491
247
+ meerschaum/utils/dtypes/sql.py,sha256=zhwGnz6MfsaJEH7Sibtd-GfVEImnBSkZh1I3srxgiKE,31428
248
248
  meerschaum/utils/formatting/__init__.py,sha256=bA8qwBeTNIVHVQOBK682bJsKSKik1yS6xYJAoi0RErk,15528
249
249
  meerschaum/utils/formatting/_jobs.py,sha256=izsqPJhTtUkXUUtWnbXtReYsUYwulXtci3pBj72Ne64,6637
250
250
  meerschaum/utils/formatting/_pipes.py,sha256=gwl8-xCN5GYqBZJ7SkY20BebcofY0nU5X8Y4Emf5dz8,19570
251
251
  meerschaum/utils/formatting/_pprint.py,sha256=wyTmjHFnsHbxfyuytjTWzH-D42Z65GuIisQ_W6UnRPg,3096
252
252
  meerschaum/utils/formatting/_shell.py,sha256=2bFvtwNXapjl9jdlc0fg79PRWHbYVcllKiVcG5g36qI,3678
253
253
  meerschaum/utils/packages/__init__.py,sha256=TdKaj2tmN4bFwzusOfMv24P5ET7Zv73vyoOf9GOIr5E,64427
254
- meerschaum/utils/packages/_packages.py,sha256=BWBJRlwWqZD4SPcM-SbVxEJ2pwclUhioSeP1ldSiYWE,9047
254
+ meerschaum/utils/packages/_packages.py,sha256=6fDjQXVJ7tQv6fMpKXV45RSSIST0T0I6WsKKu_YiPII,9191
255
255
  meerschaum/utils/packages/lazy_loader.py,sha256=VHnph3VozH29R4JnSSBfwtA5WKZYZQFT_GeQSShCnuc,2540
256
256
  meerschaum/utils/venv/_Venv.py,sha256=gc1TCeAj-kTZbQFAT9xl1bi4HXFV5ApT0dPOJfxwr78,3748
257
- meerschaum/utils/venv/__init__.py,sha256=6FDfOSBsGgw2RIXvBuFEwlF5740RIHs4Qum0ekati9I,27249
258
- meerschaum-2.9.0rc2.dist-info/LICENSE,sha256=jG2zQEdRNt88EgHUWPpXVWmOrOduUQRx7MnYV9YIPaw,11359
259
- meerschaum-2.9.0rc2.dist-info/METADATA,sha256=18-lm3TfFldZ0KZdp6YkgF5HDpzfPii7thoQ7ibqMuA,24930
260
- meerschaum-2.9.0rc2.dist-info/NOTICE,sha256=OTA9Fcthjf5BRvWDDIcBC_xfLpeDV-RPZh3M-HQBRtQ,114
261
- meerschaum-2.9.0rc2.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
262
- meerschaum-2.9.0rc2.dist-info/entry_points.txt,sha256=5YBVzibw-0rNA_1VjB16z5GABsOGf-CDhW4yqH8C7Gc,88
263
- meerschaum-2.9.0rc2.dist-info/top_level.txt,sha256=bNoSiDj0El6buocix-FRoAtJOeq1qOF5rRm2u9i7Q6A,11
264
- meerschaum-2.9.0rc2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
265
- meerschaum-2.9.0rc2.dist-info/RECORD,,
257
+ meerschaum/utils/venv/__init__.py,sha256=RhWuDohBEROIu_9T6BNPgYCrGtuE14w7nXHR1E2qyh8,27321
258
+ meerschaum-2.9.0rc3.dist-info/LICENSE,sha256=jG2zQEdRNt88EgHUWPpXVWmOrOduUQRx7MnYV9YIPaw,11359
259
+ meerschaum-2.9.0rc3.dist-info/METADATA,sha256=5awKWBk3UdIqqX03PfwY4FcRMI53-iK-vqxjDFAJNAE,25225
260
+ meerschaum-2.9.0rc3.dist-info/NOTICE,sha256=OTA9Fcthjf5BRvWDDIcBC_xfLpeDV-RPZh3M-HQBRtQ,114
261
+ meerschaum-2.9.0rc3.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
262
+ meerschaum-2.9.0rc3.dist-info/entry_points.txt,sha256=5YBVzibw-0rNA_1VjB16z5GABsOGf-CDhW4yqH8C7Gc,88
263
+ meerschaum-2.9.0rc3.dist-info/top_level.txt,sha256=bNoSiDj0El6buocix-FRoAtJOeq1qOF5rRm2u9i7Q6A,11
264
+ meerschaum-2.9.0rc3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
265
+ meerschaum-2.9.0rc3.dist-info/RECORD,,