meerschaum 2.1.7__py3-none-any.whl → 2.2.0__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (59)
  1. meerschaum/__main__.py +1 -1
  2. meerschaum/_internal/arguments/_parser.py +3 -0
  3. meerschaum/_internal/entry.py +3 -2
  4. meerschaum/actions/install.py +7 -3
  5. meerschaum/actions/show.py +128 -42
  6. meerschaum/actions/sync.py +7 -3
  7. meerschaum/api/__init__.py +24 -14
  8. meerschaum/api/_oauth2.py +4 -4
  9. meerschaum/api/dash/callbacks/dashboard.py +93 -23
  10. meerschaum/api/dash/callbacks/jobs.py +55 -3
  11. meerschaum/api/dash/jobs.py +34 -8
  12. meerschaum/api/dash/keys.py +1 -1
  13. meerschaum/api/dash/pages/dashboard.py +14 -4
  14. meerschaum/api/dash/pipes.py +137 -26
  15. meerschaum/api/dash/plugins.py +25 -9
  16. meerschaum/api/resources/static/js/xterm.js +1 -1
  17. meerschaum/api/resources/templates/termpage.html +3 -0
  18. meerschaum/api/routes/_login.py +5 -4
  19. meerschaum/api/routes/_plugins.py +6 -3
  20. meerschaum/config/_dash.py +11 -0
  21. meerschaum/config/_default.py +3 -1
  22. meerschaum/config/_jobs.py +13 -4
  23. meerschaum/config/_paths.py +2 -0
  24. meerschaum/config/_sync.py +2 -3
  25. meerschaum/config/_version.py +1 -1
  26. meerschaum/config/stack/__init__.py +6 -7
  27. meerschaum/config/stack/grafana/__init__.py +1 -1
  28. meerschaum/config/static/__init__.py +4 -1
  29. meerschaum/connectors/__init__.py +2 -0
  30. meerschaum/connectors/api/_plugins.py +2 -1
  31. meerschaum/connectors/sql/SQLConnector.py +4 -2
  32. meerschaum/connectors/sql/_create_engine.py +9 -9
  33. meerschaum/connectors/sql/_instance.py +3 -1
  34. meerschaum/connectors/sql/_pipes.py +54 -38
  35. meerschaum/connectors/sql/_plugins.py +0 -2
  36. meerschaum/connectors/sql/_sql.py +7 -9
  37. meerschaum/core/User/_User.py +158 -16
  38. meerschaum/core/User/__init__.py +1 -1
  39. meerschaum/plugins/_Plugin.py +12 -3
  40. meerschaum/plugins/__init__.py +23 -1
  41. meerschaum/utils/daemon/Daemon.py +89 -36
  42. meerschaum/utils/daemon/FileDescriptorInterceptor.py +140 -0
  43. meerschaum/utils/daemon/RotatingFile.py +130 -14
  44. meerschaum/utils/daemon/__init__.py +3 -0
  45. meerschaum/utils/dtypes/__init__.py +9 -5
  46. meerschaum/utils/packages/__init__.py +21 -5
  47. meerschaum/utils/packages/_packages.py +18 -20
  48. meerschaum/utils/process.py +13 -10
  49. meerschaum/utils/schedule.py +276 -30
  50. meerschaum/utils/threading.py +1 -0
  51. meerschaum/utils/typing.py +1 -1
  52. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/METADATA +59 -62
  53. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/RECORD +59 -57
  54. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/WHEEL +1 -1
  55. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/LICENSE +0 -0
  56. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/NOTICE +0 -0
  57. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/entry_points.txt +0 -0
  58. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/top_level.txt +0 -0
  59. {meerschaum-2.1.7.dist-info → meerschaum-2.2.0.dist-info}/zip-safe +0 -0
@@ -70,6 +70,9 @@ window.addEventListener(
     if (!fl){ continue; }
     fl_val = event.data['input_flags_texts'][index];
     if (!fl_val){ continue; }
+    if (fl_val.includes(' ')){
+        fl_val = "'" + fl_val + "'";
+    }
     flags_str += " " + fl + " " + fl_val;
   }

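Note: the hunk above wraps any flag value that contains a space in single quotes before appending it to the command string. A rough Python analogue of the same idea (the flag and value below are made-up examples, not meerschaum code) would use shlex.quote:

    import shlex

    flag, value = '--begin', '2024-01-01 00:00:00'
    flags_str = f"{flag} {shlex.quote(value)}"
    print(flags_str)  # --begin '2024-01-01 00:00:00'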
@@ -39,10 +39,11 @@ def login(
         else (data.username, data.password)
     ) if not no_auth else ('no-auth', 'no-auth')

-    from meerschaum.core.User._User import get_pwd_context
+    from meerschaum.core.User._User import verify_password
     user = User(username, password)
-    correct_password = no_auth or get_pwd_context().verify(
-        password, get_api_connector().get_user_password_hash(user, debug=debug)
+    correct_password = no_auth or verify_password(
+        password,
+        get_api_connector().get_user_password_hash(user, debug=debug)
     )
     if not correct_password:
         raise InvalidCredentialsException
@@ -51,7 +52,7 @@ def login(
     expires_delta = timedelta(minutes=expires_minutes)
     expires_dt = datetime.now(timezone.utc).replace(tzinfo=None) + expires_delta
     access_token = manager.create_access_token(
-        data = dict(sub=username),
+        data = {'sub': username},
         expires = expires_delta
     )
     return {
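The login route now calls verify_password from meerschaum.core.User._User instead of passlib's pwd_context. Its implementation is not shown in this diff, and the real function takes the stored hash (which encodes the salt and rounds), so the signatures below are illustrative only: a minimal standard-library sketch of PBKDF2 verification, assuming the parameters added to the static config later in this diff (sha256, 16-byte salt, 3,000,000 rounds).

    import hashlib
    import hmac
    import os

    ROUNDS = 3_000_000   # 'pbkdf2_sha256__default_rounds' from the static config below
    SALT_BYTES = 16      # 'salt_bytes'

    def hash_password(password: str) -> tuple[bytes, bytes]:
        """Return a (salt, digest) pair for storage; this storage format is hypothetical."""
        salt = os.urandom(SALT_BYTES)
        digest = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, ROUNDS)
        return salt, digest

    def verify_password(password: str, salt: bytes, expected: bytes) -> bool:
        """Recompute the digest and compare in constant time."""
        digest = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, ROUNDS)
        return hmac.compare_digest(digest, expected)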
@@ -90,18 +90,21 @@ def register_plugin(
         pass

     plugin = Plugin(name, version=version, attributes=attributes)
+    if curr_user is None:
+        return (
+            False,
+            "Cannot register a plugin without logging in (are you running with `--insecure`)?"
+        )
+
     if curr_user is not None:
         plugin_user_id = get_api_connector().get_plugin_user_id(plugin)
         curr_user_id = get_api_connector().get_user_id(curr_user) if curr_user is not None else -1
         if plugin_user_id is not None and plugin_user_id != curr_user_id:
             return False, f"User '{curr_user.username}' cannot edit plugin '{plugin}'."
         plugin.user_id = curr_user_id
-    else:
-        plugin.user_id = -1

     success, msg = get_api_connector().register_plugin(plugin, make_archive=False, debug=debug)

-    ### TODO delete and install new version of plugin on success
     if success:
         archive_path = plugin.archive_path
         temp_archive_path = pathlib.Path(str(archive_path) + '.tmp')
@@ -0,0 +1,11 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# vim:fenc=utf-8
+
+"""
+Define default configuration for the Dash application.
+"""
+
+default_dash_config = {
+    'max_num_pipes_cards': 20,
+}
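The new 'dash' key is wired into default_config in a later hunk; reading it follows meerschaum's usual get_config accessor (used elsewhere in this diff). A short usage sketch, where the default of 20 comes from the file above:

    from meerschaum.config import get_config

    # Maximum number of pipe cards the web dashboard renders per page.
    max_cards = get_config('dash', 'max_num_pipes_cards')
    print(max_cards)  # 20 unless overridden in the user's configuration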
@@ -44,6 +44,7 @@ default_meerschaum_config = {
     },
     'local': {
         'host': 'localhost',
+        'port': 8000,
     },
     'mrsm': {
         'host': 'api.mrsm.io',
@@ -151,7 +152,6 @@ default_config['pipes'] = default_pipes_config
 default_config['plugins'] = default_plugins_config
 from meerschaum.config._jobs import default_jobs_config
 default_config['jobs'] = default_jobs_config
-# default_config['experimental'] = default_experimental_config
 ### add configs from other packages
 try:
     import meerschaum.config.stack
@@ -160,6 +160,8 @@ except ImportError as e:
 finally:
     from meerschaum.config.stack import default_stack_config
     default_config['stack'] = default_stack_config
+    from meerschaum.config._dash import default_dash_config
+    default_config['dash'] = default_dash_config

 default_header_comment = """
 #####################################################################
@@ -9,13 +9,22 @@ Default configuration for jobs.
 default_jobs_config = {
     'timeout_seconds': 8,
     'check_timeout_interval_seconds': 0.1,
-    'logs' : {
+    'terminal': {
+        'lines': 40,
+        'columns': 70,
+    },
+    'logs': {
+        'timestamps': {
+            'enabled': True,
+            'format': '%Y-%m-%d %H:%M',
+            'follow_format': '%H:%M',
+        },
         'num_files_to_keep': 5,
         'max_file_size': 100_000,
         'lines_to_show': 30,
-        'refresh_files_seconds': 5.0,
-        'min_buffer_len': 15,
-        'colors' : [
+        'refresh_files_seconds': 5,
+        'min_buffer_len': 5,
+        'colors': [
             'cyan',
             'magenta',
             'orange3',
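The new logs:timestamps settings prefix captured log lines with a formatted timestamp; 'format' appears to apply when printing full log files and 'follow_format' when following live output. An illustration of the two strftime formats (the sample datetime is arbitrary):

    from datetime import datetime

    now = datetime(2024, 5, 1, 13, 45)
    print(now.strftime('%Y-%m-%d %H:%M'))  # '2024-05-01 13:45'  ('format')
    print(now.strftime('%H:%M'))           # '13:45'             ('follow_format')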
@@ -129,6 +129,7 @@ paths = {

     'PLUGINS_RESOURCES_PATH' : ('{INTERNAL_RESOURCES_PATH}', 'plugins'),
     'PLUGINS_INTERNAL_LOCK_PATH' : ('{INTERNAL_RESOURCES_PATH}', 'plugins.lock'),
+    'PLUGINS_PACKAGES_INTERNAL_PATH' : ('{INTERNAL_RESOURCES_PATH}', 'packaged_plugins'),
     'PLUGINS_ARCHIVES_RESOURCES_PATH': ('{PLUGINS_RESOURCES_PATH}', '.archives'),
     'PLUGINS_TEMP_RESOURCES_PATH' : ('{PLUGINS_RESOURCES_PATH}', '.tmp'),
     'PLUGINS_INIT_PATH' : ('{PLUGINS_RESOURCES_PATH}', '__init__.py'),
@@ -153,6 +154,7 @@ paths = {

     'DAEMON_RESOURCES_PATH' : ('{ROOT_DIR_PATH}', 'jobs'),
     'LOGS_RESOURCES_PATH' : ('{ROOT_DIR_PATH}', 'logs'),
+    'DAEMON_ERROR_LOG_PATH' : ('{ROOT_DIR_PATH}', 'daemon_errors.log'),
 }

 def set_root(root: Union[Path, str]):
@@ -52,7 +52,7 @@ def sync_yaml_configs(
     if not path.exists():
         return "", {}
     header_comment = ""
-    with open(path, 'r') as f:
+    with open(path, 'r', encoding='utf-8') as f:
         if _yaml is not None:
             config = yaml.load(f)
         else:
@@ -84,7 +84,7 @@ def sync_yaml_configs(
         new_path = sub_path

     ### write changes
-    with open(new_path, 'w+') as f:
+    with open(new_path, 'w+', encoding='utf-8') as f:
         f.write(new_header)
         f.write(new_config_text)
         if permissions is not None:
@@ -133,4 +133,3 @@ def sync_files(keys: Optional[List[str]] = None):
     for k in keys:
         if k in key_functions:
             key_functions[k]()
-
@@ -2,4 +2,4 @@
 Specify the Meerschaum release version.
 """

-__version__ = "2.1.7"
+__version__ = "2.2.0"
@@ -33,7 +33,7 @@ api_host = "api"

 env_dict = {
     'COMPOSE_PROJECT_NAME' : 'mrsm',
-    'TIMESCALEDB_VERSION' : 'latest-pg15-oss',
+    'TIMESCALEDB_VERSION' : 'latest-pg16-oss',
     'POSTGRES_USER' : f'{db_user}',
     'POSTGRES_PASSWORD' : f'{db_pass}',
     'POSTGRES_DB' : f'{db_base}',
@@ -97,7 +97,6 @@ compose_header = """


 default_docker_compose_config = {
-    'version': '3.9',
     'services': {
         'db': {
             'environment': {
@@ -233,11 +232,11 @@ NECESSARY_FILES = [STACK_COMPOSE_PATH, GRAFANA_DATASOURCE_PATH, GRAFANA_DASHBOAR
 def get_necessary_files():
     from meerschaum.config import get_config
     return {
-        STACK_COMPOSE_PATH : (
+        STACK_COMPOSE_PATH: (
             get_config('stack', STACK_COMPOSE_FILENAME, substitute=True), compose_header
         ),
-        GRAFANA_DATASOURCE_PATH : get_config('stack', 'grafana', 'datasource', substitute=True),
-        GRAFANA_DASHBOARD_PATH : get_config('stack', 'grafana', 'dashboard', substitute=True),
+        GRAFANA_DATASOURCE_PATH: get_config('stack', 'grafana', 'datasource', substitute=True),
+        GRAFANA_DASHBOARD_PATH: get_config('stack', 'grafana', 'dashboard', substitute=True),
     }


@@ -251,8 +250,8 @@ def write_stack(
     return sync_files(['stack'])

 def edit_stack(
-    action : Optional[List[str]] = None,
-    debug : bool = False,
+    action: Optional[List[str]] = None,
+    debug: bool = False,
     **kw
 ):
     """Open docker-compose.yaml or .env for editing."""
@@ -17,7 +17,7 @@ default_datasource = {
     'type': 'postgres',
     'jsonData': {
         'sslmode': 'disable',
-        'postgresVersion': 1400,
+        'postgresVersion': 1500,
         'timescaledb': True,
     },
     'user': db_user,
@@ -60,6 +60,7 @@ STATIC_CONFIG: Dict[str, Any] = {
         'gid': 'MRSM_GID',
         'noask': 'MRSM_NOASK',
         'id': 'MRSM_SERVER_ID',
+        'daemon_id': 'MRSM_DAEMON_ID',
         'uri_regex': r'MRSM_([a-zA-Z0-9]*)_(\d*[a-zA-Z][a-zA-Z0-9-_+]*$)',
         'prefix': 'MRSM_',
     },
@@ -103,11 +104,13 @@ STATIC_CONFIG: Dict[str, Any] = {
     },
     'users': {
         'password_hash': {
+            'algorithm_name': 'sha256',
+            'salt_bytes': 16,
             'schemes': [
                 'pbkdf2_sha256',
             ],
             'default': 'pbkdf2_sha256',
-            'pbkdf2_sha256__default_rounds': 30000,
+            'pbkdf2_sha256__default_rounds': 3_000_000,
         },
         'min_username_length': 1,
         'max_username_length': 26,
@@ -317,6 +317,8 @@ def load_plugin_connectors():
     from meerschaum.plugins import get_plugins, import_plugins
     to_import = []
     for plugin in get_plugins():
+        if plugin is None:
+            continue
         with open(plugin.__file__, encoding='utf-8') as f:
             text = f.read()
         if 'make_connector' in text:
@@ -49,6 +49,7 @@ def register_plugin(
 def install_plugin(
     self,
     name: str,
+    skip_deps: bool = False,
     force: bool = False,
     debug: bool = False
 ) -> SuccessTuple:
@@ -78,7 +79,7 @@ def install_plugin(
         success, msg = False, fail_msg
         return success, msg
     plugin = Plugin(name, archive_path=archive_path, repo_connector=self)
-    return plugin.install(force=force, debug=debug)
+    return plugin.install(skip_deps=skip_deps, force=force, debug=debug)

 def get_plugins(
     self,
@@ -128,8 +128,10 @@ class SQLConnector(Connector):
         """
         if 'uri' in kw:
             uri = kw['uri']
-            if uri.startswith('postgres://'):
-                uri = uri.replace('postgres://', 'postgresql://', 1)
+            if uri.startswith('postgres') and not uri.startswith('postgresql'):
+                uri = uri.replace('postgres', 'postgresql', 1)
+            if uri.startswith('postgresql') and not uri.startswith('postgresql+'):
+                uri = uri.replace('postgresql://', 'postgresql+psycopg', 1)
             if uri.startswith('timescaledb://'):
                 uri = uri.replace('timescaledb://', 'postgresql://', 1)
                 flavor = 'timescaledb'
@@ -28,7 +28,7 @@ default_create_engine_args = {
 }
 flavor_configs = {
     'timescaledb' : {
-        'engine' : 'postgresql',
+        'engine' : 'postgresql+psycopg',
         'create_engine' : default_create_engine_args,
         'omit_create_engine': {'method',},
         'to_sql' : {},
@@ -38,7 +38,7 @@ flavor_configs = {
         },
     },
     'postgresql' : {
-        'engine' : 'postgresql',
+        'engine' : 'postgresql+psycopg',
         'create_engine' : default_create_engine_args,
         'omit_create_engine': {'method',},
         'to_sql' : {},
@@ -48,7 +48,7 @@ flavor_configs = {
         },
     },
     'citus' : {
-        'engine' : 'postgresql',
+        'engine' : 'postgresql+psycopg',
         'create_engine' : default_create_engine_args,
         'omit_create_engine': {'method',},
         'to_sql' : {},
@@ -154,10 +154,10 @@ install_flavor_drivers = {
     'duckdb': ['duckdb', 'duckdb_engine'],
     'mysql': ['pymysql'],
     'mariadb': ['pymysql'],
-    'timescaledb': ['psycopg2'],
-    'postgresql': ['psycopg2'],
-    'citus': ['psycopg2'],
-    'cockroachdb': ['psycopg2', 'sqlalchemy_cockroachdb', 'sqlalchemy_cockroachdb.psycopg2'],
+    'timescaledb': ['psycopg'],
+    'postgresql': ['psycopg'],
+    'citus': ['psycopg'],
+    'cockroachdb': ['psycopg', 'sqlalchemy_cockroachdb', 'sqlalchemy_cockroachdb.psycopg'],
     'mssql': ['pyodbc'],
     'oracle': ['cx_Oracle'],
 }
@@ -165,7 +165,7 @@ require_patching_flavors = {'cockroachdb': [('sqlalchemy-cockroachdb', 'sqlalche

 flavor_dialects = {
     'cockroachdb': (
-        'cockroachdb', 'sqlalchemy_cockroachdb.psycopg2', 'CockroachDBDialect_psycopg2'
+        'cockroachdb', 'sqlalchemy_cockroachdb.psycopg', 'CockroachDBDialect_psycopg'
     ),
     'duckdb': ('duckdb', 'duckdb_engine', 'Dialect'),
 }
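These engine and driver changes move the PostgreSQL-family flavors from psycopg2 to psycopg (version 3) and its SQLAlchemy dialect. A minimal sketch of the resulting engine URL form; the credentials and database name here are made up:

    from sqlalchemy import create_engine, text

    # postgres:// and postgresql:// URIs are normalized to the psycopg 3 dialect.
    engine = create_engine('postgresql+psycopg://user:pass@localhost:5432/meerschaum')
    with engine.connect() as conn:
        print(conn.execute(text('SELECT 1')).scalar())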
@@ -242,7 +242,7 @@ def create_engine(

     ### Sometimes the timescaledb:// flavor can slip in.
     if _uri and self.flavor in ('timescaledb',) and self.flavor in _uri:
-        engine_str = engine_str.replace(f'{self.flavor}://', 'postgresql://')
+        engine_str = engine_str.replace(f'{self.flavor}', 'postgresql', 1)

     if debug:
         dprint(
@@ -155,7 +155,9 @@ def _drop_old_temporary_tables(
     temp_tables_table = get_tables(mrsm_instance=self, create=False, debug=debug)['temp_tables']
     last_check = getattr(self, '_stale_temporary_tables_check_timestamp', 0)
     now_ts = time.perf_counter()
-    if refresh or not last_check or (now_ts - last_check) > 60:
+    if not last_check:
+        self._stale_temporary_tables_check_timestamp = 0
+    if refresh or (now_ts - last_check) < 60:
         self._stale_temporary_tables_check_timestamp = now_ts
         return self._drop_temporary_tables(debug=debug)

@@ -752,7 +752,7 @@ def get_pipe_data(
         debug = debug,
         **kw
     )
-
+
     if is_dask:
         index_col = pipe.columns.get('datetime', None)
         kw['index_col'] = index_col
@@ -763,6 +763,7 @@ def get_pipe_data(
         if typ == 'numeric' and col in dtypes
     ]
     kw['coerce_float'] = kw.get('coerce_float', (len(numeric_columns) == 0))
+
     df = self.read(
         query,
         dtype = dtypes,
@@ -1478,43 +1479,11 @@ def sync_pipe_inplace(
     from meerschaum.utils.misc import generate_password
     from meerschaum.utils.debug import dprint

-    sqlalchemy, sqlalchemy_orm = mrsm.attempt_import('sqlalchemy', 'sqlalchemy.orm')
-    metadef = self.get_pipe_metadef(
-        pipe,
-        params = params,
-        begin = begin,
-        end = end,
-        check_existing = check_existing,
-        debug = debug,
-    )
-    pipe_name = sql_item_name(pipe.target, self.flavor, self.get_pipe_schema(pipe))
-    upsert = pipe.parameters.get('upsert', False) and f'{self.flavor}-upsert' in update_queries
-    internal_schema = self.internal_schema
-    database = getattr(self, 'database', self.parse_uri(self.URI).get('database', None))
-
-    if not pipe.exists(debug=debug):
-        create_pipe_query = get_create_table_query(
-            metadef,
-            pipe.target,
-            self.flavor,
-            schema = self.get_pipe_schema(pipe),
-        )
-        result = self.exec(create_pipe_query, debug=debug)
-        if result is None:
-            return False, f"Could not insert new data into {pipe} from its SQL query definition."
-        if not self.create_indices(pipe, debug=debug):
-            warn(f"Failed to create indices for {pipe}. Continuing...")
-
-        rowcount = pipe.get_rowcount(debug=debug)
-        return True, f"Inserted {rowcount}, updated 0 rows."
-
-    session = sqlalchemy_orm.Session(self.engine)
-    connectable = session if self.flavor != 'duckdb' else self
-
     transact_id = generate_password(3)
     def get_temp_table_name(label: str) -> str:
         return '-' + transact_id + '_' + label + '_' + pipe.target

+    internal_schema = self.internal_schema
     temp_table_roots = ['backtrack', 'new', 'delta', 'joined', 'unseen', 'update']
     temp_tables = {
         table_root: get_temp_table_name(table_root)
@@ -1528,6 +1497,17 @@ def sync_pipe_inplace(
         )
         for table_root, table_name_raw in temp_tables.items()
     }
+    metadef = self.get_pipe_metadef(
+        pipe,
+        params = params,
+        begin = begin,
+        end = end,
+        check_existing = check_existing,
+        debug = debug,
+    )
+    pipe_name = sql_item_name(pipe.target, self.flavor, self.get_pipe_schema(pipe))
+    upsert = pipe.parameters.get('upsert', False) and f'{self.flavor}-upsert' in update_queries
+    database = getattr(self, 'database', self.parse_uri(self.URI).get('database', None))

     def clean_up_temp_tables(ready_to_drop: bool = False):
         log_success, log_msg = self._log_temporary_tables_creation(
@@ -1541,6 +1521,36 @@ def sync_pipe_inplace(
         )
         if not log_success:
             warn(log_msg)
+        drop_stale_success, drop_stale_msg = self._drop_old_temporary_tables(
+            refresh = False,
+            debug = debug,
+        )
+        if not drop_stale_success:
+            warn(drop_stale_msg)
+        return drop_stale_success, drop_stale_msg
+
+    sqlalchemy, sqlalchemy_orm = mrsm.attempt_import('sqlalchemy', 'sqlalchemy.orm')
+    if not pipe.exists(debug=debug):
+        create_pipe_query = get_create_table_query(
+            metadef,
+            pipe.target,
+            self.flavor,
+            schema = self.get_pipe_schema(pipe),
+        )
+        result = self.exec(create_pipe_query, debug=debug)
+        if result is None:
+            _ = clean_up_temp_tables()
+            return False, f"Could not insert new data into {pipe} from its SQL query definition."
+
+        if not self.create_indices(pipe, debug=debug):
+            warn(f"Failed to create indices for {pipe}. Continuing...")
+
+        rowcount = pipe.get_rowcount(debug=debug)
+        _ = clean_up_temp_tables()
+        return True, f"Inserted {rowcount}, updated 0 rows."
+
+    session = sqlalchemy_orm.Session(self.engine)
+    connectable = session if self.flavor != 'duckdb' else self

     create_new_query = get_create_table_query(
         metadef,
@@ -1908,10 +1918,6 @@ def sync_pipe_inplace(
     )
     _ = clean_up_temp_tables(ready_to_drop=True)

-    drop_stale_success, drop_stale_msg = self._drop_old_temporary_tables(refresh=False, debug=debug)
-    if not drop_stale_success:
-        warn(drop_stale_msg)
-
     return True, msg


@@ -2372,6 +2378,16 @@ def get_pipe_columns_types(
     """
     if not pipe.exists(debug=debug):
         return {}
+
+    if self.flavor == 'duckdb':
+        from meerschaum.utils.sql import get_table_cols_types
+        return get_table_cols_types(
+            pipe.target,
+            self,
+            flavor = self.flavor,
+            schema = self.schema,
+        )
+
     table_columns = {}
     try:
         pipe_table = self.get_pipe_table(pipe, debug=debug)
@@ -108,9 +108,7 @@ def get_plugin_version(
     plugins_tbl = get_tables(mrsm_instance=self, debug=debug)['plugins']
     from meerschaum.utils.packages import attempt_import
     sqlalchemy = attempt_import('sqlalchemy')
-
     query = sqlalchemy.select(plugins_tbl.c.version).where(plugins_tbl.c.plugin_name == plugin.name)
-
     return self.value(query, debug=debug)

 def get_plugin_user_id(
@@ -943,17 +943,15 @@ def psql_insert_copy(
         ) for row in data_iter
     )

+    table_name = sql_item_name(table.name, 'postgresql', table.schema)
+    columns = ', '.join(f'"{k}"' for k in keys)
+    sql = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV NULL '\\N'"
+
     dbapi_conn = conn.connection
     with dbapi_conn.cursor() as cur:
-        s_buf = StringIO()
-        writer = csv.writer(s_buf)
-        writer.writerows(data_iter)
-        s_buf.seek(0)
-
-        columns = ', '.join(f'"{k}"' for k in keys)
-        table_name = sql_item_name(table.name, 'postgresql', table.schema)
-        sql = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV NULL '\\N'"
-        cur.copy_expert(sql=sql, file=s_buf)
+        with cur.copy(sql) as copy:
+            writer = csv.writer(copy)
+            writer.writerows(data_iter)


 def format_sql_query_for_dask(query: str) -> 'sqlalchemy.sql.selectable.Select':
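The rewritten psql_insert_copy streams CSV rows straight into psycopg 3's COPY context manager instead of building a StringIO buffer for psycopg2's copy_expert. A standalone sketch of the psycopg 3 API it relies on; the table, columns, and connection string below are hypothetical:

    import psycopg

    rows = [(1, 'a'), (2, 'b')]
    with psycopg.connect('postgresql://user:pass@localhost:5432/db') as conn:
        with conn.cursor() as cur:
            with cur.copy('COPY example_table (id, label) FROM STDIN') as copy:
                for row in rows:
                    copy.write_row(row)  # rows are streamed without an intermediate buffer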