kaqing: 2.0.188-py3-none-any.whl → 2.0.211-py3-none-any.whl

This diff shows the content of publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.

Potentially problematic release.


This version of kaqing might be problematic.

Files changed (78)
  1. adam/batch.py +7 -7
  2. adam/commands/app/utils_app.py +1 -1
  3. adam/commands/bash/bash.py +1 -1
  4. adam/commands/bash/utils_bash.py +1 -1
  5. adam/commands/cassandra/__init__.py +0 -0
  6. adam/commands/command.py +1 -1
  7. adam/commands/commands_utils.py +8 -13
  8. adam/commands/{alter_tables.py → cql/alter_tables.py} +1 -1
  9. adam/commands/cql/completions_c.py +1 -0
  10. adam/commands/cql/utils_cql.py +14 -13
  11. adam/commands/debug/__init__.py +0 -0
  12. adam/commands/debug/debug.py +22 -0
  13. adam/commands/debug/debug_completes.py +35 -0
  14. adam/commands/debug/debug_timings.py +35 -0
  15. adam/commands/devices/device.py +1 -1
  16. adam/commands/devices/devices.py +1 -1
  17. adam/commands/download_cassandra_log.py +45 -0
  18. adam/commands/export/export_databases.py +13 -8
  19. adam/commands/export/export_sessions.py +12 -11
  20. adam/commands/export/exporter.py +140 -53
  21. adam/commands/export/import_session.py +0 -4
  22. adam/commands/export/importer.py +11 -11
  23. adam/commands/export/importer_athena.py +15 -6
  24. adam/commands/export/importer_sqlite.py +19 -8
  25. adam/commands/export/utils_export.py +37 -15
  26. adam/commands/generate_report.py +52 -0
  27. adam/commands/medusa/medusa_restore.py +0 -16
  28. adam/commands/nodetool.py +1 -1
  29. adam/commands/os/__init__.py +0 -0
  30. adam/commands/postgres/postgres_databases.py +2 -3
  31. adam/commands/postgres/postgres_ls.py +1 -1
  32. adam/commands/postgres/utils_postgres.py +2 -1
  33. adam/commands/preview_table.py +1 -1
  34. adam/commands/restart_cluster.py +47 -0
  35. adam/commands/restart_node.py +51 -0
  36. adam/commands/restart_nodes.py +47 -0
  37. adam/commands/show/show_cassandra_status.py +3 -10
  38. adam/commands/show/show_cli_commands.py +1 -1
  39. adam/commands/show/show_processes.py +1 -1
  40. adam/commands/show/show_storage.py +2 -1
  41. adam/config.py +4 -6
  42. adam/embedded_params.py +1 -1
  43. adam/repl.py +5 -3
  44. adam/repl_commands.py +23 -17
  45. adam/repl_session.py +4 -3
  46. adam/repl_state.py +6 -0
  47. adam/sql/async_executor.py +44 -0
  48. adam/sql/lark_completer.py +6 -4
  49. adam/sql/qingl.lark +1076 -0
  50. adam/sso/cred_cache.py +2 -5
  51. adam/utils.py +206 -83
  52. adam/utils_k8s/app_clusters.py +11 -4
  53. adam/utils_k8s/app_pods.py +10 -5
  54. adam/utils_k8s/cassandra_clusters.py +8 -4
  55. adam/utils_k8s/cassandra_nodes.py +14 -5
  56. adam/utils_k8s/kube_context.py +1 -4
  57. adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} +8 -2
  58. adam/utils_k8s/pods.py +83 -24
  59. adam/utils_local.py +78 -2
  60. adam/utils_repl/repl_completer.py +10 -89
  61. adam/utils_sqlite.py +3 -8
  62. adam/version.py +1 -1
  63. {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/METADATA +1 -1
  64. {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/RECORD +67 -65
  65. adam/commands/cat.py +0 -36
  66. adam/commands/cd.py +0 -41
  67. adam/commands/download_file.py +0 -47
  68. adam/commands/find_files.py +0 -51
  69. adam/commands/find_processes.py +0 -76
  70. adam/commands/head.py +0 -36
  71. adam/commands/logs.py +0 -37
  72. adam/commands/ls.py +0 -41
  73. adam/commands/report.py +0 -61
  74. adam/commands/restart.py +0 -60
  75. adam/commands/shell.py +0 -41
  76. {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/WHEEL +0 -0
  77. {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/entry_points.txt +0 -0
  78. {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/top_level.txt +0 -0
adam/commands/export/exporter.py CHANGED
@@ -1,5 +1,7 @@
 from datetime import datetime
+import os
 import time
+import traceback
 
 from adam.commands.command import InvalidArgumentsException
 from adam.commands.cql.utils_cql import cassandra_table_names, run_cql, table_spec
@@ -8,13 +10,12 @@ from adam.commands.export.export_sessions import ExportSessions
 from adam.commands.export.importer import Importer
 from adam.commands.export.importer_athena import AthenaImporter
 from adam.commands.export.importer_sqlite import SqliteImporter
-from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, find_files, state_with_pod
+from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, export_log_dir, find_files, os_system_exec, state_with_pod
 from adam.config import Config
-from adam.pod_exec_result import PodExecResult
+from adam.repl_session import ReplSession
 from adam.repl_state import ReplState
-from adam.utils import debug, log, parallelize, log2, ing, log_exc
+from adam.utils import debug, kaqing_log_file_name, log, offload, parallelize, log2, ing, log_exc
 from adam.utils_k8s.cassandra_nodes import CassandraNodes
-from adam.utils_k8s.pods import log_prefix
 
 class Exporter:
     def export_tables(args: list[str], state: ReplState, export_only: bool = False, max_workers = 0) -> tuple[list[str], ExportSpec]:
@@ -133,11 +134,9 @@ class Exporter:
 
         prefix = Importer.prefix_from_importer(spec.importer)
         if spec.session:
-            spec.session = f'{prefix}{spec.session[1:]}'
+            state.export_session = f'{prefix}{spec.session[1:]}'
         else:
-            spec.session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'
-
-        state.export_session = spec.session
+            state.export_session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'
 
         return spec
 
@@ -154,89 +153,160 @@ class Exporter:
         if export_state == 'init':
             CassandraNodes.exec(state.pod, state.namespace, f'rm -rf {csv_dir()}/{spec.session}_*', show_out=Config().is_debug(), shell='bash')
 
-        action = f'[{spec.session}] Exporting|Exported'
+        job_log = kaqing_log_file_name()
+
+        action = f'[{spec.session}] Triggering export of'
         if export_state == 'init':
             action = f'[{spec.session}] Preparing|Prepared'
         elif export_state == 'import':
             action = f'[{spec.session}] Importing|Imported'
 
-        with parallelize(spec.tables, max_workers, msg=action + ' {size} Cassandra tables') as exec:
-            return exec.map(lambda table: Exporter.export_table(table, state, spec.session, spec.importer, export_only, len(spec.tables) > 1, consistency=spec.consistency, export_state=export_state)), spec
-
-    def export_table(spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, consistency: str = None, export_state=None):
+        msg = action + ' {size} Cassandra tables'
+
+        if export_state != 'init':
+            log2(f'[{spec.session}] Logging to {job_log}...')
+            ReplSession().append_history(f':cat {job_log}')
+
+        pod = state.pod
+        with parallelize(spec.tables, max_workers, msg=msg, collect=export_state == 'init', name='exporter') as exec:
+            return exec.map(lambda table: Exporter.export_table(table,
+                                                                state.with_pod(pod),
+                                                                spec.session,
+                                                                spec.importer,
+                                                                export_only,
+                                                                len(spec.tables) > 1,
+                                                                consistency=spec.consistency,
+                                                                export_state=export_state,
+                                                                job_log=None if export_state == 'init' else job_log)), spec
+
+    def export_table(spec: ExportTableSpec,
+                     state: ReplState,
+                     session: str,
+                     importer: str,
+                     export_only = False,
+                     multi_tables = True,
+                     consistency: str = None,
+                     export_state=None,
+                     job_log: str = None):
         s: str = None
 
         table, target_table, columns = Exporter.resove_table_n_columns(spec, state, include_ks_in_target=False, importer=importer)
 
-        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+        log_file = f'{export_log_dir()}/{session}_{spec.keyspace}.{target_table}.log'
        create_db = not state.export_session
 
        if export_state == 'init':
            Exporter.create_table_log(spec, state, session, table, target_table)
            return 'table_log_created'
        else:
-            if export_state == 'pending_export':
-                Exporter.export_to_csv(spec, state, session, table, target_table, columns, multi_tables=multi_tables, consistency=consistency)
+            try:
+                if export_state == 'pending_export':
+                    Exporter.export_to_csv(spec, state, session, table, target_table, columns, multi_tables=multi_tables, consistency=consistency, job_log=job_log)
 
-            log_files: list[str] = find_files(state.pod, state.namespace, f'{log_file}*')
-            if not log_files:
-                return s
+                log_files: list[str] = find_files(state.pod, state.namespace, f'{log_file}*')
+                if not log_files:
+                    return s
 
-            log_file = log_files[0]
+                log_file = log_files[0]
 
-            status: ExportTableStatus = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
-            while status.status != 'done':
-                if status.status == 'export_in_pregress':
-                    debug('Exporting to CSV is still in progess, sleeping for 1 sec...')
-                    time.sleep(1)
-                elif status.status == 'exported':
-                    log_file = Exporter.rename_to_pending_import(spec, state, session, target_table)
-                    if importer == 'csv' or export_only:
-                        return 'pending_import'
-                elif status.status == 'pending_import':
-                    log_file, session = Exporter.import_from_csv(spec, state, session, importer, table, target_table, columns, multi_tables=multi_tables, create_db=create_db)
+                status: ExportTableStatus = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
 
-                status = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
+                with offload(name='exporter') as exec:
+                    ctx = ExportTableContext(spec, state, session, importer, export_only, multi_tables, table, target_table, columns, create_db, log_file, status, job_log)
+                    exec.submit(lambda: Exporter.export_loop(ctx))
+                    # Exporter.export_loop(ExportTableContext(spec, state, session, importer, export_only, multi_tables, table, target_table, columns, create_db, log_file, status))
+            except:
+                traceback.print_exc()
 
        return status.status
 
+    def export_loop(ctx: 'ExportTableContext'):
+        try:
+            while ctx.status.status != 'done':
+                if ctx.status.status == 'export_in_pregress':
+                    debug('Exporting to CSV is still in progess, sleeping for 1 sec...')
+                    time.sleep(1)
+                elif ctx.status.status == 'exported':
+                    ctx.log_file = Exporter.rename_to_pending_import(ctx.spec, ctx.state, ctx.session, ctx.target_table)
+                    ExportSessions.clear_export_session_cache()
+                    if ctx.importer == 'csv' or ctx.export_only:
+                        return 'pending_import'
+                elif ctx.status.status == 'pending_import':
+                    ctx.log_file, ctx.session = Exporter.import_from_csv(ctx.spec,
+                                                                         ctx.state,
+                                                                         ctx.session,
+                                                                         ctx.importer,
+                                                                         ctx.table,
+                                                                         ctx.target_table,
+                                                                         ctx.columns,
+                                                                         multi_tables=ctx.multi_tables,
+                                                                         create_db=ctx.create_db,
+                                                                         job_log=ctx.f)
+
+                ctx.status = ExportTableStatus.from_log_file(ctx.state.pod, ctx.state.namespace, ctx.session, ctx.log_file)
+
+            return ctx.status.status
+        except:
+            traceback.print_exc()
+
    def create_table_log(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str):
-        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+        log_file = f'{export_log_dir()}/{session}_{spec.keyspace}.{target_table}.log'
+        dir = os.path.dirname(log_file)
 
-        CassandraNodes.exec(state.pod, state.namespace, f'rm -f {log_file}* && touch {log_file}', show_out=Config().is_debug(), shell='bash')
+        cmd = f'rm -f {log_file}* && mkdir -p {dir} && touch {log_file}'
+        os_system_exec(cmd, show_out=Config().is_debug())
 
        return table
 
-    def export_to_csv(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str, columns: str, multi_tables = True, consistency: str = None):
+    def export_to_csv(spec: ExportTableSpec,
+                      state: ReplState,
+                      session: str,
+                      table: str,
+                      target_table: str,
+                      columns: str,
+                      multi_tables = True,
+                      consistency: str = None,
+                      job_log: str = None):
        db = f'{session}_{target_table}'
 
        CassandraNodes.exec(state.pod, state.namespace, f'mkdir -p {csv_dir()}/{db}', show_out=Config().is_debug(), shell='bash')
        csv_file = f'{csv_dir()}/{db}/{table}.csv'
-        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+        table_log_file = f'{export_log_dir()}/{session}_{spec.keyspace}.{target_table}.log'
 
        suppress_ing_log = Config().is_debug() or multi_tables
        queries = []
        if consistency:
            queries.append(f'CONSISTENCY {consistency}')
        queries.append(f"COPY {spec.keyspace}.{table}({columns}) TO '{csv_file}' WITH HEADER = TRUE")
-        r: PodExecResult = ing(
-            f'[{session}] Dumping table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
-            lambda: run_cql(state, ';'.join(queries), show_out=Config().is_debug(), backgrounded=True, log_file=log_file),
-            suppress_log=suppress_ing_log)
 
-        return log_file
+        with ing(f'[{session}] Triggering dump of table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
+                 suppress_log=suppress_ing_log,
+                 job_log = job_log):
+            run_cql(state, ';'.join(queries), show_out=Config().is_debug(), backgrounded=True, log_file=table_log_file, history=False)
+
+        return table_log_file
 
    def rename_to_pending_import(spec: ExportTableSpec, state: ReplState, session: str, target_table: str):
-        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+        log_file = f'{export_log_dir()}/{session}_{spec.keyspace}.{target_table}.log'
        to = f'{log_file}.pending_import'
 
-        CassandraNodes.exec(state.pod, state.namespace, f'mv {log_file} {to}', show_out=Config().is_debug(), shell='bash')
+        cmd =f'mv {log_file} {to}'
+        os_system_exec(cmd, show_out=Config().is_debug())
 
        return to
 
-    def import_from_csv(spec: ExportTableSpec, state: ReplState, session: str, importer: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+    def import_from_csv(spec: ExportTableSpec,
+                        state: ReplState,
+                        session: str,
+                        importer: str,
+                        table: str,
+                        target_table: str,
+                        columns: str,
+                        multi_tables = True,
+                        create_db = False,
+                        job_log: str = None):
        im = AthenaImporter() if importer == 'athena' else SqliteImporter()
-        return im.import_from_csv(state, session if session else state.export_session, spec.keyspace, table, target_table, columns, multi_tables, create_db)
+        return im.import_from_csv(state, session if session else state.export_session, spec.keyspace, table, target_table, columns, multi_tables, create_db, job_log=job_log)
 
    def resove_table_n_columns(spec: ExportTableSpec, state: ReplState, include_ks_in_target = False, importer = 'sqlite'):
        table = spec.table
@@ -262,6 +332,22 @@ class Exporter:
 
        return table, target_table, columns
 
+class ExportTableContext:
+    def __init__(self, spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, table: str = None, target_table: str = None, columns: str = None, create_db = False, log_file: str = None, status: ExportTableStatus = None, f: str = None):
+        self.spec = spec
+        self.state = state
+        self.session = session
+        self.importer = importer
+        self.export_only = export_only
+        self.multi_tables = multi_tables
+        self.table = table
+        self.target_table = target_table
+        self.columns = columns
+        self.create_db = create_db
+        self.log_file = log_file
+        self.status = status
+        self.f = f
+
 class ExportService:
    def __init__(self, handler: 'ExporterHandler'):
        self.handler = handler
@@ -279,16 +365,17 @@ class ExportService:
 
            ExportSessions.clear_export_session_cache()
 
-            if spec.importer == 'csv' or export_only:
-                ExportSessions.show_session(state.sts, state.pod, state.namespace, spec.session)
-            else:
-                log()
-                with export_db(state) as dbs:
-                    dbs.show_database()
+            # if spec.importer == 'csv' or export_only:
+            #     ExportSessions.show_session(state.sts, state.pod, state.namespace, spec.session)
+            # else:
+            #     log()
+            #     with export_db(state) as dbs:
+            #         dbs.show_database()
        finally:
+            pass
            # if exporting to csv, do not bind the new session id to repl state
-            if spec and spec.importer == 'csv':
-                state.export_session = export_session
+            # if spec and spec.importer == 'csv':
+            #     state.export_session = export_session
 
        return state
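The largest behavioral change in exporter.py is that the per-table status loop no longer blocks the caller: export_table packs its arguments into an ExportTableContext and submits Exporter.export_loop to an offload(...) worker, while the job log path is pushed into the REPL history so progress can be tailed with :cat. Below is a minimal standalone sketch of that hand-off pattern using a plain ThreadPoolExecutor; the names poll_until_done, get_status, and on_exported are illustrative only and are not part of kaqing.

# Minimal sketch of the hand-off pattern (not the kaqing API): a status-polling
# loop is submitted to a worker so the caller can return immediately.
from concurrent.futures import ThreadPoolExecutor
import time

def poll_until_done(get_status, on_exported, interval=0.1):
    # Poll until the export reports 'done', reacting to intermediate states.
    status = get_status()
    while status != 'done':
        if status == 'exported':
            on_exported()
        time.sleep(interval)
        status = get_status()
    return status

if __name__ == '__main__':
    states = iter(['export_in_progress', 'exported', 'done'])
    with ThreadPoolExecutor(max_workers=1) as pool:
        future = pool.submit(poll_until_done,
                             lambda: next(states),
                             lambda: print('renaming table log to .pending_import'))
    print('final status:', future.result())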
 
adam/commands/export/import_session.py CHANGED
@@ -34,10 +34,6 @@ class ImportSession(Command):
         return exporter.import_session(spec)
 
     def completion(self, state: ReplState):
-        # warm up cache
-        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
-        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
-
         return {}
 
     def help(self, _: ReplState):
adam/commands/export/importer.py CHANGED
@@ -1,11 +1,9 @@
 from abc import abstractmethod
 
-from adam.commands.export.utils_export import csv_dir
+from adam.commands.export.utils_export import csv_dir, export_log_dir, os_system_exec
 from adam.config import Config
 from adam.repl_state import ReplState
 from adam.utils import ing
-from adam.utils_k8s.cassandra_nodes import CassandraNodes
-from adam.utils_k8s.pods import log_prefix
 
 class Importer:
     @abstractmethod
@@ -13,7 +11,7 @@ class Importer:
         pass
 
     @abstractmethod
-    def import_from_csv(self, state: ReplState, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+    def import_from_csv(self, state: ReplState, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False, log_file: str = None):
         pass
 
     @abstractmethod
@@ -27,11 +25,12 @@ class Importer:
         pod = state.pod
         namespace = state.namespace
         to_session = state.export_session
-        log_file = f'{log_prefix()}-{from_session}_{keyspace}.{target_table}.log.pending_import'
+        log_file = f'{export_log_dir()}/{from_session}_{keyspace}.{target_table}.log.pending_import'
 
-        to = f'{log_prefix()}-{to_session}_{keyspace}.{target_table}.log.done'
+        to = f'{export_log_dir()}/{to_session}_{keyspace}.{target_table}.log.done'
 
-        CassandraNodes.exec(pod, namespace, f'mv {log_file} {to}', show_out=Config().is_debug(), shell='bash')
+        cmd = f'mv {log_file} {to}'
+        os_system_exec(cmd, show_out=Config().is_debug())
 
         return to, to_session
 
@@ -41,12 +40,13 @@ class Importer:
 
         return session
 
-    def remove_csv(self, state: ReplState, from_session: str, table: str, target_table: str, multi_tables = True):
+    def remove_csv(self, state: ReplState, from_session: str, table: str, target_table: str, multi_tables = True, job_log: str = None):
         pod = state.pod
         namespace = state.namespace
 
-        with ing(f'[{from_session}] Cleaning up temporary files', suppress_log=multi_tables):
-            CassandraNodes.exec(pod, namespace, f'rm -rf {self.csv_file(from_session, table, target_table)}', show_out=Config().is_debug(), shell='bash')
+        with ing(f'[{from_session}] Cleaning up temporary files', suppress_log=multi_tables, job_log=job_log):
+            cmd = f'rm -rf {self.csv_file(from_session, table, target_table)}'
+            os_system_exec(cmd, show_out=Config().is_debug())
 
     def db(self, session: str, keyspace: str):
         return f'{session}_{keyspace}'
@@ -78,4 +78,4 @@ class Importer:
         elif session.startswith('e'):
             importer = 'athena'
 
-        return importer
+        return importer
adam/commands/export/importer_athena.py CHANGED
@@ -19,9 +19,16 @@ class AthenaImporter(Importer):
     def prefix(self):
         return 'e'
 
-    def import_from_csv(self, state: ReplState, from_session: str,
-                        keyspace: str, table: str, target_table: str, columns: str,
-                        multi_tables = True, create_db = False):
+    def import_from_csv(self,
+                        state: ReplState,
+                        from_session: str,
+                        keyspace: str,
+                        table: str,
+                        target_table: str,
+                        columns: str,
+                        multi_tables = True,
+                        create_db = False,
+                        job_log: str = None):
         csv_file = self.csv_file(from_session, table, target_table)
         pod = state.pod
         namespace = state.namespace
@@ -47,12 +54,14 @@ class AthenaImporter(Importer):
             return to, to_session
         finally:
             if succeeded:
-                self.remove_csv(state, from_session, table, target_table, multi_tables)
+                self.remove_csv(state, from_session, table, target_table, multi_tables, job_log=job_log)
                 Athena.clear_cache()
 
-            if not multi_tables:
+            if multi_tables:
+                log2(f'[{to_session}] {keyspace}.{target_table} OK', file=job_log)
+            else:
                 with export_db(state) as dbs:
-                    dbs.sql(f'select * from {database}.{target_table} limit 10')
+                    dbs.sql(f'select * from {keyspace}.{target_table} limit 10', backgrounded=True, export_log=job_log)
 
     def import_from_local_csv(self, state: ReplState,
                               keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
adam/commands/export/importer_sqlite.py CHANGED
@@ -1,9 +1,10 @@
+from typing import TextIO
 import pandas
 
 from adam.commands.export.export_databases import export_db
 from adam.commands.export.importer import Importer
 from adam.repl_state import ReplState
-from adam.utils import GeneratorStream, bytes_generator_from_file, ing
+from adam.utils import GeneratorStream, bytes_generator_from_file, ing, log2
 from adam.utils_k8s.pods import Pods
 from adam.utils_sqlite import SQLite, sqlite
 
@@ -11,9 +12,17 @@ class SqliteImporter(Importer):
     def prefix(self):
         return 's'
 
-    def import_from_csv(self, state: ReplState, from_session: str,
-                        keyspace: str, table: str, target_table: str, columns: str,
-                        multi_tables = True, create_db = False):
+    def import_from_csv(self,
+                        state: ReplState,
+                        from_session: str,
+                        keyspace: str,
+                        table: str,
+                        target_table: str,
+                        columns: str,
+                        multi_tables = True,
+                        create_db = False,
+                        job_log: str = None):
+
         csv_file = self.csv_file(from_session, table, target_table)
         pod = state.pod
         namespace = state.namespace
@@ -21,7 +30,7 @@ class SqliteImporter(Importer):
 
         succeeded = False
         try:
-            with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables):
+            with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables, job_log=job_log):
                 # create a connection to single keyspace
                 with sqlite(to_session, keyspace) as conn:
                     bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
@@ -35,12 +44,14 @@ class SqliteImporter(Importer):
             return to, to_session
         finally:
             if succeeded:
-                self.remove_csv(state, from_session, table, target_table, multi_tables)
+                self.remove_csv(state, from_session, table, target_table, multi_tables, job_log=job_log)
                 SQLite.clear_cache()
 
-            if not multi_tables:
+            if multi_tables:
+                log2(f'[{to_session}] {keyspace}.{target_table} OK', file=job_log)
+            else:
                 with export_db(state) as dbs:
-                    dbs.sql(f'select * from {keyspace}.{target_table} limit 10')
+                    dbs.sql(f'select * from {keyspace}.{target_table} limit 10', backgrounded=True, export_log=job_log)
 
     def import_from_local_csv(self, state: ReplState,
                               keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
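importer_sqlite.py still reads the exported CSV out of the pod (Pods.read_file) and loads it into a per-session SQLite database; the hunks above only show the added job_log plumbing, not the load itself. As a rough, self-contained illustration of a CSV-bytes-to-SQLite load with pandas (assumed here; kaqing's own sqlite() and GeneratorStream helpers are replaced with plain sqlite3 and an in-memory buffer):

# Self-contained approximation of loading exported CSV bytes into SQLite with
# pandas; plain sqlite3 stands in for kaqing's sqlite() helper.
import io
import sqlite3

import pandas

def load_csv_into_sqlite(csv_bytes: bytes, db_path: str, table: str) -> int:
    df = pandas.read_csv(io.BytesIO(csv_bytes))
    conn = sqlite3.connect(db_path)
    try:
        df.to_sql(table, conn, if_exists='replace', index=False)
        conn.commit()
    finally:
        conn.close()
    return len(df)

if __name__ == '__main__':
    rows = load_csv_into_sqlite(b'id,name\n1,alpha\n2,beta\n', 'export_demo.db', 'demo_table')
    print(f'{rows} rows imported')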
adam/commands/export/utils_export.py CHANGED
@@ -1,12 +1,13 @@
 import io
+import os
 import re
 
 from adam.config import Config
-from adam.pod_exec_result import PodExecResult
+from adam.utils import ExecResult, creating_dir, log2
 from adam.repl_state import ReplState
 from adam.utils_k8s.cassandra_nodes import CassandraNodes
-from adam.utils_k8s.pods import log_prefix
 from adam.utils_k8s.statefulsets import StatefulSets
+from adam.utils_local import local_exec
 
 class ImportSpec:
     def __init__(self, table_name: str, session: str = None, files: list[str] = None, importer: str = None):
@@ -196,7 +197,7 @@ class ExportTableStatus:
         statuses: list[ExportTableStatus] = []
 
         status_in_whole = 'done'
-        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{export_session}_*.log*')
+        log_files: list[str] = find_files(pod, namespace, f'{export_log_dir()}/{export_session}_*.log*')
 
         for log_file in log_files:
             status: ExportTableStatus = ExportTableStatus.from_log_file(pod, namespace, export_session, log_file)
@@ -211,7 +212,7 @@ class ExportTableStatus:
         def get_csv_files_n_table(target_table: str):
             db = f'{copy_session}_{target_table}'
             csv_file = f'{csv_dir()}/{db}/*.csv'
-            csv_files: list[str] = find_files(pod, namespace, csv_file)
+            csv_files: list[str] = find_files(pod, namespace, csv_file, remote=True)
             if csv_files:
                 table = target_table
                 m = re.match(f'{csv_dir()}/{db}/(.*).csv', csv_files[0])
@@ -221,7 +222,7 @@ class ExportTableStatus:
 
             return csv_files, target_table
 
-        m = re.match(f'{log_prefix()}-{copy_session}_(.*?)\.(.*?)\.log(.*)', log_file)
+        m = re.match(f'{export_log_dir()}/{copy_session}_(.*?)\.(.*?)\.log(.*)', log_file)
         if m:
             keyspace = m.group(1)
             target_table = m.group(2)
@@ -234,7 +235,8 @@ class ExportTableStatus:
 
         # 4 rows exported to 1 files in 0 day, 0 hour, 0 minute, and 1.335 seconds.
         pattern = 'rows exported to'
-        r: PodExecResult = CassandraNodes.exec(pod, namespace, f"grep '{pattern}' {log_file}", show_out=Config().is_debug(), shell='bash')
+        r: ExecResult = local_exec(['grep', pattern, log_file], show_out=Config().is_debug())
+
         if r.exit_code() == 0:
             csv_files, table = get_csv_files_n_table(target_table)
             if csv_files:
@@ -246,17 +248,26 @@ class ExportTableStatus:
 
         return ExportTableStatus(None, None, 'unknown')
 
-def csv_dir():
-    return Config().get('export.csv_dir', '/c3/cassandra/tmp')
-
-def find_files(pod: str, namespace: str, pattern: str, mmin: int = 0):
-    if mmin:
-        r = CassandraNodes.exec(pod, namespace, f'find {pattern} -mmin -{mmin}', show_out=Config().is_debug(), shell='bash')
+def find_files(pod: str, namespace: str, pattern: str, mmin: int = 0, remote = False):
+    stdout = ''
+    if not remote:
+        # find . -maxdepth 1 -type f -name '*'
+        dir = os.path.dirname(pattern)
+        base = os.path.basename(pattern)
+        cmd = ['find', dir, '-name', base]
+        if mmin:
+            cmd += ['-mmin', f'-{mmin}']
+
+        stdout = local_exec(cmd, show_out=Config().is_debug()).stdout
     else:
-        r = CassandraNodes.exec(pod, namespace, f'find {pattern}', show_out=Config().is_debug(), shell='bash')
+        cmd = f'find {pattern}'
+        if mmin:
+            cmd = f'{cmd} -mmin -{mmin}'
+
+        stdout = CassandraNodes.exec(pod, namespace, cmd, show_out=Config().is_debug(), shell='bash').stdout
 
     log_files = []
-    for line in r.stdout.split('\n'):
+    for line in stdout.split('\n'):
         line = line.strip(' \r')
         if line:
             log_files.append(line)
@@ -341,4 +352,15 @@ class PodPushHandler:
         return False
 
 def state_with_pod(state: ReplState, pod: str = None):
-    return PodPushHandler(state, pod=pod)
+    return PodPushHandler(state, pod=pod)
+
+def os_system_exec(cmd: str, show_out = False):
+    if show_out: log2(cmd)
+
+    os.system(cmd)
+
+def csv_dir():
+    return Config().get('export.csv_dir', '/c3/cassandra/tmp')
+
+def export_log_dir():
+    return creating_dir(Config().get('export.log-dir', '/tmp/qing-db/q/export/logs'))
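utils_export.py now distinguishes local from in-pod file searches: find_files builds a find argument list and runs it through local_exec by default, falling back to CassandraNodes.exec only when remote=True, while os_system_exec is a thin os.system wrapper that optionally echoes the command. A standalone approximation of the local branch follows, using subprocess in place of kaqing's local_exec; the path in the usage line is only an example (it mirrors the export.log-dir default above).

# Rough stand-in for the local branch of find_files: the glob pattern is split
# into directory and basename and handed to find; subprocess replaces local_exec.
import os
import subprocess

def local_find(pattern: str, mmin: int = 0) -> list[str]:
    cmd = ['find', os.path.dirname(pattern) or '.', '-name', os.path.basename(pattern)]
    if mmin:
        cmd += ['-mmin', f'-{mmin}']
    out = subprocess.run(cmd, capture_output=True, text=True).stdout
    return [line.strip() for line in out.splitlines() if line.strip()]

if __name__ == '__main__':
    # e.g. export logs touched in the last 30 minutes (path is an example)
    print(local_find('/tmp/qing-db/q/export/logs/*.log', mmin=30))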
adam/commands/generate_report.py ADDED
@@ -0,0 +1,52 @@
+import click
+import json
+
+from adam.checks.check_result import CheckResult
+from adam.checks.check_utils import run_checks
+from adam.commands import extract_options
+from adam.commands.command import Command
+from adam.commands.commands_utils import kaqing_log_file
+from adam.repl_state import ReplState
+from adam.utils import log2
+
+class GenerateReport(Command):
+    COMMAND = 'generate report'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(GenerateReport, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return GenerateReport.COMMAND
+
+    def required(self):
+        return ReplState.NON_L
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            with extract_options(args, ['-s', '--show']) as (args, show_out):
+                results = run_checks(state.sts, state.namespace, state.pod, show_out=show_out)
+                output = CheckResult.report(results)
+
+                if state.in_repl:
+                    with kaqing_log_file() as json_file:
+                        json.dump(output, json_file, indent=2)
+                        log2(f'Report stored in {json_file.name}.')
+                else:
+                    click.echo(json.dumps(output, indent=2))
+
+                return output
+
+    def completion(self, state: ReplState):
+        return super().completion(state, {'-s': None})
+
+    def help(self, _: ReplState):
+        return f"{GenerateReport.COMMAND} [-s]\t generate report"
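The new generate report command runs the checks and then branches on state.in_repl: inside the REPL the JSON report is written to a kaqing log file, otherwise it is echoed to stdout for CLI use. A toy sketch of that branching is shown below; the file handling and sample data are invented for illustration and do not use kaqing's helpers.

# Toy version of the run() branching: write the report JSON to a file in the
# REPL case, print it otherwise; file name and check data are made up.
import json
import tempfile

def emit_report(output: dict, in_repl: bool) -> None:
    if in_repl:
        with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as json_file:
            json.dump(output, json_file, indent=2)
            print(f'Report stored in {json_file.name}.')
    else:
        print(json.dumps(output, indent=2))

if __name__ == '__main__':
    emit_report({'checks': [{'name': 'disk', 'status': 'ok'}]}, in_repl=False)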
adam/commands/medusa/medusa_restore.py CHANGED
@@ -1,4 +1,3 @@
-from collections.abc import Callable
 from datetime import datetime
 from functools import partial
 
@@ -8,7 +7,6 @@ from adam.commands.medusa.utils_medusa import medusa_backup_names
 from adam.utils_k8s.statefulsets import StatefulSets
 from adam.repl_state import ReplState, RequiredState
 from adam.utils_k8s.custom_resources import CustomResources
-from adam.config import Config
 from adam.utils import tabulize, log2, log_exc
 
 class MedusaRestore(Command):
@@ -69,20 +67,6 @@ class MedusaRestore(Command):
 
     def completion(self, state: ReplState):
         return super().completion(state, lambda: {id: None for id in medusa_backup_names(state)}, auto_key='medusa.backups')
-        # if sc := super().completion(state):
-        #     ns = state.namespace
-        #     dc: str = StatefulSets.get_datacenter(state.sts, ns)
-        #     if not dc:
-        #         return {}
-
-        #     if Config().get('medusa.restore-auto-complete', False):
-        #         leaf = {id: None for id in [f"{x['metadata']['name']}" for x in CustomResources.medusa_show_backupjobs(dc, ns)]}
-
-        #         return super().completion(state, leaf)
-        #     else:
-        #         return sc
-
-        return {}
 
     def help(self, _: ReplState):
         return f'{MedusaRestore.COMMAND}\t start a restore job'
adam/commands/nodetool.py CHANGED
@@ -38,7 +38,7 @@ class NodeTool(Command):
         return state
 
     def completion(self, state: ReplState):
-        return super().completion(state, {c: {'&': None} for c in NODETOOL_COMMANDS}, pods=Devices.device(state).pods(state, '-'))
+        return super().completion(state, {c: {'&': None} for c in NODETOOL_COMMANDS}, pods=Devices.of(state).pods(state, '-'))
 
     def help(self, _: ReplState):
         return f'{NodeTool.COMMAND} <sub-command> [&]\t run nodetool with arguments'