kaqing 2.0.184__py3-none-any.whl → 2.0.227__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kaqing might be problematic.

Files changed (205)
  1. adam/app_session.py +1 -1
  2. adam/batch.py +15 -15
  3. adam/checks/compactionstats.py +2 -1
  4. adam/checks/cpu.py +2 -1
  5. adam/checks/disk.py +6 -5
  6. adam/checks/gossip.py +2 -1
  7. adam/checks/memory.py +2 -1
  8. adam/checks/status.py +2 -1
  9. adam/commands/app/app.py +4 -4
  10. adam/commands/app/app_ping.py +2 -2
  11. adam/commands/{login.py → app/login.py} +2 -2
  12. adam/commands/app/show_app_actions.py +3 -3
  13. adam/commands/app/show_app_id.py +2 -2
  14. adam/commands/app/show_app_queues.py +2 -2
  15. adam/commands/{show → app}/show_login.py +3 -3
  16. adam/commands/app/utils_app.py +9 -1
  17. adam/commands/audit/audit.py +8 -24
  18. adam/commands/audit/audit_repair_tables.py +3 -3
  19. adam/commands/audit/audit_run.py +3 -3
  20. adam/commands/audit/completions_l.py +15 -0
  21. adam/commands/audit/show_last10.py +2 -3
  22. adam/commands/audit/show_slow10.py +2 -2
  23. adam/commands/audit/show_top10.py +2 -2
  24. adam/commands/bash/bash.py +3 -3
  25. adam/commands/bash/utils_bash.py +1 -1
  26. adam/commands/cassandra/download_cassandra_log.py +45 -0
  27. adam/commands/cassandra/restart_cluster.py +47 -0
  28. adam/commands/cassandra/restart_node.py +51 -0
  29. adam/commands/cassandra/restart_nodes.py +47 -0
  30. adam/commands/{rollout.py → cassandra/rollout.py} +3 -3
  31. adam/commands/{show → cassandra}/show_cassandra_repairs.py +7 -5
  32. adam/commands/{show → cassandra}/show_cassandra_status.py +24 -17
  33. adam/commands/{show → cassandra}/show_cassandra_version.py +2 -2
  34. adam/commands/cassandra/show_processes.py +50 -0
  35. adam/commands/cassandra/show_storage.py +44 -0
  36. adam/commands/{watch.py → cassandra/watch.py} +2 -2
  37. adam/commands/cli/__init__.py +0 -0
  38. adam/commands/{cli_commands.py → cli/cli_commands.py} +6 -1
  39. adam/commands/{clipboard_copy.py → cli/clipboard_copy.py} +4 -4
  40. adam/commands/{show/show_commands.py → cli/show_cli_commands.py} +5 -5
  41. adam/commands/code.py +2 -2
  42. adam/commands/command.py +54 -14
  43. adam/commands/commands_utils.py +14 -6
  44. adam/commands/config/__init__.py +0 -0
  45. adam/commands/{param_get.py → config/param_get.py} +2 -2
  46. adam/commands/{param_set.py → config/param_set.py} +2 -2
  47. adam/commands/{show → config}/show_params.py +3 -3
  48. adam/commands/{alter_tables.py → cql/alter_tables.py} +3 -3
  49. adam/commands/cql/completions_c.py +29 -0
  50. adam/commands/cql/cqlsh.py +4 -8
  51. adam/commands/cql/utils_cql.py +36 -17
  52. adam/commands/debug/__init__.py +0 -0
  53. adam/commands/debug/debug.py +22 -0
  54. adam/commands/debug/debug_completes.py +35 -0
  55. adam/commands/debug/debug_timings.py +35 -0
  56. adam/commands/debug/show_offloaded_completes.py +45 -0
  57. adam/commands/deploy/code_start.py +2 -2
  58. adam/commands/deploy/code_stop.py +2 -2
  59. adam/commands/deploy/deploy_frontend.py +2 -2
  60. adam/commands/deploy/deploy_pg_agent.py +2 -2
  61. adam/commands/deploy/deploy_pod.py +2 -2
  62. adam/commands/deploy/undeploy_frontend.py +2 -2
  63. adam/commands/deploy/undeploy_pg_agent.py +2 -2
  64. adam/commands/deploy/undeploy_pod.py +2 -2
  65. adam/commands/devices/device.py +37 -11
  66. adam/commands/devices/device_app.py +7 -7
  67. adam/commands/devices/device_auit_log.py +2 -2
  68. adam/commands/devices/device_cass.py +6 -6
  69. adam/commands/devices/device_export.py +7 -4
  70. adam/commands/devices/device_postgres.py +19 -9
  71. adam/commands/devices/devices.py +1 -1
  72. adam/commands/diag/__init__.py +0 -0
  73. adam/commands/{check.py → diag/check.py} +3 -3
  74. adam/commands/diag/generate_report.py +52 -0
  75. adam/commands/{issues.py → diag/issues.py} +3 -2
  76. adam/commands/exit.py +2 -2
  77. adam/commands/export/clean_up_all_export_sessions.py +2 -2
  78. adam/commands/export/clean_up_export_sessions.py +2 -2
  79. adam/commands/export/completions_x.py +11 -0
  80. adam/commands/export/download_export_session.py +5 -5
  81. adam/commands/export/drop_export_database.py +2 -2
  82. adam/commands/export/drop_export_databases.py +2 -2
  83. adam/commands/export/export.py +3 -19
  84. adam/commands/export/export_databases.py +20 -11
  85. adam/commands/export/export_select.py +9 -34
  86. adam/commands/export/export_sessions.py +13 -11
  87. adam/commands/export/export_use.py +6 -6
  88. adam/commands/export/export_x_select.py +48 -0
  89. adam/commands/export/exporter.py +140 -53
  90. adam/commands/export/import_files.py +3 -7
  91. adam/commands/export/import_session.py +2 -6
  92. adam/commands/export/importer.py +12 -13
  93. adam/commands/export/importer_athena.py +15 -35
  94. adam/commands/export/importer_sqlite.py +19 -8
  95. adam/commands/export/show_column_counts.py +11 -12
  96. adam/commands/export/show_export_databases.py +4 -4
  97. adam/commands/export/show_export_session.py +5 -5
  98. adam/commands/export/show_export_sessions.py +4 -4
  99. adam/commands/export/utils_export.py +40 -25
  100. adam/commands/fs/__init__.py +0 -0
  101. adam/commands/{cat.py → fs/cat.py} +4 -4
  102. adam/commands/fs/cat_local.py +42 -0
  103. adam/commands/{cd.py → fs/cd.py} +4 -4
  104. adam/commands/{download_file.py → fs/download_file.py} +7 -7
  105. adam/commands/{find_files.py → fs/find_files.py} +7 -7
  106. adam/commands/{find_processes.py → fs/find_processes.py} +14 -22
  107. adam/commands/{head.py → fs/head.py} +5 -5
  108. adam/commands/fs/head_local.py +46 -0
  109. adam/commands/{ls.py → fs/ls.py} +4 -4
  110. adam/commands/fs/ls_local.py +40 -0
  111. adam/commands/{pwd.py → fs/pwd.py} +2 -2
  112. adam/commands/fs/rm.py +18 -0
  113. adam/commands/fs/rm_downloads.py +39 -0
  114. adam/commands/fs/rm_logs.py +44 -0
  115. adam/commands/fs/rm_logs_local.py +38 -0
  116. adam/commands/{shell.py → fs/shell.py} +2 -2
  117. adam/commands/{show → fs}/show_adam.py +3 -3
  118. adam/commands/{show → fs}/show_host.py +2 -2
  119. adam/commands/fs/show_last_results.py +39 -0
  120. adam/commands/fs/tail.py +36 -0
  121. adam/commands/fs/tail_local.py +46 -0
  122. adam/commands/fs/utils_fs.py +192 -0
  123. adam/commands/help.py +2 -2
  124. adam/commands/intermediate_command.py +3 -0
  125. adam/commands/kubectl.py +2 -2
  126. adam/commands/medusa/medusa_backup.py +2 -2
  127. adam/commands/medusa/medusa_restore.py +4 -18
  128. adam/commands/medusa/medusa_show_backupjobs.py +2 -2
  129. adam/commands/medusa/medusa_show_restorejobs.py +2 -2
  130. adam/commands/medusa/utils_medusa.py +15 -0
  131. adam/commands/nodetool/__init__.py +0 -0
  132. adam/commands/nodetool/nodetool.py +87 -0
  133. adam/commands/nodetool/utils_nodetool.py +44 -0
  134. adam/commands/postgres/completions_p.py +22 -0
  135. adam/commands/postgres/postgres.py +10 -20
  136. adam/commands/postgres/postgres_databases.py +3 -3
  137. adam/commands/postgres/postgres_ls.py +3 -3
  138. adam/commands/postgres/postgres_preview.py +2 -2
  139. adam/commands/postgres/utils_postgres.py +12 -2
  140. adam/commands/preview_table.py +3 -4
  141. adam/commands/reaper/reaper_forward.py +2 -2
  142. adam/commands/reaper/reaper_forward_stop.py +2 -2
  143. adam/commands/reaper/reaper_restart.py +2 -2
  144. adam/commands/reaper/reaper_run_abort.py +2 -2
  145. adam/commands/reaper/reaper_runs.py +14 -12
  146. adam/commands/reaper/reaper_runs_abort.py +2 -2
  147. adam/commands/reaper/reaper_schedule_activate.py +8 -4
  148. adam/commands/reaper/reaper_schedule_start.py +3 -4
  149. adam/commands/reaper/reaper_schedule_stop.py +3 -4
  150. adam/commands/reaper/reaper_schedules.py +2 -2
  151. adam/commands/reaper/reaper_status.py +2 -2
  152. adam/commands/reaper/utils_reaper.py +41 -6
  153. adam/commands/repair/repair_log.py +2 -2
  154. adam/commands/repair/repair_run.py +2 -2
  155. adam/commands/repair/repair_scan.py +2 -4
  156. adam/commands/repair/repair_stop.py +2 -3
  157. adam/commands/{show/show.py → show.py} +12 -11
  158. adam/config.py +4 -5
  159. adam/embedded_params.py +1 -1
  160. adam/repl.py +24 -10
  161. adam/repl_commands.py +68 -45
  162. adam/repl_session.py +16 -1
  163. adam/repl_state.py +16 -1
  164. adam/sql/async_executor.py +62 -0
  165. adam/sql/lark_completer.py +286 -0
  166. adam/sql/lark_parser.py +604 -0
  167. adam/sql/qingl.lark +1075 -0
  168. adam/sso/cred_cache.py +2 -5
  169. adam/utils.py +259 -82
  170. adam/utils_async_job.py +73 -0
  171. adam/utils_k8s/app_clusters.py +11 -4
  172. adam/utils_k8s/app_pods.py +10 -5
  173. adam/utils_k8s/cassandra_clusters.py +19 -7
  174. adam/utils_k8s/cassandra_nodes.py +16 -6
  175. adam/utils_k8s/k8s.py +9 -0
  176. adam/utils_k8s/kube_context.py +1 -4
  177. adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} +8 -2
  178. adam/utils_k8s/pods.py +189 -29
  179. adam/utils_k8s/statefulsets.py +5 -2
  180. adam/utils_local.py +78 -2
  181. adam/utils_repl/appendable_completer.py +6 -0
  182. adam/utils_repl/repl_completer.py +51 -4
  183. adam/utils_sqlite.py +3 -8
  184. adam/version.py +1 -1
  185. {kaqing-2.0.184.dist-info → kaqing-2.0.227.dist-info}/METADATA +1 -1
  186. kaqing-2.0.227.dist-info/RECORD +280 -0
  187. kaqing-2.0.227.dist-info/top_level.txt +2 -0
  188. teddy/__init__.py +0 -0
  189. teddy/lark_parser.py +436 -0
  190. teddy/lark_parser2.py +618 -0
  191. adam/commands/cql/cql_completions.py +0 -32
  192. adam/commands/export/export_select_x.py +0 -54
  193. adam/commands/logs.py +0 -37
  194. adam/commands/nodetool.py +0 -69
  195. adam/commands/postgres/psql_completions.py +0 -11
  196. adam/commands/report.py +0 -61
  197. adam/commands/restart.py +0 -60
  198. adam/commands/show/show_processes.py +0 -49
  199. adam/commands/show/show_storage.py +0 -42
  200. kaqing-2.0.184.dist-info/RECORD +0 -244
  201. kaqing-2.0.184.dist-info/top_level.txt +0 -1
  202. /adam/commands/{show → cassandra}/__init__.py +0 -0
  203. /adam/commands/{nodetool_commands.py → nodetool/nodetool_commands.py} +0 -0
  204. {kaqing-2.0.184.dist-info → kaqing-2.0.227.dist-info}/WHEEL +0 -0
  205. {kaqing-2.0.184.dist-info → kaqing-2.0.227.dist-info}/entry_points.txt +0 -0
adam/commands/export/exporter.py

@@ -1,5 +1,6 @@
  from datetime import datetime
  import time
+ import traceback

  from adam.commands.command import InvalidArgumentsException
  from adam.commands.cql.utils_cql import cassandra_table_names, run_cql, table_spec
@@ -8,13 +9,13 @@ from adam.commands.export.export_sessions import ExportSessions
  from adam.commands.export.importer import Importer
  from adam.commands.export.importer_athena import AthenaImporter
  from adam.commands.export.importer_sqlite import SqliteImporter
- from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, find_files, state_with_pod
+ from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, fs_exec, state_with_pod, table_log_dir
  from adam.config import Config
- from adam.pod_exec_result import PodExecResult
+ from adam.repl_session import ReplSession
  from adam.repl_state import ReplState
- from adam.utils import debug, log, parallelize, log2, ing, log_exc
+ from adam.utils import debug, kaqing_log_file_name, log, log_to_pods, offload, parallelize, log2, ing, log_exc
  from adam.utils_k8s.cassandra_nodes import CassandraNodes
- from adam.utils_k8s.pods import log_prefix
+ from adam.utils_k8s.pods import Pods

  class Exporter:
  def export_tables(args: list[str], state: ReplState, export_only: bool = False, max_workers = 0) -> tuple[list[str], ExportSpec]:
@@ -133,11 +134,9 @@ class Exporter:

  prefix = Importer.prefix_from_importer(spec.importer)
  if spec.session:
- spec.session = f'{prefix}{spec.session[1:]}'
+ state.export_session = f'{prefix}{spec.session[1:]}'
  else:
- spec.session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'
-
- state.export_session = spec.session
+ state.export_session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'

  return spec

@@ -154,89 +153,160 @@
  if export_state == 'init':
  CassandraNodes.exec(state.pod, state.namespace, f'rm -rf {csv_dir()}/{spec.session}_*', show_out=Config().is_debug(), shell='bash')

- action = f'[{spec.session}] Exporting|Exported'
+ job_log = kaqing_log_file_name()
+
+ action = f'[{spec.session}] Triggering export of'
  if export_state == 'init':
  action = f'[{spec.session}] Preparing|Prepared'
  elif export_state == 'import':
  action = f'[{spec.session}] Importing|Imported'

- with parallelize(spec.tables, max_workers, msg=action + ' {size} Cassandra tables') as exec:
- return exec.map(lambda table: Exporter.export_table(table, state, spec.session, spec.importer, export_only, len(spec.tables) > 1, consistency=spec.consistency, export_state=export_state)), spec
-
- def export_table(spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, consistency: str = None, export_state=None):
+ msg = action + ' {size} Cassandra tables'
+
+ if export_state != 'init':
+ log2(f'[{spec.session}] Logging to {job_log}...')
+ ReplSession().append_history(f':cat {job_log}')
+
+ pod = state.pod
+ with parallelize(spec.tables, max_workers, msg=msg, collect=export_state == 'init', name='exporter') as exec:
+ return exec.map(lambda table: Exporter.export_table(table,
+ state.with_pod(pod),
+ spec.session,
+ spec.importer,
+ export_only,
+ len(spec.tables) > 1,
+ consistency=spec.consistency,
+ export_state=export_state,
+ job_log=None if export_state == 'init' else job_log)), spec
+
+ def export_table(spec: ExportTableSpec,
+ state: ReplState,
+ session: str,
+ importer: str,
+ export_only = False,
+ multi_tables = True,
+ consistency: str = None,
+ export_state=None,
+ job_log: str = None):
  s: str = None

  table, target_table, columns = Exporter.resove_table_n_columns(spec, state, include_ks_in_target=False, importer=importer)

- log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+ log_file = f'{table_log_dir(state.pod, state.namespace)}/{session}_{spec.keyspace}.{target_table}.log'
  create_db = not state.export_session

  if export_state == 'init':
  Exporter.create_table_log(spec, state, session, table, target_table)
  return 'table_log_created'
  else:
- if export_state == 'pending_export':
- Exporter.export_to_csv(spec, state, session, table, target_table, columns, multi_tables=multi_tables, consistency=consistency)
+ try:
+ if export_state == 'pending_export':
+ Exporter.export_to_csv(spec, state, session, table, target_table, columns, multi_tables=multi_tables, consistency=consistency, job_log=job_log)

- log_files: list[str] = find_files(state.pod, state.namespace, f'{log_file}*')
- if not log_files:
- return s
+ log_files: list[str] = Pods.find_files(state.pod, 'cassandra', state.namespace, f'{log_file}*', remote=log_to_pods())
+ if not log_files:
+ return s

- log_file = log_files[0]
+ log_file = log_files[0]

- status: ExportTableStatus = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
- while status.status != 'done':
- if status.status == 'export_in_pregress':
- debug('Exporting to CSV is still in progess, sleeping for 1 sec...')
- time.sleep(1)
- elif status.status == 'exported':
- log_file = Exporter.rename_to_pending_import(spec, state, session, target_table)
- if importer == 'csv' or export_only:
- return 'pending_import'
- elif status.status == 'pending_import':
- log_file, session = Exporter.import_from_csv(spec, state, session, importer, table, target_table, columns, multi_tables=multi_tables, create_db=create_db)
+ status: ExportTableStatus = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)

- status = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
+ with offload(name='exporter') as exec:
+ ctx = ExportTableContext(spec, state, session, importer, export_only, multi_tables, table, target_table, columns, create_db, log_file, status, job_log)
+ exec.submit(lambda: Exporter.export_loop(ctx))
+ # Exporter.export_loop(ExportTableContext(spec, state, session, importer, export_only, multi_tables, table, target_table, columns, create_db, log_file, status))
+ except:
+ traceback.print_exc()

  return status.status

+ def export_loop(ctx: 'ExportTableContext'):
+ try:
+ while ctx.status.status != 'done':
+ if ctx.status.status == 'export_in_pregress':
+ debug('Exporting to CSV is still in progess, sleeping for 1 sec...')
+ time.sleep(1)
+ elif ctx.status.status == 'exported':
+ ctx.log_file = Exporter.rename_to_pending_import(ctx.spec, ctx.state, ctx.session, ctx.target_table)
+ ExportSessions.clear_export_session_cache()
+ if ctx.importer == 'csv' or ctx.export_only:
+ return 'pending_import'
+ elif ctx.status.status == 'pending_import':
+ ctx.log_file, ctx.session = Exporter.import_from_csv(ctx.spec,
+ ctx.state,
+ ctx.session,
+ ctx.importer,
+ ctx.table,
+ ctx.target_table,
+ ctx.columns,
+ multi_tables=ctx.multi_tables,
+ create_db=ctx.create_db,
+ job_log=ctx.f)
+
+ ctx.status = ExportTableStatus.from_log_file(ctx.state.pod, ctx.state.namespace, ctx.session, ctx.log_file)
+
+ return ctx.status.status
+ except:
+ traceback.print_exc()
+
  def create_table_log(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str):
- log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+ dir = table_log_dir(state.pod, state.namespace)
+ log_file = f'{dir}/{session}_{spec.keyspace}.{target_table}.log'

- CassandraNodes.exec(state.pod, state.namespace, f'rm -f {log_file}* && touch {log_file}', show_out=Config().is_debug(), shell='bash')
+ cmd = f'rm -f {log_file}* && mkdir -p {dir} && touch {log_file}'
+ fs_exec(state.pod, state.namespace, cmd, show_out=Config().is_debug())

  return table

- def export_to_csv(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str, columns: str, multi_tables = True, consistency: str = None):
+ def export_to_csv(spec: ExportTableSpec,
+ state: ReplState,
+ session: str,
+ table: str,
+ target_table: str,
+ columns: str,
+ multi_tables = True,
+ consistency: str = None,
+ job_log: str = None):
  db = f'{session}_{target_table}'

  CassandraNodes.exec(state.pod, state.namespace, f'mkdir -p {csv_dir()}/{db}', show_out=Config().is_debug(), shell='bash')
  csv_file = f'{csv_dir()}/{db}/{table}.csv'
- log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+ table_log_file = f'{table_log_dir(state.pod, state.namespace)}/{session}_{spec.keyspace}.{target_table}.log'

  suppress_ing_log = Config().is_debug() or multi_tables
  queries = []
  if consistency:
  queries.append(f'CONSISTENCY {consistency}')
  queries.append(f"COPY {spec.keyspace}.{table}({columns}) TO '{csv_file}' WITH HEADER = TRUE")
- r: PodExecResult = ing(
- f'[{session}] Dumping table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
- lambda: run_cql(state, ';'.join(queries), show_out=Config().is_debug(), backgrounded=True, log_file=log_file),
- suppress_log=suppress_ing_log)

- return log_file
+ with ing(f'[{session}] Triggering dump of table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
+ suppress_log=suppress_ing_log,
+ job_log = job_log):
+ run_cql(state, ';'.join(queries), show_out=Config().is_debug(), backgrounded=True, log_file=table_log_file, history=False)
+
+ return table_log_file

  def rename_to_pending_import(spec: ExportTableSpec, state: ReplState, session: str, target_table: str):
- log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+ log_file = f'{table_log_dir(state.pod, state.namespace)}/{session}_{spec.keyspace}.{target_table}.log'
  to = f'{log_file}.pending_import'

- CassandraNodes.exec(state.pod, state.namespace, f'mv {log_file} {to}', show_out=Config().is_debug(), shell='bash')
+ cmd =f'mv {log_file} {to}'
+ fs_exec(state.pod, state.namespace, cmd, show_out=Config().is_debug())

  return to

- def import_from_csv(spec: ExportTableSpec, state: ReplState, session: str, importer: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+ def import_from_csv(spec: ExportTableSpec,
+ state: ReplState,
+ session: str,
+ importer: str,
+ table: str,
+ target_table: str,
+ columns: str,
+ multi_tables = True,
+ create_db = False,
+ job_log: str = None):
  im = AthenaImporter() if importer == 'athena' else SqliteImporter()
- return im.import_from_csv(state, session if session else state.export_session, spec.keyspace, table, target_table, columns, multi_tables, create_db)
+ return im.import_from_csv(state, session if session else state.export_session, spec.keyspace, table, target_table, columns, multi_tables, create_db, job_log=job_log)

  def resove_table_n_columns(spec: ExportTableSpec, state: ReplState, include_ks_in_target = False, importer = 'sqlite'):
  table = spec.table
@@ -262,6 +332,22 @@ class Exporter:

  return table, target_table, columns

+ class ExportTableContext:
+ def __init__(self, spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, table: str = None, target_table: str = None, columns: str = None, create_db = False, log_file: str = None, status: ExportTableStatus = None, f: str = None):
+ self.spec = spec
+ self.state = state
+ self.session = session
+ self.importer = importer
+ self.export_only = export_only
+ self.multi_tables = multi_tables
+ self.table = table
+ self.target_table = target_table
+ self.columns = columns
+ self.create_db = create_db
+ self.log_file = log_file
+ self.status = status
+ self.f = f
+
  class ExportService:
  def __init__(self, handler: 'ExporterHandler'):
  self.handler = handler

@@ -279,16 +365,17 @@

  ExportSessions.clear_export_session_cache()

- if spec.importer == 'csv' or export_only:
- ExportSessions.show_session(state.sts, state.pod, state.namespace, spec.session)
- else:
- log()
- with export_db(state) as dbs:
- dbs.show_database()
+ # if spec.importer == 'csv' or export_only:
+ # ExportSessions.show_session(state.sts, state.pod, state.namespace, spec.session)
+ # else:
+ # log()
+ # with export_db(state) as dbs:
+ # dbs.show_database()
  finally:
+ pass
  # if exporting to csv, do not bind the new session id to repl state
- if spec and spec.importer == 'csv':
- state.export_session = export_session
+ # if spec and spec.importer == 'csv':
+ # state.export_session = export_session

  return state
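
The most significant behavioural change in exporter.py is that export_table no longer blocks while polling the per-table log file: it packs its arguments into the new ExportTableContext and hands the polling loop (export_loop) to a background worker through the new offload(name='exporter') context manager, while progress goes to a job log the user can tail with :cat. offload comes from the new adam/utils_async_job.py, which is not part of this excerpt, so the sketch below only illustrates the pattern; the ThreadPoolExecutor-based offload shown here is an assumption, not the package's implementation.

# Illustrative sketch only: a stand-in for the offload()/export_loop pattern above.
# The real offload() comes from adam/utils_async_job.py (not shown in this diff);
# this version just wraps concurrent.futures.ThreadPoolExecutor.
import time
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager

@contextmanager
def offload(name: str = 'worker'):
    # Assumption: yields an object with submit() and does not wait for the work,
    # so the caller (the REPL command) can return immediately.
    pool = ThreadPoolExecutor(max_workers=1, thread_name_prefix=name)
    try:
        yield pool
    finally:
        pool.shutdown(wait=False)   # leave the submitted loop running in the background

class ExportTableContext:
    # Mirrors the new context class in exporter.py: a mutable bag of loop state.
    def __init__(self, status: str):
        self.status = status

def export_loop(ctx: ExportTableContext) -> str:
    # Poll until the table log reports 'done', as Exporter.export_loop does.
    while ctx.status != 'done':
        time.sleep(1)
        ctx.status = 'done'   # placeholder for ExportTableStatus.from_log_file(...)
    return ctx.status

with offload(name='exporter') as exec:
    exec.submit(lambda: export_loop(ExportTableContext('pending_import')))
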
adam/commands/export/import_files.py

@@ -33,12 +33,8 @@ class ImportCSVFiles(Command):
  with export(state) as exporter:
  return exporter.import_files(spec)

- def completion(self, state: ReplState):
- # warm up cache
- ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
- ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
-
+ def completion(self, _: ReplState):
  return {}

- def help(self, _: ReplState):
- return f'{ImportCSVFiles.COMMAND} <file-names,...>\t import files in session to Athena or SQLite'
+ def help(self, state: ReplState):
+ return super().help(state, 'import files in session to SQLite(or Athena)', args='<file-names>,...')
adam/commands/export/import_session.py

@@ -34,11 +34,7 @@ class ImportSession(Command):
  return exporter.import_session(spec)

  def completion(self, state: ReplState):
- # warm up cache
- ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
- ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
-
  return {}

- def help(self, _: ReplState):
- return f'{ImportSession.COMMAND} <export-session-name>\t import files in session to Athena or SQLite'
+ def help(self, state: ReplState):
+ return super().help(state, 'import tables in session to SQLite(or Athena)', args='<export-session-name>')
adam/commands/export/importer.py

@@ -1,11 +1,9 @@
  from abc import abstractmethod

- from adam.commands.export.utils_export import csv_dir
+ from adam.commands.export.utils_export import csv_dir, fs_exec, table_log_dir
  from adam.config import Config
  from adam.repl_state import ReplState
  from adam.utils import ing
- from adam.utils_k8s.cassandra_nodes import CassandraNodes
- from adam.utils_k8s.pods import log_prefix

  class Importer:
  @abstractmethod

@@ -13,7 +11,7 @@ class Importer:
  pass

  @abstractmethod
- def import_from_csv(self, state: ReplState, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+ def import_from_csv(self, state: ReplState, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False, log_file: str = None):
  pass

  @abstractmethod

@@ -24,14 +22,14 @@ class Importer:
  pass

  def move_to_done(self, state: ReplState, from_session: str, keyspace: str, target_table: str):
- pod = state.pod
- namespace = state.namespace
+ dir = table_log_dir(state.pod, state.namespace)
  to_session = state.export_session
- log_file = f'{log_prefix()}-{from_session}_{keyspace}.{target_table}.log.pending_import'
+ log_file = f'{dir}/{from_session}_{keyspace}.{target_table}.log.pending_import'

- to = f'{log_prefix()}-{to_session}_{keyspace}.{target_table}.log.done'
+ to = f'{dir}/{to_session}_{keyspace}.{target_table}.log.done'

- CassandraNodes.exec(pod, namespace, f'mv {log_file} {to}', show_out=Config().is_debug(), shell='bash')
+ cmd = f'mv {log_file} {to}'
+ fs_exec(state.pod, state.namespace, cmd, show_out=Config().is_debug())

  return to, to_session

@@ -41,12 +39,13 @@ class Importer:

  return session

- def remove_csv(self, state: ReplState, from_session: str, table: str, target_table: str, multi_tables = True):
+ def remove_csv(self, state: ReplState, from_session: str, table: str, target_table: str, multi_tables = True, job_log: str = None):
  pod = state.pod
  namespace = state.namespace

- with ing(f'[{from_session}] Cleaning up temporary files', suppress_log=multi_tables):
- CassandraNodes.exec(pod, namespace, f'rm -rf {self.csv_file(from_session, table, target_table)}', show_out=Config().is_debug(), shell='bash')
+ with ing(f'[{from_session}] Cleaning up temporary files', suppress_log=multi_tables, job_log=job_log):
+ cmd = f'rm -rf {self.csv_file(from_session, table, target_table)}'
+ fs_exec(state.pod, state.namespace, cmd, show_out=Config().is_debug())

  def db(self, session: str, keyspace: str):
  return f'{session}_{keyspace}'

@@ -78,4 +77,4 @@ class Importer:
  elif session.startswith('e'):
  importer = 'athena'

- return importer
+ return importer
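
Throughout Importer (and in Exporter above), ad-hoc CassandraNodes.exec(..., shell='bash') calls for file housekeeping are replaced by fs_exec from utils_export, and log paths move from a flat log_prefix()-based name to a per-pod table_log_dir(pod, namespace) directory. Neither helper's body appears in this excerpt; a minimal stand-in that is consistent with the call sites, assuming fs_exec simply delegates to the pod shell, might look like this (the table_log_dir path is purely illustrative):

# Hypothetical stand-ins for fs_exec and table_log_dir, inferred only from their
# call sites in this diff; the real helpers live in adam/commands/export/utils_export.py.
from adam.utils_k8s.cassandra_nodes import CassandraNodes

def table_log_dir(pod: str, namespace: str) -> str:
    # Assumption: a per-pod directory for export table logs (path is illustrative).
    return f'/tmp/kaqing/export_logs/{pod}'

def fs_exec(pod: str, namespace: str, cmd: str, show_out: bool = False):
    # Assumption: run a filesystem command (mkdir/mv/rm/touch) on the Cassandra pod,
    # exactly like the CassandraNodes.exec(..., shell='bash') calls it replaces.
    return CassandraNodes.exec(pod, namespace, cmd, show_out=show_out, shell='bash')

# Usage matching Importer.move_to_done:
# fs_exec(state.pod, state.namespace, f'mv {log_file} {to}', show_out=Config().is_debug())
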
adam/commands/export/importer_athena.py

@@ -19,9 +19,16 @@ class AthenaImporter(Importer):
  def prefix(self):
  return 'e'

- def import_from_csv(self, state: ReplState, from_session: str,
- keyspace: str, table: str, target_table: str, columns: str,
- multi_tables = True, create_db = False):
+ def import_from_csv(self,
+ state: ReplState,
+ from_session: str,
+ keyspace: str,
+ table: str,
+ target_table: str,
+ columns: str,
+ multi_tables = True,
+ create_db = False,
+ job_log: str = None):
  csv_file = self.csv_file(from_session, table, target_table)
  pod = state.pod
  namespace = state.namespace

@@ -39,35 +46,6 @@ class AthenaImporter(Importer):
  s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{database}/{keyspace}/{target_table}/{table}.csv')

  self.create_schema(to_session, bucket, database, keyspace, table, columns, multi_tables, create_db)
- # msg: str = None
- # if create_db:
- # msg = f"[{to_session}] Creating database {database}"
- # else:
- # msg = f"[{to_session}] Creating table {target_table}"
- # with ing(msg, suppress_log=multi_tables):
- # query = f'CREATE DATABASE IF NOT EXISTS {database};'
- # debug(query)
- # Athena.query(query, 'default')
-
- # query = f'DROP TABLE IF EXISTS {target_table};'
- # debug(query)
- # Athena.query(query, database)
-
- # athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
- # query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
- # f' {athena_columns})\n' + \
- # "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
- # 'WITH SERDEPROPERTIES (\n' + \
- # ' "separatorChar" = ",",\n' + \
- # ' "quoteChar" = "\\"")\n' + \
- # f"LOCATION 's3://{bucket}/export/{database}/{keyspace}/{target_table}'\n" + \
- # 'TBLPROPERTIES ("skip.header.line.count"="1");'
- # debug(query)
- # try:
- # Athena.query(query, database)
- # except Exception as e:
- # log2(f'*** Failed query:\n{query}')
- # raise e

  to, _ = self.move_to_done(state, from_session, keyspace, target_table)

@@ -76,12 +54,14 @@ class AthenaImporter(Importer):
  return to, to_session
  finally:
  if succeeded:
- self.remove_csv(state, from_session, table, target_table, multi_tables)
+ self.remove_csv(state, from_session, table, target_table, multi_tables, job_log=job_log)
  Athena.clear_cache()

- if not multi_tables:
+ if multi_tables:
+ log2(f'[{to_session}] {keyspace}.{target_table} OK', file=job_log)
+ else:
  with export_db(state) as dbs:
- dbs.sql(f'select * from {database}.{target_table} limit 10')
+ dbs.sql(f'select * from {keyspace}.{target_table} limit 10', backgrounded=True, export_log=job_log)

  def import_from_local_csv(self, state: ReplState,
  keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
adam/commands/export/importer_sqlite.py

@@ -1,9 +1,10 @@
+ from typing import TextIO
  import pandas

  from adam.commands.export.export_databases import export_db
  from adam.commands.export.importer import Importer
  from adam.repl_state import ReplState
- from adam.utils import GeneratorStream, bytes_generator_from_file, ing
+ from adam.utils import GeneratorStream, bytes_generator_from_file, ing, log2
  from adam.utils_k8s.pods import Pods
  from adam.utils_sqlite import SQLite, sqlite

@@ -11,9 +12,17 @@ class SqliteImporter(Importer):
  def prefix(self):
  return 's'

- def import_from_csv(self, state: ReplState, from_session: str,
- keyspace: str, table: str, target_table: str, columns: str,
- multi_tables = True, create_db = False):
+ def import_from_csv(self,
+ state: ReplState,
+ from_session: str,
+ keyspace: str,
+ table: str,
+ target_table: str,
+ columns: str,
+ multi_tables = True,
+ create_db = False,
+ job_log: str = None):
+
  csv_file = self.csv_file(from_session, table, target_table)
  pod = state.pod
  namespace = state.namespace

@@ -21,7 +30,7 @@ class SqliteImporter(Importer):

  succeeded = False
  try:
- with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables):
+ with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables, job_log=job_log):
  # create a connection to single keyspace
  with sqlite(to_session, keyspace) as conn:
  bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)

@@ -35,12 +44,14 @@ class SqliteImporter(Importer):
  return to, to_session
  finally:
  if succeeded:
- self.remove_csv(state, from_session, table, target_table, multi_tables)
+ self.remove_csv(state, from_session, table, target_table, multi_tables, job_log=job_log)
  SQLite.clear_cache()

- if not multi_tables:
+ if multi_tables:
+ log2(f'[{to_session}] {keyspace}.{target_table} OK', file=job_log)
+ else:
  with export_db(state) as dbs:
- dbs.sql(f'select * from {keyspace}.{target_table} limit 10')
+ dbs.sql(f'select * from {keyspace}.{target_table} limit 10', backgrounded=True, export_log=job_log)

  def import_from_local_csv(self, state: ReplState,
  keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
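
SqliteImporter.import_from_csv gains the same job_log parameter as the Athena path: the ing(...) progress messages and the per-table '... OK' line written via log2(..., file=job_log) now land in the shared job log when several tables are imported in parallel, and the closing preview query runs backgrounded. The CSV-to-SQLite step itself is only hinted at here (Pods.read_file feeding a sqlite(to_session, keyspace) connection, with pandas imported at the top); a self-contained sketch of that step, assuming the pandas read_csv/to_sql route, would be:

# Minimal sketch of the CSV-to-SQLite step, assuming the pandas route hinted at by
# the imports in importer_sqlite.py. File, table, and database names are illustrative.
import io
import sqlite3
import pandas

def load_csv_into_sqlite(csv_bytes: bytes, db_path: str, target_table: str) -> int:
    frame = pandas.read_csv(io.BytesIO(csv_bytes))   # header row comes from COPY ... WITH HEADER = TRUE
    with sqlite3.connect(db_path) as conn:
        frame.to_sql(target_table, conn, if_exists='replace', index=False)
    return len(frame)

rows = load_csv_into_sqlite(b'id,name\n1,alpha\n2,beta\n', 'export.db', 'users_by_id')
print(f'{rows} rows imported')
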
adam/commands/export/show_column_counts.py

@@ -1,5 +1,6 @@
- from adam.commands import validate_args
+ from adam.commands import extract_trailing_options, validate_args
  from adam.commands.command import Command
+ from adam.commands.cql.utils_cql import cassandra_table_names
  from adam.commands.export.export_databases import ExportDatabases, export_db
  from adam.config import Config
  from adam.repl_state import ReplState, RequiredState

@@ -27,19 +28,17 @@ class ShowColumnCounts(Command):
  return super().run(cmd, state)

  with self.validate(args, state) as (args, state):
- with validate_args(args, state, name='SQL statement') as table:
- with export_db(state) as dbs:
- query = Config().get(f'export.column_counts_query', 'select id, count(id) as columns from {table} group by id')
- query = query.replace('{table}', table)
- dbs.sql(query, state.export_session)
+ with extract_trailing_options(args, '&') as (args, backgrounded):
+ with validate_args(args, state, name='SQL statement') as table:
+ with export_db(state) as dbs:
+ query = Config().get(f'export.column_counts_query', 'select id, count(id) as columns from {table} group by id')
+ query = query.replace('{table}', table)
+ dbs.sql(query, state.export_session, backgrounded=backgrounded)

  return state

  def completion(self, state: ReplState):
- if not state.export_session:
- return {}
+ return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)}, auto_key='x.tables')

- return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)})
-
- def help(self, _: ReplState):
- return f'{ShowColumnCounts.COMMAND} <export-table-name>\t show column count per id'
+ def help(self, state: ReplState):
+ return super().help(state, 'show column count per id', args='<export-table-name>')
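
ShowColumnCounts now strips a trailing '&' from its arguments with extract_trailing_options and forwards the resulting flag as backgrounded=... to dbs.sql, so the column-count query can be pushed to the background like the exports themselves. The helper's implementation is not part of this excerpt; judging purely from the as (args, backgrounded) unpacking, a plausible shape is:

# Hypothetical sketch of extract_trailing_options(args, '&'), inferred only from how
# ShowColumnCounts unpacks it above; the real helper is imported from adam.commands.
from contextlib import contextmanager

@contextmanager
def extract_trailing_options(args: list[str], option: str):
    # If the last token is the option marker (e.g. '&'), strip it and report True.
    if args and args[-1] == option:
        yield args[:-1], True
    else:
        yield args, False

with extract_trailing_options(['my_table', '&'], '&') as (args, backgrounded):
    print(args, backgrounded)   # ['my_table'] True
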
adam/commands/export/show_export_databases.py

@@ -31,8 +31,8 @@ class ShowExportDatabases(Command):

  return state

- def completion(self, state: ReplState):
- return DeviceExport().ls_completion(ShowExportDatabases.COMMAND, state, default = super().completion(state))
+ def completion(self, _: ReplState):
+ return {}

- def help(self, _: ReplState):
- return f'{ShowExportDatabases.COMMAND}\t list export databases'
+ def help(self, state: ReplState):
+ return super().help(state, 'list export databases')
adam/commands/export/show_export_session.py

@@ -1,6 +1,6 @@
  from adam.commands import validate_args
  from adam.commands.command import Command
- from adam.commands.export.export_sessions import ExportSessions, export_session
+ from adam.commands.export.export_sessions import export_session
  from adam.repl_state import ReplState, RequiredState

  class ShowExportSession(Command):

@@ -32,8 +32,8 @@ class ShowExportSession(Command):

  return state

- def completion(self, state: ReplState):
- return super().completion(state, {session: None for session in ExportSessions.export_session_names(state.sts, state.pod, state.namespace)})
+ def completion(self, _: ReplState):
+ return {}

- def help(self, _: ReplState):
- return f'{ShowExportSession.COMMAND} <export-session-name>\t show export session'
+ def help(self, state: ReplState):
+ return super().help(state, 'show export session', args='<export-session-name>')
adam/commands/export/show_export_sessions.py

@@ -30,8 +30,8 @@ class ShowExportSessions(Command):

  return state

- def completion(self, state: ReplState):
- return super().completion(state)
+ def completion(self, _: ReplState):
+ return {}

- def help(self, _: ReplState):
- return f'{ShowExportSessions.COMMAND}\t list export sessions'
+ def help(self, state: ReplState):
+ return super().help(state, 'list export sessions')
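
A pattern repeated across all of these command classes: hand-built help strings of the form f'{COMMAND} <args>\t description' give way to super().help(state, description, args=...), and most completion() overrides now simply return {}, suggesting completion moved elsewhere (for example into the new lark-based completer added in this release). The base-class formatting is not visible in this excerpt; a toy sketch of the convention, with the formatting and the COMMAND value as assumptions, is:

# Toy sketch of the new help() convention. The call signature is taken from the call
# sites above; the formatting it produces and the COMMAND value are assumptions.
class Command:
    COMMAND = ''

    def help(self, state, description: str, args: str = '') -> str:
        left = f'{self.COMMAND} {args}'.strip()
        return f'{left}\t {description}'

class ShowExportSessions(Command):
    COMMAND = 'show export sessions'   # illustrative value

    def help(self, state):
        return super().help(state, 'list export sessions')

print(ShowExportSessions().help(None))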