kaqing-2.0.171-py3-none-any.whl → kaqing-2.0.204-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (194)
  1. adam/app_session.py +5 -10
  2. adam/apps.py +18 -4
  3. adam/batch.py +7 -7
  4. adam/checks/check_utils.py +3 -1
  5. adam/checks/disk.py +2 -3
  6. adam/columns/memory.py +3 -4
  7. adam/commands/__init__.py +15 -6
  8. adam/commands/alter_tables.py +26 -41
  9. adam/commands/app/__init__.py +0 -0
  10. adam/commands/{app_cmd.py → app/app.py} +2 -2
  11. adam/commands/{show → app}/show_app_actions.py +7 -15
  12. adam/commands/{show → app}/show_app_queues.py +1 -4
  13. adam/{utils_app.py → commands/app/utils_app.py} +9 -1
  14. adam/commands/audit/audit.py +9 -26
  15. adam/commands/audit/audit_repair_tables.py +5 -7
  16. adam/commands/audit/audit_run.py +1 -1
  17. adam/commands/audit/completions_l.py +15 -0
  18. adam/commands/audit/show_last10.py +2 -14
  19. adam/commands/audit/show_slow10.py +2 -13
  20. adam/commands/audit/show_top10.py +2 -11
  21. adam/commands/audit/utils_show_top10.py +15 -3
  22. adam/commands/bash/bash.py +1 -1
  23. adam/commands/bash/utils_bash.py +1 -1
  24. adam/commands/cassandra/__init__.py +0 -0
  25. adam/commands/cassandra/download_cassandra_log.py +45 -0
  26. adam/commands/cassandra/nodetool.py +64 -0
  27. adam/commands/cassandra/nodetool_commands.py +120 -0
  28. adam/commands/cassandra/restart_cluster.py +47 -0
  29. adam/commands/cassandra/restart_node.py +51 -0
  30. adam/commands/cassandra/restart_nodes.py +47 -0
  31. adam/commands/cassandra/rollout.py +88 -0
  32. adam/commands/cat.py +5 -19
  33. adam/commands/cd.py +7 -9
  34. adam/commands/check.py +10 -18
  35. adam/commands/cli_commands.py +6 -1
  36. adam/commands/{cp.py → clipboard_copy.py} +34 -36
  37. adam/commands/code.py +2 -2
  38. adam/commands/command.py +139 -22
  39. adam/commands/commands_utils.py +14 -12
  40. adam/commands/cql/alter_tables.py +66 -0
  41. adam/commands/cql/completions_c.py +29 -0
  42. adam/commands/cql/cqlsh.py +3 -7
  43. adam/commands/cql/utils_cql.py +23 -61
  44. adam/commands/debug/__init__.py +0 -0
  45. adam/commands/debug/debug.py +22 -0
  46. adam/commands/debug/debug_completes.py +35 -0
  47. adam/commands/debug/debug_timings.py +35 -0
  48. adam/commands/deploy/deploy_pg_agent.py +2 -2
  49. adam/commands/deploy/deploy_pod.py +2 -4
  50. adam/commands/deploy/undeploy_pg_agent.py +2 -2
  51. adam/commands/devices/device.py +40 -9
  52. adam/commands/devices/device_app.py +19 -29
  53. adam/commands/devices/device_auit_log.py +3 -3
  54. adam/commands/devices/device_cass.py +17 -23
  55. adam/commands/devices/device_export.py +12 -11
  56. adam/commands/devices/device_postgres.py +79 -63
  57. adam/commands/devices/devices.py +1 -1
  58. adam/commands/download_cassandra_log.py +45 -0
  59. adam/commands/download_file.py +47 -0
  60. adam/commands/export/clean_up_all_export_sessions.py +3 -3
  61. adam/commands/export/clean_up_export_sessions.py +7 -19
  62. adam/commands/export/completions_x.py +11 -0
  63. adam/commands/export/download_export_session.py +40 -0
  64. adam/commands/export/drop_export_database.py +6 -22
  65. adam/commands/export/drop_export_databases.py +3 -9
  66. adam/commands/export/export.py +1 -17
  67. adam/commands/export/export_databases.py +109 -32
  68. adam/commands/export/export_select.py +8 -55
  69. adam/commands/export/export_sessions.py +211 -0
  70. adam/commands/export/export_use.py +13 -16
  71. adam/commands/export/export_x_select.py +48 -0
  72. adam/commands/export/exporter.py +176 -167
  73. adam/commands/export/import_files.py +44 -0
  74. adam/commands/export/import_session.py +10 -6
  75. adam/commands/export/importer.py +24 -9
  76. adam/commands/export/importer_athena.py +114 -44
  77. adam/commands/export/importer_sqlite.py +45 -23
  78. adam/commands/export/show_column_counts.py +11 -20
  79. adam/commands/export/show_export_databases.py +5 -2
  80. adam/commands/export/show_export_session.py +6 -15
  81. adam/commands/export/show_export_sessions.py +4 -11
  82. adam/commands/export/utils_export.py +79 -27
  83. adam/commands/find_files.py +51 -0
  84. adam/commands/find_processes.py +76 -0
  85. adam/commands/generate_report.py +52 -0
  86. adam/commands/head.py +36 -0
  87. adam/commands/help.py +2 -2
  88. adam/commands/intermediate_command.py +6 -3
  89. adam/commands/login.py +3 -6
  90. adam/commands/ls.py +2 -2
  91. adam/commands/medusa/medusa_backup.py +13 -16
  92. adam/commands/medusa/medusa_restore.py +26 -37
  93. adam/commands/medusa/medusa_show_backupjobs.py +7 -7
  94. adam/commands/medusa/medusa_show_restorejobs.py +6 -6
  95. adam/commands/medusa/utils_medusa.py +15 -0
  96. adam/commands/nodetool.py +3 -8
  97. adam/commands/os/__init__.py +0 -0
  98. adam/commands/os/cat.py +36 -0
  99. adam/commands/os/download_file.py +47 -0
  100. adam/commands/os/find_files.py +51 -0
  101. adam/commands/os/find_processes.py +76 -0
  102. adam/commands/os/head.py +36 -0
  103. adam/commands/os/shell.py +41 -0
  104. adam/commands/param_get.py +10 -12
  105. adam/commands/param_set.py +7 -10
  106. adam/commands/postgres/completions_p.py +22 -0
  107. adam/commands/postgres/postgres.py +25 -40
  108. adam/commands/postgres/postgres_databases.py +269 -0
  109. adam/commands/postgres/utils_postgres.py +33 -20
  110. adam/commands/preview_table.py +4 -2
  111. adam/commands/pwd.py +4 -6
  112. adam/commands/reaper/reaper_forward.py +2 -2
  113. adam/commands/reaper/reaper_run_abort.py +4 -10
  114. adam/commands/reaper/reaper_runs.py +3 -3
  115. adam/commands/reaper/reaper_schedule_activate.py +12 -12
  116. adam/commands/reaper/reaper_schedule_start.py +7 -12
  117. adam/commands/reaper/reaper_schedule_stop.py +7 -12
  118. adam/commands/reaper/utils_reaper.py +13 -6
  119. adam/commands/repair/repair_log.py +1 -4
  120. adam/commands/repair/repair_run.py +3 -8
  121. adam/commands/repair/repair_scan.py +1 -6
  122. adam/commands/repair/repair_stop.py +1 -5
  123. adam/commands/restart_cluster.py +47 -0
  124. adam/commands/restart_node.py +51 -0
  125. adam/commands/restart_nodes.py +47 -0
  126. adam/commands/shell.py +9 -2
  127. adam/commands/show/show.py +4 -4
  128. adam/commands/show/show_adam.py +3 -3
  129. adam/commands/show/show_cassandra_repairs.py +5 -6
  130. adam/commands/show/show_cassandra_status.py +29 -29
  131. adam/commands/show/show_cassandra_version.py +1 -4
  132. adam/commands/show/{show_commands.py → show_cli_commands.py} +3 -6
  133. adam/commands/show/show_login.py +3 -9
  134. adam/commands/show/show_params.py +2 -5
  135. adam/commands/show/show_processes.py +15 -16
  136. adam/commands/show/show_storage.py +9 -8
  137. adam/config.py +4 -5
  138. adam/embedded_params.py +1 -1
  139. adam/log.py +4 -4
  140. adam/repl.py +26 -18
  141. adam/repl_commands.py +32 -20
  142. adam/repl_session.py +9 -1
  143. adam/repl_state.py +39 -10
  144. adam/sql/async_executor.py +44 -0
  145. adam/sql/lark_completer.py +286 -0
  146. adam/sql/lark_parser.py +604 -0
  147. adam/sql/qingl.lark +1076 -0
  148. adam/sql/sql_completer.py +4 -6
  149. adam/sql/sql_state_machine.py +25 -13
  150. adam/sso/authn_ad.py +2 -5
  151. adam/sso/authn_okta.py +2 -4
  152. adam/sso/cred_cache.py +2 -5
  153. adam/sso/idp.py +8 -11
  154. adam/utils.py +299 -105
  155. adam/utils_athena.py +18 -18
  156. adam/utils_audits.py +3 -7
  157. adam/utils_issues.py +2 -2
  158. adam/utils_k8s/app_clusters.py +4 -4
  159. adam/utils_k8s/app_pods.py +8 -6
  160. adam/utils_k8s/cassandra_clusters.py +16 -5
  161. adam/utils_k8s/cassandra_nodes.py +7 -6
  162. adam/utils_k8s/custom_resources.py +11 -17
  163. adam/utils_k8s/jobs.py +7 -11
  164. adam/utils_k8s/k8s.py +14 -5
  165. adam/utils_k8s/kube_context.py +3 -6
  166. adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} +4 -4
  167. adam/utils_k8s/pods.py +98 -36
  168. adam/utils_k8s/statefulsets.py +5 -2
  169. adam/utils_local.py +42 -0
  170. adam/utils_repl/appendable_completer.py +6 -0
  171. adam/utils_repl/repl_completer.py +45 -2
  172. adam/utils_repl/state_machine.py +3 -3
  173. adam/utils_sqlite.py +58 -30
  174. adam/version.py +1 -1
  175. {kaqing-2.0.171.dist-info → kaqing-2.0.204.dist-info}/METADATA +1 -1
  176. kaqing-2.0.204.dist-info/RECORD +277 -0
  177. kaqing-2.0.204.dist-info/top_level.txt +2 -0
  178. teddy/__init__.py +0 -0
  179. teddy/lark_parser.py +436 -0
  180. teddy/lark_parser2.py +618 -0
  181. adam/commands/cql/cql_completions.py +0 -33
  182. adam/commands/export/export_handlers.py +0 -71
  183. adam/commands/export/export_select_x.py +0 -54
  184. adam/commands/logs.py +0 -37
  185. adam/commands/postgres/postgres_context.py +0 -274
  186. adam/commands/postgres/psql_completions.py +0 -10
  187. adam/commands/report.py +0 -61
  188. adam/commands/restart.py +0 -60
  189. kaqing-2.0.171.dist-info/RECORD +0 -236
  190. kaqing-2.0.171.dist-info/top_level.txt +0 -1
  191. /adam/commands/{app_ping.py → app/app_ping.py} +0 -0
  192. /adam/commands/{show → app}/show_app_id.py +0 -0
  193. {kaqing-2.0.171.dist-info → kaqing-2.0.204.dist-info}/WHEEL +0 -0
  194. {kaqing-2.0.171.dist-info → kaqing-2.0.204.dist-info}/entry_points.txt +0 -0
adam/commands/export/importer_athena.py
@@ -1,9 +1,11 @@
+import os
 import boto3
 
+from adam.commands.export.export_databases import export_db
 from adam.commands.export.importer import Importer
-from adam.commands.export.utils_export import GeneratorStream
 from adam.config import Config
-from adam.utils import log2, ing
+from adam.repl_state import ReplState
+from adam.utils import GeneratorStream, bytes_generator_from_file, debug, log2, ing
 from adam.utils_athena import Athena
 from adam.utils_k8s.pods import Pods
 
@@ -17,9 +19,14 @@ class AthenaImporter(Importer):
     def prefix(self):
         return 'e'
 
-    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+    def import_from_csv(self, state: ReplState, from_session: str,
+                        keyspace: str, table: str, target_table: str, columns: str,
+                        multi_tables = True, create_db = False):
         csv_file = self.csv_file(from_session, table, target_table)
-        db = self.db(to_session, keyspace)
+        pod = state.pod
+        namespace = state.namespace
+        to_session = state.export_session
+        database = self.db(to_session, keyspace)
 
         succeeded = False
         try:
@@ -29,52 +36,115 @@ class AthenaImporter(Importer):
                 bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
 
                 s3 = boto3.client('s3')
-                s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{db}/{keyspace}/{target_table}/{table}.csv')
-
-            msg: str = None
-            if create_db:
-                msg = f"[{to_session}] Creating database {db}"
-            else:
-                msg = f"[{to_session}] Creating table {target_table}"
-            with ing(msg, suppress_log=multi_tables):
-                query = f'CREATE DATABASE IF NOT EXISTS {db};'
-                if Config().is_debug():
-                    log2(query)
-                Athena.query(query, 'default')
-
-                query = f'DROP TABLE IF EXISTS {target_table};'
-                if Config().is_debug():
-                    log2(query)
-                Athena.query(query, db)
-
-                athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
-                query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
-                        f' {athena_columns})\n' + \
-                        "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
-                        'WITH SERDEPROPERTIES (\n' + \
-                        ' "separatorChar" = ",",\n' + \
-                        ' "quoteChar" = "\\"")\n' + \
-                        f"LOCATION 's3://{bucket}/export/{db}/{keyspace}/{target_table}'\n" + \
-                        'TBLPROPERTIES ("skip.header.line.count"="1");'
-                if Config().is_debug():
-                    log2(query)
-                try:
-                    Athena.query(query, db)
-                except Exception as e:
-                    log2(f'*** Failed query:\n{query}')
-                    raise e
-
-            to, _ = self.move_to_done(pod, namespace, to_session, from_session, keyspace, target_table)
+                s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{database}/{keyspace}/{target_table}/{table}.csv')
+
+            self.create_schema(to_session, bucket, database, keyspace, table, columns, multi_tables, create_db)
+
+            to, _ = self.move_to_done(state, from_session, keyspace, target_table)
 
             succeeded = True
 
             return to, to_session
         finally:
             if succeeded:
-                self.remove_csv(pod, namespace, from_session, table, target_table, multi_tables)
+                self.remove_csv(state, from_session, table, target_table, multi_tables)
+                Athena.clear_cache()
+
+            if multi_tables:
+                log2(f'[{to_session}] {keyspace}.{target_table} OK')
+            else:
+                with export_db(state) as dbs:
+                    dbs.sql(f'select * from {keyspace}.{target_table} limit 10')
+
+    def import_from_local_csv(self, state: ReplState,
+                              keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
+        to_session = state.export_session
+        database = self.db(to_session, keyspace)
+
+        succeeded = False
+        try:
+            columns = None
+            with open(csv_file, 'r') as f:
+                columns = f.readline()
+
+            bucket = Config().get('export.bucket', 'c3.ops--qing')
+
+            with ing(f'[{to_session}] Uploading to S3', suppress_log=multi_tables):
+                bytes = bytes_generator_from_file(csv_file)
+
+                s3 = boto3.client('s3')
+                s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{database}/{keyspace}/{table}/{os.path.basename(csv_file)}')
+
+            self.create_schema(to_session, bucket, database, keyspace, table, columns, multi_tables, create_db)
+            # msg: str = None
+            # if create_db:
+            # msg = f"[{to_session}] Creating database {database}"
+            # else:
+            # msg = f"[{to_session}] Creating table {target_table}"
+            # with ing(msg, suppress_log=multi_tables):
+            # query = f'CREATE DATABASE IF NOT EXISTS {database};'
+            # debug(query)
+            # Athena.query(query, 'default')
+
+            # query = f'DROP TABLE IF EXISTS {target_table};'
+            # debug(query)
+            # Athena.query(query, database)
+
+            # athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
+            # query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
+            # f' {athena_columns})\n' + \
+            # "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
+            # 'WITH SERDEPROPERTIES (\n' + \
+            # ' "separatorChar" = ",",\n' + \
+            # ' "quoteChar" = "\\"")\n' + \
+            # f"LOCATION 's3://{bucket}/export/{database}/{keyspace}/{target_table}'\n" + \
+            # 'TBLPROPERTIES ("skip.header.line.count"="1");'
+            # debug(query)
+            # try:
+            # Athena.query(query, database)
+            # except Exception as e:
+            # log2(f'*** Failed query:\n{query}')
+            # raise e
+
+            succeeded = True
+
+            return csv_file, to_session
+        finally:
+            if succeeded:
                 Athena.clear_cache()
 
             if not multi_tables:
-                query = f'select * from {target_table} limit 10'
-                log2(query)
-                Athena.run_query(query, db)
+                with export_db(state) as dbs:
+                    dbs.sql(f'select * from {database}.{table} limit 10')
+
+    def create_schema(self, to_session: str, bucket: str, database: str, keyspace: str, table: str, columns: list[str], multi_tables: bool, create_db = False):
+        msg: str = None
+        if create_db:
+            msg = f"[{to_session}] Creating database {database}"
+        else:
+            msg = f"[{to_session}] Creating table {table}"
+
+        with ing(msg, suppress_log=multi_tables):
+            query = f'CREATE DATABASE IF NOT EXISTS {database};'
+            debug(query)
+            Athena.query(query, 'default')
+
+            query = f'DROP TABLE IF EXISTS {table};'
+            debug(query)
+            Athena.query(query, database)
+
+            athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
+            query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {table}(\n' + \
+                    f' {athena_columns})\n' + \
+                    "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
+                    'WITH SERDEPROPERTIES (\n' + \
+                    ' "separatorChar" = ",",\n' + \
+                    ' "quoteChar" = "\\"")\n' + \
+                    f"LOCATION 's3://{bucket}/export/{database}/{keyspace}/{table}'\n" + \
+                    'TBLPROPERTIES ("skip.header.line.count"="1");'
            debug(query)
            try:
                Athena.query(query, database)
            except Exception as e:
                log2(f'*** Failed query:\n{query}')
                raise e
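Note on the AthenaImporter change above: import_from_csv now takes a ReplState and reads pod, namespace and export session from it, and the Athena DDL has been factored into create_schema. A minimal sketch of how a caller might drive the new signature; ReplState's default constructor and the pod/namespace/session values are assumptions, not taken from the package:

    from adam.commands.export.importer_athena import AthenaImporter
    from adam.repl_state import ReplState

    state = ReplState()                      # assumed default constructor
    state.pod = 'cassandra-dc1-0'            # placeholder pod name
    state.namespace = 'cass-operator'        # placeholder namespace
    state.export_session = 'sess_20240101'   # placeholder export session

    importer = AthenaImporter()
    # pod/namespace/to_session are no longer separate arguments; they come from state
    to, session = importer.import_from_csv(
        state, from_session='sess_20231231',
        keyspace='ks1', table='users', target_table='users', columns='id,name',
        multi_tables=False, create_db=True)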
adam/commands/export/importer_sqlite.py
@@ -1,47 +1,69 @@
-import os
-import sqlite3
 import pandas
 
+from adam.commands.export.export_databases import export_db
 from adam.commands.export.importer import Importer
-from adam.commands.export.utils_export import GeneratorStream
-from adam.utils import log2, ing
+from adam.repl_state import ReplState
+from adam.utils import GeneratorStream, bytes_generator_from_file, ing, log2
 from adam.utils_k8s.pods import Pods
-from adam.utils_sqlite import SQLite
+from adam.utils_sqlite import SQLite, sqlite
 
 class SqliteImporter(Importer):
     def prefix(self):
         return 's'
 
-    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+    def import_from_csv(self, state: ReplState, from_session: str,
+                        keyspace: str, table: str, target_table: str, columns: str,
+                        multi_tables = True, create_db = False):
         csv_file = self.csv_file(from_session, table, target_table)
-        db = self.db(to_session, keyspace)
+        pod = state.pod
+        namespace = state.namespace
+        to_session = state.export_session
 
         succeeded = False
-        conn = None
         try:
-            os.makedirs(SQLite.local_db_dir(), exist_ok=True)
-            conn = sqlite3.connect(f'{SQLite.local_db_dir()}/{db}.db')
-
             with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables):
-                bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
-                df = pandas.read_csv(GeneratorStream(bytes))
-
-                df.to_sql(target_table, conn, index=False, if_exists='replace')
+                # create a connection to single keyspace
+                with sqlite(to_session, keyspace) as conn:
+                    bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
+                    df = pandas.read_csv(GeneratorStream(bytes))
+                    df.to_sql(target_table, conn, index=False, if_exists='replace')
 
-            to, _ = self.move_to_done(pod, namespace, to_session, from_session, keyspace, target_table)
+            to, _ = self.move_to_done(state, from_session, keyspace, target_table)
 
             succeeded = True
 
             return to, to_session
         finally:
             if succeeded:
-                self.remove_csv(pod, namespace, from_session, table, target_table, multi_tables)
+                self.remove_csv(state, from_session, table, target_table, multi_tables)
                 SQLite.clear_cache()
 
-            if not multi_tables:
-                query = f'select * from {target_table} limit 10'
-                log2(query)
-                SQLite.run_query(query, conn_passed=conn)
+            if multi_tables:
+                log2(f'[{to_session}] {keyspace}.{target_table} OK')
+            else:
+                with export_db(state) as dbs:
+                    dbs.sql(f'select * from {keyspace}.{target_table} limit 10')
+
+    def import_from_local_csv(self, state: ReplState,
+                              keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
+        to_session = state.export_session
+
+        succeeded = False
+        try:
+            with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables):
+                # create a connection to single keyspace
+                with sqlite(to_session, keyspace) as conn:
+                    bytes = bytes_generator_from_file(csv_file)
+                    df = pandas.read_csv(GeneratorStream(bytes))
+                    df.to_sql(table, conn, index=False, if_exists='replace')
 
-            if conn:
-                conn.close()
+            succeeded = True
+
+            return csv_file, to_session
+        finally:
+            if succeeded:
+                SQLite.clear_cache()
+
+            if not multi_tables:
+                with export_db(state) as dbs:
+                    dbs.sql(f'select * from {keyspace}.{table} limit 10')
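Note on the SqliteImporter change above: the manual sqlite3.connect/close pair is replaced by a sqlite(to_session, keyspace) context manager imported from adam.utils_sqlite. That helper's implementation is not part of this diff; a rough sketch of what such a context manager could look like, with the database directory and file naming as assumptions:

    import contextlib
    import os
    import sqlite3

    @contextlib.contextmanager
    def sqlite(session: str, keyspace: str, db_dir: str = '/tmp/qing-dbs'):
        # open one connection per (session, keyspace) database file
        os.makedirs(db_dir, exist_ok=True)
        conn = sqlite3.connect(f'{db_dir}/{session}_{keyspace}.db')
        try:
            yield conn        # caller runs pandas.DataFrame.to_sql against this connection
        finally:
            conn.close()      # closed even if the import raises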
adam/commands/export/show_column_counts.py
@@ -1,8 +1,9 @@
+from adam.commands import extract_trailing_options, validate_args
 from adam.commands.command import Command
-from adam.commands.export.export_databases import ExportDatabases
+from adam.commands.cql.utils_cql import cassandra_table_names
+from adam.commands.export.export_databases import ExportDatabases, export_db
 from adam.config import Config
 from adam.repl_state import ReplState, RequiredState
-from adam.utils import log2
 
 class ShowColumnCounts(Command):
     COMMAND = 'show column counts on'
@@ -27,28 +28,18 @@ class ShowColumnCounts(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-            if not args:
-                if state.in_repl:
-                    log2('Use a SQL statement.')
-                else:
-                    log2('* SQL statement is missing.')
-
-                Command.display_help()
-
-                return 'command-missing'
-
-            table = args[0]
-            query = Config().get(f'export.column_counts_query', 'select id, count(id) as columns from {table} group by id')
-            query = query.replace('{table}', table)
-            ExportDatabases.run_query(query, state.export_session)
+            with extract_trailing_options(args, '&') as (args, backgrounded):
+                with validate_args(args, state, name='SQL statement') as table:
+                    with export_db(state) as dbs:
+                        query = Config().get(f'export.column_counts_query', 'select id, count(id) as columns from {table} group by id')
+                        query = query.replace('{table}', table)
+                        dbs.sql(query, state.export_session, backgrounded=backgrounded)
 
         return state
 
     def completion(self, state: ReplState):
-        if not state.export_session:
-            return {}
-
-        return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)})
+        return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)}, auto_key='x.tables')
+        # return {}
 
     def help(self, _: ReplState):
         return f'{ShowColumnCounts.COMMAND} <export-table-name>\t show column count per id'
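Note on the ShowColumnCounts change above: argument handling moves from inline checks to the extract_trailing_options and validate_args context managers imported from adam.commands. Their bodies are not in this diff; a sketch of the trailing-'&' behavior the new code appears to rely on (the helper below is hypothetical, only its calling convention is taken from the diff):

    import contextlib

    @contextlib.contextmanager
    def extract_trailing_options(args: list[str], option: str):
        # peel a trailing option flag (e.g. '&' for backgrounded execution) off the argument list
        backgrounded = bool(args) and args[-1] == option
        yield (args[:-1] if backgrounded else args), backgrounded

    with extract_trailing_options(['my_table', '&'], '&') as (args, backgrounded):
        print(args, backgrounded)   # ['my_table'] True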
adam/commands/export/show_export_databases.py
@@ -1,5 +1,6 @@
 from adam.commands.command import Command
 from adam.commands.devices.device_export import DeviceExport
+from adam.commands.export.export_databases import export_db
 from adam.repl_state import ReplState
 
 class ShowExportDatabases(Command):
@@ -25,12 +26,14 @@ class ShowExportDatabases(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-            DeviceExport().show_export_databases()
+            with export_db(state) as dbs:
+                dbs.show_databases()
 
         return state
 
     def completion(self, state: ReplState):
-        return DeviceExport().ls_completion(ShowExportDatabases.COMMAND, state, default = super().completion(state))
+        return {}
+        # return DeviceExport().ls_completion(ShowExportDatabases.COMMAND, state, default = super().completion(state))
 
     def help(self, _: ReplState):
         return f'{ShowExportDatabases.COMMAND}\t list export databases'
adam/commands/export/show_export_session.py
@@ -1,8 +1,7 @@
+from adam.commands import validate_args
 from adam.commands.command import Command
-from adam.commands.export.export_databases import ExportDatabases
-from adam.commands.export.exporter import Exporter
+from adam.commands.export.export_sessions import ExportSessions, export_session
 from adam.repl_state import ReplState, RequiredState
-from adam.utils import log2
 
 class ShowExportSession(Command):
     COMMAND = 'show export session'
@@ -27,22 +26,14 @@ class ShowExportSession(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-            if not args:
-                if state.in_repl:
-                    log2('Specify export database name.')
-                else:
-                    log2('* Database name is missing.')
-
-                Command.display_help()
-
-                return 'command-missing'
-
-            ExportDatabases.disply_export_session(state.sts, state.pod, state.namespace, args[0])
+            with validate_args(args, state, name='export session') as session:
+                with export_session(state) as sessions:
+                    sessions.show_session(session)
 
         return state
 
     def completion(self, state: ReplState):
-        return super().completion(state, {session: None for session in Exporter.export_session_names(state.sts, state.pod, state.namespace)})
+        return {}
 
     def help(self, _: ReplState):
         return f'{ShowExportSession.COMMAND} <export-session-name>\t show export session'
adam/commands/export/show_export_sessions.py
@@ -1,8 +1,6 @@
 from adam.commands.command import Command
-from adam.commands.export.exporter import Exporter
+from adam.commands.export.export_sessions import export_session
 from adam.repl_state import ReplState, RequiredState
-from adam.utils import lines_to_tabular, log
-from adam.utils_k8s.statefulsets import StatefulSets
 
 class ShowExportSessions(Command):
     COMMAND = 'show export sessions'
@@ -27,18 +25,13 @@ class ShowExportSessions(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-            pod = state.pod
-            if not pod:
-                pod = StatefulSets.pod_names(state.sts, state.namespace)[0]
-
-            sessions: dict[str, str] = Exporter.find_export_sessions(pod, state.namespace)
-            log(lines_to_tabular([f'{session}\t{export_state}' for session, export_state in sorted(sessions.items(), reverse=True)],
-                                 header='EXPORT_SESSION\tSTATUS', separator='\t'))
+            with export_session(state) as sessions:
+                sessions.show_all_sessions()
 
         return state
 
     def completion(self, state: ReplState):
-        return super().completion(state)
+        return {}
 
     def help(self, _: ReplState):
         return f'{ShowExportSessions.COMMAND}\t list export sessions'
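Note on the show_export_* commands above: they converge on one pattern, opening export_db(state) or export_session(state) as a context manager and delegating to it rather than calling Exporter/ExportDatabases class methods directly. A sketch of the resulting calling convention; the internals of export_db/export_session are not shown in this diff and the session name is a placeholder:

    from adam.commands.export.export_databases import export_db
    from adam.commands.export.export_sessions import export_session

    def show_export_state(state):
        with export_db(state) as dbs:               # database-level view
            dbs.show_databases()
        with export_session(state) as sessions:     # session-level view
            sessions.show_all_sessions()
            sessions.show_session('sess_20240101')  # placeholder session name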
adam/commands/export/utils_export.py
@@ -1,26 +1,36 @@
 import io
+import os
 import re
 
 from adam.config import Config
-from adam.pod_exec_result import PodExecResult
+from adam.utils import ExecResult, log2
 from adam.repl_state import ReplState
 from adam.utils_k8s.cassandra_nodes import CassandraNodes
-from adam.utils_k8s.pods import log_prefix
 from adam.utils_k8s.statefulsets import StatefulSets
+from adam.utils_local import local_exec
 
 class ImportSpec:
-    def __init__(self, session: str, importer: str):
+    def __init__(self, table_name: str, session: str = None, files: list[str] = None, importer: str = None):
+        self.table_name = table_name
         self.session = session
+        self.files = files
         self.importer = importer
 
-    def parse_specs(specs_str: str):
-        session: str = None
+    def parse_specs(specs_str: str, files = False):
+        session_or_files: str = None
         importer: str = None
+        table_name: str = None
 
         if specs_str:
-            importer, session = ImportSpec._extract_importer(specs_str.strip(' '))
+            importer, rest = ImportSpec._extract_importer(specs_str.strip(' '))
+
+            if rest:
+                table_name, session_or_files = ImportSpec._extract_table_name(rest)
+
+        if not files:
+            return ImportSpec(table_name, session=session_or_files, importer=importer)
 
-        return ImportSpec(session, importer)
+        return ImportSpec(table_name, files=[f.strip(' ') for f in session_or_files.split(',')], importer=importer)
 
     def _extract_importer(spec_str: str) -> tuple[str, str]:
         importer = None
@@ -34,12 +44,25 @@ class ImportSpec:
 
         return importer, rest
 
+    def _extract_table_name(spec_str: str) -> tuple[str, str]:
+        table_name = None
+        rest = spec_str
+
+        p = re.compile(r"(.*?)as\s+(.*)", re.IGNORECASE)
+        match = p.match(spec_str)
+        if match:
+            rest = match.group(1).strip(' ')
+            table_name = match.group(2).strip(' ')
+
+        return table_name, rest
+
 class ExportSpec(ImportSpec):
     def __init__(self, keyspace: str, consistency: str, importer: str, tables: list['ExportTableSpec'], session: str = None):
         super().__init__(None, importer)
 
         self.keyspace = keyspace
         self.consistency = consistency
+        self.importer = importer
         self.tables = tables
         self.session = session
 
@@ -154,11 +177,12 @@ class ExportTableSpec:
         return f'{self.keyspace}.{self.table}({self.columns}) as {self.target_table}'
 
 class ExportTableStatus:
-    def __init__(self, keyspace: str, target_table: str, status: str, table: str = None):
+    def __init__(self, keyspace: str, target_table: str, status: str, table: str = None, csv_file: str = ''):
         self.keyspace = keyspace
         self.target_table = target_table
         self.status = status
         self.table = table
+        self.csv_file = csv_file
 
     def __str__(self):
         return f'{self.keyspace}.{self.table} as {self.target_table} = {self.status}'
@@ -169,11 +193,11 @@ class ExportTableStatus:
 
         return False
 
-    def from_session(sts: str, pod: str, namespace: str, export_session: str):
+    def from_session(sts: str, pod: str, namespace: str, export_session: str) -> tuple['ExportTableStatus', str]:
        statuses: list[ExportTableStatus] = []
 
        status_in_whole = 'done'
-        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{export_session}_*.log*')
+        log_files: list[str] = find_files(pod, namespace, f'{export_log_prefix()}-{export_session}_*.log*')
 
        for log_file in log_files:
            status: ExportTableStatus = ExportTableStatus.from_log_file(pod, namespace, export_session, log_file)
@@ -188,7 +212,7 @@ class ExportTableStatus:
        def get_csv_files_n_table(target_table: str):
            db = f'{copy_session}_{target_table}'
            csv_file = f'{csv_dir()}/{db}/*.csv'
-            csv_files: list[str] = find_files(pod, namespace, csv_file)
+            csv_files: list[str] = find_files(pod, namespace, csv_file, remote=True)
            if csv_files:
                table = target_table
                m = re.match(f'{csv_dir()}/{db}/(.*).csv', csv_files[0])
@@ -198,24 +222,26 @@ class ExportTableStatus:
 
            return csv_files, target_table
 
-        m = re.match(f'{log_prefix()}-{copy_session}_(.*?)\.(.*?)\.log(.*)', log_file)
+        m = re.match(f'{export_log_prefix()}-{copy_session}_(.*?)\.(.*?)\.log(.*)', log_file)
        if m:
            keyspace = m.group(1)
            target_table = m.group(2)
            state = m.group(3)
            if state == '.pending_import':
-                _, table = get_csv_files_n_table(target_table)
-                return ExportTableStatus(keyspace, target_table, 'pending_import', table)
+                csv_files, table = get_csv_files_n_table(target_table)
+                return ExportTableStatus(keyspace, target_table, 'pending_import', table, csv_files[0] if csv_files else '')
            elif state == '.done':
                return ExportTableStatus(keyspace, target_table, 'done', target_table)
 
        # 4 rows exported to 1 files in 0 day, 0 hour, 0 minute, and 1.335 seconds.
        pattern = 'rows exported to'
-        r: PodExecResult = CassandraNodes.exec(pod, namespace, f"grep '{pattern}' {log_file}", show_out=Config().is_debug(), shell='bash')
+        r: ExecResult = local_exec(['grep', pattern, log_file], show_out=Config().is_debug())
+        # r = CassandraNodes.exec(pod, namespace, f"grep '{pattern}' {log_file}", show_out=Config().is_debug(), shell='bash')
+
        if r.exit_code() == 0:
            csv_files, table = get_csv_files_n_table(target_table)
            if csv_files:
-                return ExportTableStatus(keyspace, target_table, 'exported', table)
+                return ExportTableStatus(keyspace, target_table, 'exported', table, csv_files[0])
            else:
                return ExportTableStatus(keyspace, target_table, 'imported', target_table)
        else:
@@ -226,20 +252,39 @@
 def csv_dir():
     return Config().get('export.csv_dir', '/c3/cassandra/tmp')
 
-def find_files(pod: str, namespace: str, pattern: str, mmin: int = 0):
-    if mmin:
-        r = CassandraNodes.exec(pod, namespace, f'find {pattern} -mmin -{mmin}', show_out=Config().is_debug(), shell='bash')
+def find_files(pod: str, namespace: str, pattern: str, mmin: int = 0, remote = False):
+    stdout = ''
+    if not remote:
+        dir = os.path.dirname(pattern)
+        base = os.path.basename(pattern)
+        cmd = ['find', dir, '-name', base]
+        if mmin:
+            cmd += ['-mmin', f'-{mmin}']
+
+        stdout = local_exec(cmd, show_out=Config().is_debug()).stdout
     else:
-        r = CassandraNodes.exec(pod, namespace, f'find {pattern}', show_out=Config().is_debug(), shell='bash')
+        cmd = f'find {pattern}'
+        if mmin:
+            cmd = f'{cmd} -mmin -{mmin}'
+
+        stdout = CassandraNodes.exec(pod, namespace, cmd, show_out=Config().is_debug(), shell='bash').stdout
 
     log_files = []
-    for line in r.stdout.split('\n'):
+    for line in stdout.split('\n'):
        line = line.strip(' \r')
        if line:
            log_files.append(line)
 
    return log_files
 
+def export_log_prefix():
+    return Config().get('export.log-prefix', '/tmp/qing')
+
+def os_system_exec(cmd: str, show_out = False):
+    if show_out: log2(cmd)
+
+    os.system(cmd)
+
 class GeneratorStream(io.RawIOBase):
     def __init__(self, generator):
         self._generator = generator
@@ -292,23 +337,30 @@ class GeneratorStream(io.RawIOBase):
         self._buffer = self._buffer[size:]
         return data
 
-class PodHandler:
-    def __init__(self, state: ReplState):
+class PodPushHandler:
+    def __init__(self, state: ReplState, pod: str = None):
         self.state = state
+        self.pushed = False
+        self.pod = pod
 
     def __enter__(self):
         state = self.state
 
         if not state.pod:
+            self.pushed = True
            state.push()
-            state.pod = StatefulSets.pod_names(state.sts, state.namespace)[0]
+
+            if not self.pod:
+                self.pod = StatefulSets.pod_names(state.sts, state.namespace)[0]
+            state.pod = self.pod
 
        return state
 
    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.state.pop()
+        if self.pushed:
+            self.state.pop()
 
        return False
 
 
-def state_with_pod(state: ReplState):
-    return PodHandler(state)
+def state_with_pod(state: ReplState, pod: str = None):
+    return PodPushHandler(state, pod=pod)
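Note on the ImportSpec change above: parse_specs can now return a list of local CSV files plus an 'as <table>' alias in addition to a session name. A usage sketch inferred from _extract_importer/_extract_table_name as they appear in this diff; the importer-prefix syntax is not shown here, so the example omits it and the importer attribute is assumed to come back as None:

    from adam.commands.export.utils_export import ImportSpec

    spec = ImportSpec.parse_specs('/tmp/a.csv, /tmp/b.csv as users', files=True)
    print(spec.table_name)   # 'users'  (captured by the trailing 'as <name>' clause)
    print(spec.files)        # ['/tmp/a.csv', '/tmp/b.csv']
    print(spec.importer)     # assumed None when no importer prefix is given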