dcicutils 8.13.2__py3-none-any.whl → 8.13.3__py3-none-any.whl
Sign up to get free protection for your applications and to get access to all the features.
- dcicutils/scripts/update_portal_object.py +430 -0
- dcicutils/scripts/view_portal_object.py +146 -102
- {dcicutils-8.13.2.dist-info → dcicutils-8.13.3.dist-info}/METADATA +1 -1
- {dcicutils-8.13.2.dist-info → dcicutils-8.13.3.dist-info}/RECORD +7 -6
- {dcicutils-8.13.2.dist-info → dcicutils-8.13.3.dist-info}/entry_points.txt +1 -0
- {dcicutils-8.13.2.dist-info → dcicutils-8.13.3.dist-info}/LICENSE.txt +0 -0
- {dcicutils-8.13.2.dist-info → dcicutils-8.13.3.dist-info}/WHEEL +0 -0
@@ -0,0 +1,430 @@
|
|
1
|
+
# ------------------------------------------------------------------------------------------------------
|
2
|
+
# Command-line utility to update (post, patch, upsert) portal objects for SMaHT/CGAP/Fourfront.
|
3
|
+
# ------------------------------------------------------------------------------------------------------
|
4
|
+
# Example commands:
|
5
|
+
# update-portal-object --post file_format.json
|
6
|
+
# update-portal-object --upsert directory-with-schema-named-dot-json-files
|
7
|
+
# update-portal-object --patch file-not-named-for-schema-name.json --schema UnalignedReads
|
8
|
+
# --------------------------------------------------------------------------------------------------
|
9
|
+
|
10
|
+
import argparse
|
11
|
+
from functools import lru_cache
|
12
|
+
import glob
|
13
|
+
import io
|
14
|
+
import json
|
15
|
+
import os
|
16
|
+
import sys
|
17
|
+
from typing import Callable, List, Optional, Tuple, Union
|
18
|
+
from dcicutils.command_utils import yes_or_no
|
19
|
+
from dcicutils.common import ORCHESTRATED_APPS, APP_SMAHT
|
20
|
+
from dcicutils.ff_utils import delete_metadata, purge_metadata
|
21
|
+
from dcicutils.misc_utils import get_error_message, PRINT
|
22
|
+
from dcicutils.portal_utils import Portal as PortalFromUtils
|
23
|
+
|
24
|
+
|
25
|
+
class Portal(PortalFromUtils):
    """
    Thin extension of the dcicutils Portal adding delete and purge support
    via ff_utils, keyed by this portal's credentials.
    """

    def delete_metadata(self, object_id: str) -> Optional[dict]:
        """Delete the identified object; None if id/credentials are missing."""
        # Guard clause: require a non-empty string id and a portal key.
        if not (isinstance(object_id, str) and object_id and self.key):
            return None
        return delete_metadata(obj_id=object_id, key=self.key)

    def purge_metadata(self, object_id: str) -> Optional[dict]:
        """Purge the identified object; None if id/credentials are missing."""
        if not (isinstance(object_id, str) and object_id and self.key):
            return None
        return purge_metadata(obj_id=object_id, key=self.key)
|
36
|
+
|
37
|
+
|
38
|
+
# Default application when --app is not given on the command-line.
_DEFAULT_APP = "smaht"
# Environment variable consulted for the environment name (smaht app only).
_SMAHT_ENV_ENVIRON_NAME = "SMAHT_ENV"

# Schema properties to ignore (by default) for the view schema usage.
_SCHEMAS_IGNORE_PROPERTIES = [
    "date_created",
    "last_modified",
    "principals_allowed",
    "submitted_by",
    "schema_version"
]

# Load order for schema-named files: items are processed in this sequence so
# that referenced objects (users, consortia, file formats, ...) exist before
# the objects that link to them.
_SCHEMA_ORDER = [  # See: smaht-portal/src/encoded/project/loadxl.py
    "access_key",
    "user",
    "consortium",
    "submission_center",
    "file_format",
    "quality_metric",
    "output_file",
    "reference_file",
    "reference_genome",
    "software",
    "tracking_item",
    "workflow",
    "workflow_run",
    "meta_workflow",
    "meta_workflow_run",
    "image",
    "document",
    "static_section",
    "page",
    "filter_set",
    "higlass_view_config",
    "ingestion_submission",
    "ontology_term",
    "protocol",
    "donor",
    "demographic",
    "medical_history",
    "diagnosis",
    "exposure",
    "family_history",
    "medical_treatment",
    "death_circumstances",
    "tissue_collection",
    "tissue",
    "histology",
    "cell_line",
    "cell_culture",
    "cell_culture_mixture",
    "preparation_kit",
    "treatment",
    "sample_preparation",
    "tissue_sample",
    "cell_culture_sample",
    "cell_sample",
    "analyte",
    "analyte_preparation",
    "assay",
    "library",
    "library_preparation",
    "sequencer",
    "basecalling",
    "sequencing",
    "file_set",
    "unaligned_reads",
    "aligned_reads",
    "variant_calls",
]
|
108
|
+
|
109
|
+
|
110
|
+
def main():
    """
    Command-line entry point: parse arguments, create the Portal, and run
    the requested action(s): --post / --patch / --upsert on a file or
    directory of JSON files, or --delete / --purge on a single object.
    """

    parser = argparse.ArgumentParser(description="View Portal object.")
    parser.add_argument("--env", "-e", type=str, required=False, default=None,
                        help=f"Environment name (key from ~/.smaht-keys.json).")
    parser.add_argument("--app", type=str, required=False, default=None,
                        help=f"Application name (one of: smaht, cgap, fourfront).")
    parser.add_argument("--schema", type=str, required=False, default=None,
                        help="Use named schema rather than infer from post/patch/upsert file name.")
    parser.add_argument("--post", type=str, required=False, default=None, help="POST data.")
    parser.add_argument("--patch", type=str, required=False, default=None, help="PATCH data.")
    parser.add_argument("--upsert", type=str, required=False, default=None, help="Upsert data.")
    parser.add_argument("--delete", type=str, required=False, default=None, help="Delete data.")
    parser.add_argument("--purge", type=str, required=False, default=None, help="Purge data.")
    parser.add_argument("--confirm", action="store_true", required=False, default=False, help="Confirm before action.")
    parser.add_argument("--verbose", action="store_true", required=False, default=False, help="Verbose output.")
    parser.add_argument("--quiet", action="store_true", required=False, default=False, help="Quiet output.")
    parser.add_argument("--debug", action="store_true", required=False, default=False, help="Debugging output.")
    args = parser.parse_args()

    def usage(message: Optional[str] = None) -> None:
        # Print an optional error message, then the help text, and exit(1).
        nonlocal parser
        _print(message) if isinstance(message, str) else None
        parser.print_help()
        sys.exit(1)

    # Validate --app (case-insensitively); default to smaht when omitted.
    if app := args.app:
        if (app not in ORCHESTRATED_APPS) and ((app := app.lower()) not in ORCHESTRATED_APPS):
            usage(f"ERROR: Unknown app name; must be one of: {' | '.join(ORCHESTRATED_APPS)}")
    else:
        app = APP_SMAHT

    portal = _create_portal(env=args.env, app=app, verbose=args.verbose, debug=args.debug)

    # NOTE: the walrus assigns explicit_schema_name even when args.schema is
    # None/empty, so it is always bound for the calls below.
    if explicit_schema_name := args.schema:
        schema, explicit_schema_name = _get_schema(portal, explicit_schema_name)
        if not schema:
            usage(f"ERROR: Unknown schema name: {args.schema}")

    if not (args.post or args.patch or args.upsert or args.delete or args.purge):
        usage()

    if args.post:
        _post_or_patch_or_upsert(portal=portal,
                                 file_or_directory=args.post,
                                 explicit_schema_name=explicit_schema_name,
                                 update_function=post_data,
                                 update_action_name="POST",
                                 confirm=args.confirm, verbose=args.verbose, quiet=args.quiet, debug=args.debug)
    if args.patch:
        _post_or_patch_or_upsert(portal=portal,
                                 file_or_directory=args.patch,
                                 explicit_schema_name=explicit_schema_name,
                                 update_function=patch_data,
                                 update_action_name="PATCH",
                                 confirm=args.confirm, verbose=args.verbose, quiet=args.quiet, debug=args.debug)
    if args.upsert:
        _post_or_patch_or_upsert(portal=portal,
                                 file_or_directory=args.upsert,
                                 explicit_schema_name=explicit_schema_name,
                                 update_function=upsert_data,
                                 update_action_name="UPSERT",
                                 confirm=args.confirm, verbose=args.verbose, quiet=args.quiet, debug=args.debug)

    if args.delete:
        if not portal.get_metadata(args.delete, raise_exception=False):
            _print(f"Cannot find given object: {args.delete}")
            sys.exit(1)
        if yes_or_no(f"Do you really want to delete this item: {args.delete} ?"):
            portal.delete_metadata(args.delete)

    if args.purge:
        if not portal.get_metadata(args.purge, raise_exception=False):
            _print(f"Cannot find given object: {args.purge}")
            sys.exit(1)
        if yes_or_no(f"Do you really want to purge this item: {args.purge} ?"):
            # Delete first, then purge - presumably the portal requires an
            # item to be deleted before it can be purged; TODO confirm.
            portal.delete_metadata(args.purge)
            portal.purge_metadata(args.purge)
|
188
|
+
|
189
|
+
|
190
|
+
def _post_or_patch_or_upsert(portal: Portal, file_or_directory: str,
                             explicit_schema_name: str,
                             update_function: Callable, update_action_name: str,
                             confirm: bool = False, verbose: bool = False,
                             quiet: bool = False, debug: bool = False) -> None:
    """
    Apply update_function (post_data / patch_data / upsert_data) to a single
    JSON file, or to every *.json file in a directory (ordered per
    _SCHEMA_ORDER). Schema names are inferred from file names when possible,
    falling back to explicit_schema_name (the --schema option).
    """

    def is_schema_name_list(portal: Portal, keys: list) -> bool:
        # True iff every key names a known schema; used to recognize files of
        # the form {"SchemaName": [items, ...], ...}.
        if isinstance(keys, list):
            for key in keys:
                if portal.get_schema(key) is None:
                    return False
            return True
        return False

    def post_or_patch_or_upsert(portal: Portal, file: str, schema_name: Optional[str],
                                confirm: bool = False, verbose: bool = False,
                                quiet: bool = False, debug: bool = False) -> None:
        # Dispatch the item(s) in one JSON file to update_function. The file
        # may contain a single object, a list of objects, or a dictionary
        # keyed by schema names mapping to lists of objects.
        nonlocal update_function, update_action_name
        if not quiet:
            _print(f"Processing {update_action_name} file: {file}")
        if data := _read_json_from_file(file):
            if isinstance(data, dict):
                if isinstance(schema_name, str) and schema_name:
                    if debug:
                        _print(f"DEBUG: File ({file}) contains an object of type: {schema_name}")
                    update_function(portal, data, schema_name, confirm=confirm,
                                    file=file, verbose=verbose, debug=debug)
                elif is_schema_name_list(portal, list(data.keys())):
                    if debug:
                        _print(f"DEBUG: File ({file}) contains a dictionary of schema names.")
                    for schema_name in data:
                        if isinstance(schema_data := data[schema_name], list):
                            if debug:
                                _print(f"DEBUG: Processing {update_action_name}s for type: {schema_name}")
                            for index, item in enumerate(schema_data):
                                update_function(portal, item, schema_name, confirm=confirm,
                                                file=file, index=index, verbose=verbose, debug=debug)
                        else:
                            _print(f"WARNING: File ({file}) contains schema item which is not a list: {schema_name}")
                else:
                    _print(f"WARNING: File ({file}) contains unknown item type.")
            elif isinstance(data, list):
                if debug:
                    _print(f"DEBUG: File ({file}) contains a list of objects of type: {schema_name}")
                for index, item in enumerate(data):
                    update_function(portal, item, schema_name, confirm=confirm,
                                    file=file, index=index, verbose=verbose, debug=debug)
        if debug:
            _print(f"DEBUG: Processing {update_action_name} file done: {file}")

    if os.path.isdir(file_or_directory):
        if ((files := glob.glob(os.path.join(file_or_directory, "*.json"))) and
            (files_and_schemas := _file_names_to_ordered_file_and_schema_names(portal, files))):  # noqa
            for file_and_schema in files_and_schemas:
                if not (file := file_and_schema[0]):
                    continue
                if not (schema_name := file_and_schema[1]) and not (schema_name := explicit_schema_name):
                    _print(f"ERROR: Schema cannot be inferred from file name and --schema not specified: {file}")
                    continue
                post_or_patch_or_upsert(portal, file_and_schema[0], schema_name=schema_name,
                                        confirm=confirm, quiet=quiet, verbose=verbose, debug=debug)
    elif os.path.isfile(file := file_or_directory):
        # FIX: the original if/else here performed the identical call in both
        # branches (the walrus in the condition left schema_name bound to the
        # falsy explicit_schema_name in the else case); collapsed to one call.
        # schema_name may be None; the per-item handling above copes with that.
        schema_name = (_get_schema_name_from_schema_named_json_file_name(portal, file) or
                       explicit_schema_name)
        post_or_patch_or_upsert(portal, file, schema_name=schema_name,
                                confirm=confirm, quiet=quiet, verbose=verbose, debug=debug)
    else:
        _print(f"ERROR: Cannot find file or directory: {file_or_directory}")
|
264
|
+
|
265
|
+
|
266
|
+
def post_data(portal: Portal, data: dict, schema_name: str, confirm: bool = False,
              file: Optional[str] = None, index: int = 0,
              verbose: bool = False, debug: bool = False) -> None:
    """
    POST a single item of the given schema type; refuses to POST an item
    that already exists, and optionally asks for confirmation first.
    """
    identifying_path = portal.get_identifying_path(data, portal_type=schema_name)
    if not identifying_path:
        # Without an identifying property there is nothing to check or report on.
        if isinstance(file, str) and isinstance(index, int):
            _print(f"ERROR: Item for POST has no identifying property: {file} (#{index + 1})")
        else:
            _print("ERROR: Item for POST has no identifying property.")
        return
    if portal.get_metadata(identifying_path, raise_exception=False):
        _print(f"ERROR: Item for POST already exists: {identifying_path}")
        return
    if (confirm is True) and not yes_or_no(f"POST data for: {identifying_path} ?"):
        return
    if verbose:
        _print(f"POST {schema_name} item: {identifying_path}")
    try:
        portal.post_metadata(schema_name, data)
    except Exception as e:
        _print(f"ERROR: Cannot POST {schema_name} item: {identifying_path}")
        _print(get_error_message(e))
        return
    if debug:
        _print(f"DEBUG: POST {schema_name} item done: {identifying_path}")
|
290
|
+
|
291
|
+
|
292
|
+
def patch_data(portal: Portal, data: dict, schema_name: str, confirm: bool = False,
               file: Optional[str] = None, index: int = 0,
               verbose: bool = False, debug: bool = False) -> None:
    """
    PATCH a single existing item of the given schema type; the item must
    already exist. Optionally asks for confirmation first.
    """
    if not (identifying_path := portal.get_identifying_path(data, portal_type=schema_name)):
        if isinstance(file, str) and isinstance(index, int):
            _print(f"ERROR: Item for PATCH has no identifying property: {file} (#{index + 1})")
        else:
            _print("ERROR: Item for PATCH has no identifying property.")
        return
    if not portal.get_metadata(identifying_path, raise_exception=False):
        _print(f"ERROR: Item for PATCH does not already exist: {identifying_path}")
        return
    if (confirm is True) and not yes_or_no(f"PATCH data for: {identifying_path}"):
        return
    if verbose:
        _print(f"PATCH {schema_name} item: {identifying_path}")
    try:
        portal.patch_metadata(identifying_path, data)
        if debug:
            _print(f"DEBUG: PATCH {schema_name} item OK: {identifying_path}")
    except Exception as e:
        _print(f"ERROR: Cannot PATCH {schema_name} item: {identifying_path}")
        # FIX: was _print(e) - use get_error_message for a readable message,
        # consistent with post_data.
        _print(get_error_message(e))
        return
|
316
|
+
|
317
|
+
|
318
|
+
def upsert_data(portal: Portal, data: dict, schema_name: str, confirm: bool = False,
                file: Optional[str] = None, index: int = 0,
                verbose: bool = False, debug: bool = False) -> None:
    """
    Upsert a single item of the given schema type: PATCH if it already
    exists, otherwise POST. Optionally asks for confirmation first.
    """
    if not (identifying_path := portal.get_identifying_path(data, portal_type=schema_name)):
        if isinstance(file, str) and isinstance(index, int):
            _print(f"ERROR: Item for UPSERT has no identifying property: {file} (#{index + 1})")
        else:
            _print("ERROR: Item for UPSERT has no identifying property.")
        return
    exists = portal.get_metadata(identifying_path, raise_exception=False)
    if ((confirm is True) and not yes_or_no(f"{'PATCH' if exists else 'POST'} data for: {identifying_path} ?")):
        return
    if verbose:
        _print(f"{'PATCH' if exists else 'POST'} {schema_name} item: {identifying_path}")
    try:
        portal.post_metadata(schema_name, data) if not exists else portal.patch_metadata(identifying_path, data)
        if debug:
            _print(f"DEBUG: UPSERT {schema_name} item OK: {identifying_path}")
    except Exception as e:
        _print(f"ERROR: Cannot UPSERT {schema_name} item: {identifying_path}")
        # FIX: was _print(e) - use get_error_message for a readable message,
        # consistent with post_data.
        _print(get_error_message(e))
        return
|
340
|
+
|
341
|
+
|
342
|
+
def _create_portal(env: Optional[str] = None, app: Optional[str] = None,
                   verbose: bool = False, debug: bool = False) -> Optional[Portal]:
    """
    Create a Portal for the given environment and app. For the smaht app,
    falls back to the SMAHT_ENV environment variable when env is not given.
    Returns None if neither env nor app is available.
    """
    env_from_environ = None
    if not env and (app == APP_SMAHT):
        if env := os.environ.get(_SMAHT_ENV_ENVIRON_NAME):
            env_from_environ = True
    if not (portal := Portal(env, app=app) if env or app else None):
        return None
    if verbose:
        # FIX: was os.environ(_SMAHT_ENV_ENVIRON_NAME) - os.environ is a
        # mapping, not callable; calling it raised TypeError on this path.
        if (env := portal.env) or (env := os.environ.get(_SMAHT_ENV_ENVIRON_NAME)):
            _print(f"Portal environment"
                   f"{f' (from {_SMAHT_ENV_ENVIRON_NAME})' if env_from_environ else ''}: {portal.env}")
        if portal.keys_file:
            _print(f"Portal keys file: {portal.keys_file}")
        if portal.key_id:
            # Only show a short prefix of the key id, never the whole key.
            _print(f"Portal key prefix: {portal.key_id[0:2]}******")
        if portal.server:
            _print(f"Portal server: {portal.server}")
    return portal
|
362
|
+
|
363
|
+
|
364
|
+
def _read_json_from_file(file: str) -> Optional[Union[dict, list]]:
    """
    Read and parse JSON from the given file, returning the parsed value
    (a dict or a list - callers handle both), or None if the file does not
    exist, cannot be opened, or does not contain valid JSON.
    """
    # FIX: return annotation was Optional[dict], but json.load may yield a
    # list (and callers explicitly handle list data).
    try:
        if not os.path.exists(file):
            return None
        with io.open(file, "r") as f:
            try:
                return json.load(f)
            except Exception:
                _print(f"ERROR: Cannot load JSON from file: {file}")
                return None
    except Exception:
        _print(f"ERROR: Cannot open file: {file}")
        return None
|
377
|
+
|
378
|
+
|
379
|
+
def _file_names_to_ordered_file_and_schema_names(portal: Portal,
                                                 files: Union[List[str], str]) -> List[Tuple[str, Optional[str]]]:
    """
    Map file names to (file, schema_name) pairs, ordered per _SCHEMA_ORDER
    so that dependency objects load first; files whose schema is not in
    _SCHEMA_ORDER (including unrecognized ones, schema None) come last.
    """
    results = []
    if isinstance(files, str):
        files = [files]
    if not isinstance(files, list):
        return results
    for file in files:
        if isinstance(file, str) and file:
            results.append((file, _get_schema_name_from_schema_named_json_file_name(portal, file)))
    ordered_results = []
    for schema_name in _SCHEMA_ORDER:
        schema_name = portal.schema_name(schema_name)
        if result := next((item for item in results if item[1] == schema_name), None):
            ordered_results.append(result)
            results.remove(result)
    # FIX: was the expression-statement antipattern
    # "ordered_results.extend(results) if results else None"; a plain extend
    # is equivalent (extending by an empty list is a no-op).
    ordered_results.extend(results)
    return ordered_results
|
397
|
+
|
398
|
+
|
399
|
+
def _get_schema_name_from_schema_named_json_file_name(portal: Portal, value: str) -> Optional[str]:
    """
    If value is a .json file whose base name matches a known schema name,
    return the canonical schema name; otherwise None.
    """
    try:
        if not value.endswith(".json"):
            return None
        _, schema_name = _get_schema(portal, os.path.basename(value[:-5]))
        return schema_name
    except Exception:
        # FIX: was "return False", contradicting the Optional[str] contract;
        # callers test truthiness, so None preserves their behavior.
        return None
|
407
|
+
|
408
|
+
|
409
|
+
@lru_cache(maxsize=1)
def _get_schemas(portal: Portal) -> Optional[dict]:
    # Fetch all schemas from the portal, cached so repeated name lookups via
    # _get_schema hit the portal only once. NOTE(review): lru_cache keys on
    # the Portal instance, which must therefore be hashable - confirm.
    return portal.get_schemas()
|
412
|
+
|
413
|
+
|
414
|
+
@lru_cache(maxsize=100)
def _get_schema(portal: Portal, name: str) -> Tuple[Optional[dict], Optional[str]]:
    """
    Find a schema by name, ignoring case, underscores, and dashes.
    Returns (schema, canonical_schema_name), or (None, None) if not found.
    """
    def canonical(value: str) -> str:
        # Normalize for comparison: strip separators/whitespace, lowercase.
        return value.replace("_", "").replace("-", "").strip().lower()

    if portal and name:
        target = canonical(name)
        if target and (schemas := _get_schemas(portal)):
            for schema_name in schemas:
                if canonical(schema_name) == target:
                    return schemas[schema_name], schema_name
    return None, None
|
422
|
+
|
423
|
+
|
424
|
+
def _print(*args, **kwargs) -> None:
    # Print via dcicutils' PRINT (so output can be captured in tests), then
    # flush stdout immediately so messages appear promptly even when piped.
    PRINT(*args, **kwargs)
    sys.stdout.flush()
|
427
|
+
|
428
|
+
|
429
|
+
# Script entry point (also exposed as a console script - presumably
# "update-portal-object" per the wheel's entry_points change; confirm).
if __name__ == "__main__":
    main()
|
@@ -62,9 +62,10 @@ import json
|
|
62
62
|
import pyperclip
|
63
63
|
import os
|
64
64
|
import sys
|
65
|
-
from typing import Callable, List, Optional, Tuple
|
65
|
+
from typing import Callable, List, Optional, TextIO, Tuple, Union
|
66
66
|
import yaml
|
67
67
|
from dcicutils.captured_output import captured_output, uncaptured_output
|
68
|
+
from dcicutils.command_utils import yes_or_no
|
68
69
|
from dcicutils.misc_utils import get_error_message, is_uuid, PRINT
|
69
70
|
from dcicutils.portal_utils import Portal
|
70
71
|
|
@@ -78,11 +79,15 @@ _SCHEMAS_IGNORE_PROPERTIES = [
|
|
78
79
|
"schema_version"
|
79
80
|
]
|
80
81
|
|
82
|
+
_output_file: TextIO = None
|
83
|
+
|
81
84
|
|
82
85
|
def main():
|
83
86
|
|
87
|
+
global _output_file
|
88
|
+
|
84
89
|
parser = argparse.ArgumentParser(description="View Portal object.")
|
85
|
-
parser.add_argument("uuid", type=str,
|
90
|
+
parser.add_argument("uuid", nargs="?", type=str,
|
86
91
|
help=f"The uuid (or path) of the object to fetch and view. ")
|
87
92
|
parser.add_argument("--ini", type=str, required=False, default=None,
|
88
93
|
help=f"Name of the application .ini file.")
|
@@ -97,11 +102,9 @@ def main():
|
|
97
102
|
parser.add_argument("--all", action="store_true", required=False, default=False,
|
98
103
|
help="Include all properties for schema usage.")
|
99
104
|
parser.add_argument("--raw", action="store_true", required=False, default=False, help="Raw output.")
|
105
|
+
parser.add_argument("--inserts", action="store_true", required=False, default=False,
|
106
|
+
help="Format output for subsequent inserts.")
|
100
107
|
parser.add_argument("--tree", action="store_true", required=False, default=False, help="Tree output for schemas.")
|
101
|
-
parser.add_argument("--post", type=str, required=False, default=None,
|
102
|
-
help="POST data of the main arg type with data from file specified with this option.")
|
103
|
-
parser.add_argument("--patch", type=str, required=False, default=None,
|
104
|
-
help="PATCH data of the main arg type with data from file specified with this option.")
|
105
108
|
parser.add_argument("--database", action="store_true", required=False, default=False,
|
106
109
|
help="Read from database output.")
|
107
110
|
parser.add_argument("--bool", action="store_true", required=False,
|
@@ -109,6 +112,7 @@ def main():
|
|
109
112
|
parser.add_argument("--yaml", action="store_true", required=False, default=False, help="YAML output.")
|
110
113
|
parser.add_argument("--copy", "-c", action="store_true", required=False, default=False,
|
111
114
|
help="Copy object data to clipboard.")
|
115
|
+
parser.add_argument("--output", required=False, help="Output file.", type=str)
|
112
116
|
parser.add_argument("--indent", required=False, default=False, help="Indent output.", type=int)
|
113
117
|
parser.add_argument("--details", action="store_true", required=False, default=False, help="Detailed output.")
|
114
118
|
parser.add_argument("--more-details", action="store_true", required=False, default=False,
|
@@ -123,54 +127,57 @@ def main():
|
|
123
127
|
portal = _create_portal(ini=args.ini, env=args.env or os.environ.get("SMAHT_ENV"),
|
124
128
|
server=args.server, app=args.app, verbose=args.verbose, debug=args.debug)
|
125
129
|
|
126
|
-
if
|
130
|
+
if not args.uuid:
|
131
|
+
_print("UUID or schema or path required.")
|
132
|
+
_exit(1)
|
133
|
+
|
134
|
+
if args.output:
|
135
|
+
if os.path.exists(args.output):
|
136
|
+
if os.path.isdir(args.output):
|
137
|
+
_print(f"Specified output file already exists as a directory: {args.output}")
|
138
|
+
_exit(1)
|
139
|
+
elif os.path.isfile(args.output):
|
140
|
+
_print(f"Specified output file already exists: {args.output}")
|
141
|
+
if not yes_or_no(f"Do you want to overwrite this file?"):
|
142
|
+
_exit(0)
|
143
|
+
_output_file = io.open(args.output, "w")
|
144
|
+
|
145
|
+
if args.uuid and ((args.uuid.lower() == "schemas") or (args.uuid.lower() == "schema")):
|
127
146
|
_print_all_schema_names(portal=portal, details=args.details,
|
128
147
|
more_details=args.more_details, all=args.all,
|
129
148
|
tree=args.tree, raw=args.raw, raw_yaml=args.yaml)
|
130
149
|
return
|
131
|
-
elif args.uuid.lower() == "info":
|
150
|
+
elif args.uuid and (args.uuid.lower() == "info"):
|
132
151
|
if consortia := portal.get_metadata("/consortia?limit=1000"):
|
133
|
-
|
152
|
+
_print_output("Known Consortia:")
|
134
153
|
consortia = sorted(consortia.get("@graph", []), key=lambda key: key.get("identifier"))
|
135
154
|
for consortium in consortia:
|
136
155
|
if ((consortium_name := consortium.get("identifier")) and
|
137
156
|
(consortium_uuid := consortium.get("uuid"))): # noqa
|
138
|
-
|
157
|
+
_print_output(f"- {consortium_name}: {consortium_uuid}")
|
139
158
|
if submission_centers := portal.get_metadata("/submission-centers?limit=1000"):
|
140
|
-
|
159
|
+
_print_output("Known Submission Centers:")
|
141
160
|
submission_centers = sorted(submission_centers.get("@graph", []), key=lambda key: key.get("identifier"))
|
142
161
|
for submission_center in submission_centers:
|
143
162
|
if ((submission_center_name := submission_center.get("identifier")) and
|
144
163
|
(submission_center_uuid := submission_center.get("uuid"))): # noqa
|
145
|
-
|
164
|
+
_print_output(f"- {submission_center_name}: {submission_center_uuid}")
|
146
165
|
try:
|
147
166
|
if file_formats := portal.get_metadata("/file-formats?limit=1000"):
|
148
|
-
|
167
|
+
_print_output("Known File Formats:")
|
149
168
|
file_formats = sorted(file_formats.get("@graph", []), key=lambda key: key.get("identifier"))
|
150
169
|
for file_format in file_formats:
|
151
170
|
if ((file_format_name := file_format.get("identifier")) and
|
152
171
|
(file_format_uuid := file_format.get("uuid"))): # noqa
|
153
|
-
|
172
|
+
_print_output(f"- {file_format_name}: {file_format_uuid}")
|
154
173
|
except Exception:
|
155
|
-
|
174
|
+
_print_output("Known File Formats: None")
|
156
175
|
return
|
157
176
|
|
158
177
|
if _is_maybe_schema_name(args.uuid):
|
159
178
|
args.schema = True
|
160
179
|
|
161
180
|
if args.schema:
|
162
|
-
if args.post:
|
163
|
-
if post_data := _read_json_from_file(args.post):
|
164
|
-
if args.verbose:
|
165
|
-
_print(f"POSTing data from file ({args.post}) as type: {args.uuid}")
|
166
|
-
if isinstance(post_data, dict):
|
167
|
-
post_data = [post_data]
|
168
|
-
elif not isinstance(post_data, list):
|
169
|
-
_print(f"POST data neither list nor dictionary: {args.post}")
|
170
|
-
for item in post_data:
|
171
|
-
portal.post_metadata(args.uuid, item)
|
172
|
-
if args.verbose:
|
173
|
-
_print(f"Done POSTing data from file ({args.post}) as type: {args.uuid}")
|
174
181
|
schema, schema_name = _get_schema(portal, args.uuid)
|
175
182
|
if schema:
|
176
183
|
if args.copy:
|
@@ -178,49 +185,33 @@ def main():
|
|
178
185
|
if not args.raw:
|
179
186
|
if parent_schema_name := _get_parent_schema_name(schema):
|
180
187
|
if schema.get("isAbstract") is True:
|
181
|
-
|
188
|
+
_print_output(f"{schema_name} | parent: {parent_schema_name} | abstract")
|
182
189
|
else:
|
183
|
-
|
190
|
+
_print_output(f"{schema_name} | parent: {parent_schema_name}")
|
184
191
|
else:
|
185
|
-
|
192
|
+
_print_output(schema_name)
|
186
193
|
_print_schema(schema, details=args.details, more_details=args.details,
|
187
194
|
all=args.all, raw=args.raw, raw_yaml=args.yaml)
|
188
195
|
return
|
189
|
-
elif args.patch:
|
190
|
-
if patch_data := _read_json_from_file(args.patch):
|
191
|
-
if args.verbose:
|
192
|
-
_print(f"PATCHing data from file ({args.patch}) for object: {args.uuid}")
|
193
|
-
if isinstance(patch_data, dict):
|
194
|
-
patch_data = [patch_data]
|
195
|
-
elif not isinstance(patch_data, list):
|
196
|
-
_print(f"PATCH data neither list nor dictionary: {args.patch}")
|
197
|
-
for item in patch_data:
|
198
|
-
portal.patch_metadata(args.uuid, item)
|
199
|
-
if args.verbose:
|
200
|
-
_print(f"Done PATCHing data from file ({args.patch}) as type: {args.uuid}")
|
201
|
-
return
|
202
|
-
else:
|
203
|
-
_print(f"No PATCH data found in file: {args.patch}")
|
204
|
-
sys.exit(1)
|
205
196
|
|
206
|
-
data = _get_portal_object(portal=portal, uuid=args.uuid, raw=args.raw,
|
197
|
+
data = _get_portal_object(portal=portal, uuid=args.uuid, raw=args.raw, inserts=args.inserts,
|
207
198
|
database=args.database, check=args.bool, verbose=args.verbose)
|
208
199
|
if args.bool:
|
209
200
|
if data:
|
210
201
|
_print(f"{args.uuid}: found")
|
211
|
-
|
202
|
+
_exit(0)
|
212
203
|
else:
|
213
204
|
_print(f"{args.uuid}: not found")
|
214
|
-
|
205
|
+
_exit(1)
|
215
206
|
if args.copy:
|
216
207
|
pyperclip.copy(json.dumps(data, indent=4))
|
217
208
|
if args.yaml:
|
218
|
-
|
209
|
+
_print_output(yaml.dump(data))
|
219
210
|
else:
|
220
211
|
if args.indent > 0:
|
221
|
-
|
212
|
+
_print_output(_format_json_with_indent(data, indent=args.indent))
|
222
213
|
else:
|
223
|
-
|
214
|
+
_print_output(json.dumps(data, default=str, indent=4))
|
224
215
|
|
225
216
|
|
226
217
|
def _format_json_with_indent(value: dict, indent: int = 0) -> Optional[str]:
|
@@ -254,7 +245,7 @@ def _create_portal(ini: str, env: Optional[str] = None,
|
|
254
245
|
|
255
246
|
|
256
247
|
def _get_portal_object(portal: Portal, uuid: str,
|
257
|
-
raw: bool = False, database: bool = False,
|
248
|
+
raw: bool = False, inserts: bool = False, database: bool = False,
|
258
249
|
check: bool = False, verbose: bool = False) -> dict:
|
259
250
|
response = None
|
260
251
|
try:
|
@@ -262,7 +253,7 @@ def _get_portal_object(portal: Portal, uuid: str,
|
|
262
253
|
path = f"/{uuid}"
|
263
254
|
else:
|
264
255
|
path = uuid
|
265
|
-
response = portal.get(path, raw=raw, database=database)
|
256
|
+
response = portal.get(path, raw=raw or inserts, database=database)
|
266
257
|
except Exception as e:
|
267
258
|
if "404" in str(e) and "not found" in str(e).lower():
|
268
259
|
_print(f"Portal object not found at {portal.server}: {uuid}")
|
@@ -278,7 +269,21 @@ def _get_portal_object(portal: Portal, uuid: str,
|
|
278
269
|
if not response.json:
|
279
270
|
_exit(f"Invalid JSON getting Portal object: {uuid}")
|
280
271
|
response = response.json()
|
281
|
-
if
|
272
|
+
if inserts:
|
273
|
+
# Format results as suitable for inserts (e.g. via update-portal-object).
|
274
|
+
response.pop("schema_version", None)
|
275
|
+
if ((isinstance(results := response.get("@graph"), list) and results) and
|
276
|
+
(isinstance(results_type := response.get("@type"), list) and results_type) and
|
277
|
+
(isinstance(results_type := results_type[0], str) and results_type.endswith("SearchResults")) and
|
278
|
+
(results_type := results_type[0:-len("SearchResults")])): # noqa
|
279
|
+
for result in results:
|
280
|
+
result.pop("schema_version", None)
|
281
|
+
response = {f"{results_type}": results}
|
282
|
+
# Get the result as non-raw so we can get its type.
|
283
|
+
elif ((response_cooked := portal.get(path, database=database)) and
|
284
|
+
(isinstance(response_type := response_cooked.json().get("@type"), list) and response_type)):
|
285
|
+
response = {f"{response_type[0]}": [response]}
|
286
|
+
elif raw:
|
282
287
|
response.pop("schema_version", None)
|
283
288
|
return response
|
284
289
|
|
@@ -292,7 +297,7 @@ def _get_schema(portal: Portal, name: str) -> Tuple[Optional[dict], Optional[str
|
|
292
297
|
if portal and name and (name := name.replace("_", "").replace("-", "").strip().lower()):
|
293
298
|
if schemas := _get_schemas(portal):
|
294
299
|
for schema_name in schemas:
|
295
|
-
if schema_name.replace("_", "").replace("-", "").strip().lower() == name:
|
300
|
+
if schema_name.replace("_", "").replace("-", "").strip().lower() == name.lower():
|
296
301
|
return schemas[schema_name], schema_name
|
297
302
|
return None, None
|
298
303
|
|
@@ -303,13 +308,37 @@ def _is_maybe_schema_name(value: str) -> bool:
|
|
303
308
|
return False
|
304
309
|
|
305
310
|
|
311
|
+
def _is_schema_name(portal: Portal, value: str) -> bool:
|
312
|
+
try:
|
313
|
+
return _get_schema(portal, value)[0] is not None
|
314
|
+
except Exception:
|
315
|
+
return False
|
316
|
+
|
317
|
+
|
318
|
+
def _is_schema_named_json_file_name(portal: Portal, value: str) -> bool:
|
319
|
+
try:
|
320
|
+
return value.endswith(".json") and _is_schema_name(portal, os.path.basename(value[:-5]))
|
321
|
+
except Exception:
|
322
|
+
return False
|
323
|
+
|
324
|
+
|
325
|
+
def _get_schema_name_from_schema_named_json_file_name(portal: Portal, value: str) -> Optional[str]:
|
326
|
+
try:
|
327
|
+
if not value.endswith(".json"):
|
328
|
+
return None
|
329
|
+
_, schema_name = _get_schema(portal, os.path.basename(value[:-5]))
|
330
|
+
return schema_name
|
331
|
+
except Exception:
|
332
|
+
return False
|
333
|
+
|
334
|
+
|
306
335
|
def _print_schema(schema: dict, details: bool = False, more_details: bool = False, all: bool = False,
|
307
336
|
raw: bool = False, raw_yaml: bool = False) -> None:
|
308
337
|
if raw:
|
309
338
|
if raw_yaml:
|
310
|
-
|
339
|
+
_print_output(yaml.dump(schema))
|
311
340
|
else:
|
312
|
-
|
341
|
+
_print_output(json.dumps(schema, indent=4))
|
313
342
|
return
|
314
343
|
_print_schema_info(schema, details=details, more_details=more_details, all=all)
|
315
344
|
|
@@ -322,37 +351,37 @@ def _print_schema_info(schema: dict, level: int = 0,
|
|
322
351
|
identifying_properties = schema.get("identifyingProperties")
|
323
352
|
if level == 0:
|
324
353
|
if required_properties := schema.get("required"):
|
325
|
-
|
354
|
+
_print_output("- required properties:")
|
326
355
|
for required_property in sorted(list(set(required_properties))):
|
327
356
|
if not all and required_property in _SCHEMAS_IGNORE_PROPERTIES:
|
328
357
|
continue
|
329
358
|
if property_type := (info := schema.get("properties", {}).get(required_property, {})).get("type"):
|
330
359
|
if property_type == "array" and (array_type := info.get("items", {}).get("type")):
|
331
|
-
|
360
|
+
_print_output(f" - {required_property}: {property_type} of {array_type}")
|
332
361
|
else:
|
333
|
-
|
362
|
+
_print_output(f" - {required_property}: {property_type}")
|
334
363
|
else:
|
335
|
-
|
364
|
+
_print_output(f" - {required_property}")
|
336
365
|
if isinstance(any_of := schema.get("anyOf"), list):
|
337
366
|
if ((any_of == [{"required": ["submission_centers"]}, {"required": ["consortia"]}]) or
|
338
367
|
(any_of == [{"required": ["consortia"]}, {"required": ["submission_centers"]}])): # noqa
|
339
368
|
# Very very special case.
|
340
|
-
|
341
|
-
|
342
|
-
|
369
|
+
_print_output(f" - at least one of:")
|
370
|
+
_print_output(f" - consortia: array of string")
|
371
|
+
_print_output(f" - submission_centers: array of string")
|
343
372
|
required = required_properties
|
344
373
|
if identifying_properties := schema.get("identifyingProperties"):
|
345
|
-
|
374
|
+
_print_output("- identifying properties:")
|
346
375
|
for identifying_property in sorted(list(set(identifying_properties))):
|
347
376
|
if not all and identifying_property in _SCHEMAS_IGNORE_PROPERTIES:
|
348
377
|
continue
|
349
378
|
if property_type := (info := schema.get("properties", {}).get(identifying_property, {})).get("type"):
|
350
379
|
if property_type == "array" and (array_type := info.get("items", {}).get("type")):
|
351
|
-
|
380
|
+
_print_output(f" - {identifying_property}: {property_type} of {array_type}")
|
352
381
|
else:
|
353
|
-
|
382
|
+
_print_output(f" - {identifying_property}: {property_type}")
|
354
383
|
else:
|
355
|
-
|
384
|
+
_print_output(f" - {identifying_property}")
|
356
385
|
if properties := schema.get("properties"):
|
357
386
|
reference_properties = []
|
358
387
|
for property_name in properties:
|
@@ -362,16 +391,16 @@ def _print_schema_info(schema: dict, level: int = 0,
|
|
362
391
|
if link_to := property.get("linkTo"):
|
363
392
|
reference_properties.append({"name": property_name, "ref": link_to})
|
364
393
|
if reference_properties:
|
365
|
-
|
394
|
+
_print_output("- reference properties:")
|
366
395
|
for reference_property in sorted(reference_properties, key=lambda key: key["name"]):
|
367
|
-
|
396
|
+
_print_output(f" - {reference_property['name']}: {reference_property['ref']}")
|
368
397
|
if schema.get("additionalProperties") is True:
|
369
|
-
|
398
|
+
_print_output(f" - additional properties are allowed")
|
370
399
|
if not more_details:
|
371
400
|
return
|
372
401
|
if properties := (schema.get("properties") if level == 0 else schema):
|
373
402
|
if level == 0:
|
374
|
-
|
403
|
+
_print_output("- properties:")
|
375
404
|
for property_name in sorted(properties):
|
376
405
|
if not all and property_name in _SCHEMAS_IGNORE_PROPERTIES:
|
377
406
|
continue
|
@@ -392,7 +421,7 @@ def _print_schema_info(schema: dict, level: int = 0,
|
|
392
421
|
property_type = "open ended object"
|
393
422
|
if property.get("calculatedProperty"):
|
394
423
|
suffix += f" | calculated"
|
395
|
-
|
424
|
+
_print_output(f"{spaces}- {property_name}: {property_type}{suffix}")
|
396
425
|
_print_schema_info(object_properties, level=level + 1,
|
397
426
|
details=details, more_details=more_details, all=all,
|
398
427
|
required=property.get("required"))
|
@@ -416,28 +445,28 @@ def _print_schema_info(schema: dict, level: int = 0,
|
|
416
445
|
if property_type := property_items.get("type"):
|
417
446
|
if property_type == "object":
|
418
447
|
suffix = ""
|
419
|
-
|
448
|
+
_print_output(f"{spaces}- {property_name}: array of object{suffix}")
|
420
449
|
_print_schema_info(property_items.get("properties"), level=level + 1,
|
421
450
|
details=details, more_details=more_details, all=all,
|
422
451
|
required=property_items.get("required"))
|
423
452
|
elif property_type == "array":
|
424
453
|
# This (array-of-array) never happens to occur at this time (February 2024).
|
425
|
-
|
454
|
+
_print_output(f"{spaces}- {property_name}: array of array{suffix}")
|
426
455
|
else:
|
427
|
-
|
456
|
+
_print_output(f"{spaces}- {property_name}: array of {property_type}{suffix}")
|
428
457
|
else:
|
429
|
-
|
458
|
+
_print_output(f"{spaces}- {property_name}: array{suffix}")
|
430
459
|
else:
|
431
|
-
|
460
|
+
_print_output(f"{spaces}- {property_name}: array{suffix}")
|
432
461
|
if enumeration:
|
433
462
|
nenums = 0
|
434
463
|
maxenums = 15
|
435
464
|
for enum in sorted(enumeration):
|
436
465
|
if (nenums := nenums + 1) >= maxenums:
|
437
466
|
if (remaining := len(enumeration) - nenums) > 0:
|
438
|
-
|
467
|
+
_print_output(f"{spaces} - [{remaining} more ...]")
|
439
468
|
break
|
440
|
-
|
469
|
+
_print_output(f"{spaces} - {enum}")
|
441
470
|
else:
|
442
471
|
if isinstance(property_type, list):
|
443
472
|
property_type = " or ".join(sorted(property_type))
|
@@ -479,18 +508,18 @@ def _print_schema_info(schema: dict, level: int = 0,
|
|
479
508
|
suffix += f" | max length: {max_length}"
|
480
509
|
if (min_length := property.get("minLength")) is not None:
|
481
510
|
suffix += f" | min length: {min_length}"
|
482
|
-
|
511
|
+
_print_output(f"{spaces}- {property_name}: {property_type}{suffix}")
|
483
512
|
if enumeration:
|
484
513
|
nenums = 0
|
485
514
|
maxenums = 15
|
486
515
|
for enum in sorted(enumeration):
|
487
516
|
if (nenums := nenums + 1) >= maxenums:
|
488
517
|
if (remaining := len(enumeration) - nenums) > 0:
|
489
|
-
|
518
|
+
_print_output(f"{spaces} - [{remaining} more ...]")
|
490
519
|
break
|
491
|
-
|
520
|
+
_print_output(f"{spaces} - {enum}")
|
492
521
|
else:
|
493
|
-
|
522
|
+
_print_output(f"{spaces}- {property_name}")
|
494
523
|
|
495
524
|
|
496
525
|
def _print_all_schema_names(portal: Portal,
|
@@ -501,9 +530,9 @@ def _print_all_schema_names(portal: Portal,
|
|
501
530
|
|
502
531
|
if raw:
|
503
532
|
if raw_yaml:
|
504
|
-
|
533
|
+
_print_output(yaml.dump(schemas))
|
505
534
|
else:
|
506
|
-
|
535
|
+
_print_output(json.dumps(schemas, indent=4))
|
507
536
|
return
|
508
537
|
|
509
538
|
if tree:
|
@@ -513,14 +542,14 @@ def _print_all_schema_names(portal: Portal,
|
|
513
542
|
for schema_name in sorted(schemas.keys()):
|
514
543
|
if parent_schema_name := _get_parent_schema_name(schemas[schema_name]):
|
515
544
|
if schemas[schema_name].get("isAbstract") is True:
|
516
|
-
|
545
|
+
_print_output(f"{schema_name} | parent: {parent_schema_name} | abstract")
|
517
546
|
else:
|
518
|
-
|
547
|
+
_print_output(f"{schema_name} | parent: {parent_schema_name}")
|
519
548
|
else:
|
520
549
|
if schemas[schema_name].get("isAbstract") is True:
|
521
|
-
|
550
|
+
_print_output(f"{schema_name} | abstract")
|
522
551
|
else:
|
523
|
-
|
552
|
+
_print_output(schema_name)
|
524
553
|
if details:
|
525
554
|
_print_schema(schemas[schema_name], details=details, more_details=more_details, all=all)
|
526
555
|
|
@@ -559,8 +588,7 @@ def _print_schemas_tree(schemas: dict) -> None:
|
|
559
588
|
def _print_tree(root_name: Optional[str],
|
560
589
|
children_of: Callable,
|
561
590
|
has_children: Optional[Callable] = None,
|
562
|
-
name_of: Optional[Callable] = None
|
563
|
-
print: Callable = print) -> None:
|
591
|
+
name_of: Optional[Callable] = None) -> None:
|
564
592
|
"""
|
565
593
|
Recursively prints as a tree structure the given root name and any of its
|
566
594
|
children (again, recursively) as specified by the given children_of callable;
|
@@ -589,26 +617,26 @@ def _print_tree(root_name: Optional[str],
|
|
589
617
|
if has_children(path):
|
590
618
|
extension = branch if pointer == tee else space
|
591
619
|
yield from tree_generator(path, prefix=prefix+extension)
|
592
|
-
|
620
|
+
_print_output(first + ((name_of(root_name) if callable(name_of) else root_name) or "root"))
|
593
621
|
for line in tree_generator(root_name, prefix=" "):
|
594
|
-
|
622
|
+
_print_output(line)
|
595
623
|
|
596
624
|
|
597
625
|
def _read_json_from_file(file: str) -> Optional[dict]:
|
598
626
|
if not os.path.exists(file):
|
599
627
|
_print(f"Cannot find file: {file}")
|
600
|
-
|
628
|
+
_exit(1)
|
601
629
|
try:
|
602
630
|
with io.open(file, "r") as f:
|
603
631
|
try:
|
604
632
|
return json.load(f)
|
605
633
|
except Exception:
|
606
634
|
_print(f"Cannot parse JSON in file: {file}")
|
607
|
-
|
635
|
+
_exit(1)
|
608
636
|
except Exception as e:
|
609
|
-
|
637
|
+
_print(e)
|
610
638
|
_print(f"Cannot open file: {file}")
|
611
|
-
|
639
|
+
_exit(1)
|
612
640
|
|
613
641
|
|
614
642
|
def _print(*args, **kwargs):
|
@@ -617,10 +645,26 @@ def _print(*args, **kwargs):
|
|
617
645
|
sys.stdout.flush()
|
618
646
|
|
619
647
|
|
620
|
-
def
|
621
|
-
|
648
|
+
def _print_output(value: str):
|
649
|
+
global _output_file
|
650
|
+
if _output_file:
|
651
|
+
_output_file.write(value)
|
652
|
+
_output_file.write("\n")
|
653
|
+
else:
|
654
|
+
with uncaptured_output():
|
655
|
+
PRINT(value)
|
656
|
+
sys.stdout.flush()
|
657
|
+
|
658
|
+
|
659
|
+
def _exit(message: Optional[Union[str, int]] = None, status: Optional[int] = None) -> None:
|
660
|
+
global _output_file
|
661
|
+
if isinstance(message, str):
|
622
662
|
_print(f"ERROR: {message}")
|
623
|
-
|
663
|
+
elif isinstance(message, int) and not isinstance(status, int):
|
664
|
+
status = message
|
665
|
+
if _output_file:
|
666
|
+
_output_file.close()
|
667
|
+
sys.exit(status if isinstance(status, int) else (0 if status is None else 1))
|
624
668
|
|
625
669
|
|
626
670
|
if __name__ == "__main__":
|
@@ -60,7 +60,8 @@ dcicutils/s3_utils.py,sha256=LauLFQGvZLfpBJ81tYMikjLd3SJRz2R_FrL1n4xSlyI,28868
|
|
60
60
|
dcicutils/schema_utils.py,sha256=GmRm-XqZKJ6qine16SQF1txcby9WougDav_sYmKNs9E,12400
|
61
61
|
dcicutils/scripts/publish_to_pypi.py,sha256=sMd4WASQGlxlh7uLrt2eGkFRXYgONVmvIg8mClMS5RQ,13903
|
62
62
|
dcicutils/scripts/run_license_checker.py,sha256=z2keYnRDZsHQbTeo1XORAXSXNJK5axVzL5LjiNqZ7jE,4184
|
63
|
-
dcicutils/scripts/
|
63
|
+
dcicutils/scripts/update_portal_object.py,sha256=p9pFkoA3ZZOWvh-GMDpgR8qOfx_jQppOVNOjsuZndAU,18810
|
64
|
+
dcicutils/scripts/view_portal_object.py,sha256=ddZdOuSsYD-4VlsWth0EBTD_2TycQ4Ktgh-IdzKHweM,31490
|
64
65
|
dcicutils/secrets_utils.py,sha256=8dppXAsiHhJzI6NmOcvJV5ldvKkQZzh3Fl-cb8Wm7MI,19745
|
65
66
|
dcicutils/sheet_utils.py,sha256=VlmzteONW5VF_Q4vo0yA5vesz1ViUah1MZ_yA1rwZ0M,33629
|
66
67
|
dcicutils/snapshot_utils.py,sha256=YDeI3vD-MhAtHwKDzfEm2q-n3l-da2yRpRR3xp0Ah1M,23021
|
@@ -74,8 +75,8 @@ dcicutils/trace_utils.py,sha256=g8kwV4ebEy5kXW6oOrEAUsurBcCROvwtZqz9fczsGRE,1769
|
|
74
75
|
dcicutils/validation_utils.py,sha256=cMZIU2cY98FYtzK52z5WUYck7urH6JcqOuz9jkXpqzg,14797
|
75
76
|
dcicutils/variant_utils.py,sha256=2H9azNx3xAj-MySg-uZ2SFqbWs4kZvf61JnK6b-h4Qw,4343
|
76
77
|
dcicutils/zip_utils.py,sha256=_Y9EmL3D2dUZhxucxHvrtmmlbZmK4FpSsHEb7rGSJLU,3265
|
77
|
-
dcicutils-8.13.
|
78
|
-
dcicutils-8.13.
|
79
|
-
dcicutils-8.13.
|
80
|
-
dcicutils-8.13.
|
81
|
-
dcicutils-8.13.
|
78
|
+
dcicutils-8.13.3.dist-info/LICENSE.txt,sha256=qnwSmfnEWMl5l78VPDEzAmEbLVrRqQvfUQiHT0ehrOo,1102
|
79
|
+
dcicutils-8.13.3.dist-info/METADATA,sha256=B583S5ausZLy7zA73GFhcTCgX3KJVnhy008WzM0H6uk,3442
|
80
|
+
dcicutils-8.13.3.dist-info/WHEEL,sha256=7Z8_27uaHI_UZAc4Uox4PpBhQ9Y5_modZXWMxtUi4NU,88
|
81
|
+
dcicutils-8.13.3.dist-info/entry_points.txt,sha256=W6kEWdUJk9tQ4myAgpehPdebcwvCAZ7UgB-wyPgDUMg,335
|
82
|
+
dcicutils-8.13.3.dist-info/RECORD,,
|
@@ -2,5 +2,6 @@
|
|
2
2
|
publish-to-pypi=dcicutils.scripts.publish_to_pypi:main
|
3
3
|
run-license-checker=dcicutils.scripts.run_license_checker:main
|
4
4
|
show-contributors=dcicutils.contribution_scripts:show_contributors_main
|
5
|
+
update-portal-object=dcicutils.scripts.update_portal_object:main
|
5
6
|
view-portal-object=dcicutils.scripts.view_portal_object:main
|
6
7
|
|
File without changes
|
File without changes
|