dcicutils 8.13.2__py3-none-any.whl → 8.13.3.1b1__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,430 @@
1
+ # ------------------------------------------------------------------------------------------------------
2
+ # Command-line utility to update (post, patch, upsert) portal objects for SMaHT/CGAP/Fourfront.
3
+ # ------------------------------------------------------------------------------------------------------
4
+ # Example commands:
5
+ # update-portal-object --post file_format.json
6
+ # update-portal-object --upsert directory-with-schema-named-dot-json-files
7
+ # update-portal-object --patch file-not-named-for-schema-name.json --schema UnalignedReads
8
+ # --------------------------------------------------------------------------------------------------
9
+
10
+ import argparse
11
+ from functools import lru_cache
12
+ import glob
13
+ import io
14
+ import json
15
+ import os
16
+ import sys
17
+ from typing import Callable, List, Optional, Tuple, Union
18
+ from dcicutils.command_utils import yes_or_no
19
+ from dcicutils.common import ORCHESTRATED_APPS, APP_SMAHT
20
+ from dcicutils.ff_utils import delete_metadata, purge_metadata
21
+ from dcicutils.misc_utils import get_error_message, PRINT
22
+ from dcicutils.portal_utils import Portal as PortalFromUtils
23
+
24
+
25
class Portal(PortalFromUtils):
    """Portal subclass adding delete and purge operations via dcicutils ff_utils."""

    def delete_metadata(self, object_id: str) -> Optional[dict]:
        """Delete (mark deleted) the given object; returns the response,
        or None when object_id/credentials are not usable."""
        if not (isinstance(object_id, str) and object_id and self.key):
            return None
        return delete_metadata(obj_id=object_id, key=self.key)

    def purge_metadata(self, object_id: str) -> Optional[dict]:
        """Purge (permanently remove) the given object; returns the response,
        or None when object_id/credentials are not usable."""
        if not (isinstance(object_id, str) and object_id and self.key):
            return None
        return purge_metadata(obj_id=object_id, key=self.key)
36
+
37
+
38
# Default application name (when --app is not given on the command-line).
_DEFAULT_APP = "smaht"
# Environment variable consulted for the environment when --env is omitted.
_SMAHT_ENV_ENVIRON_NAME = "SMAHT_ENV"

# Schema properties to ignore (by default) for the view schema usage.
_SCHEMAS_IGNORE_PROPERTIES = [
    "date_created",
    "last_modified",
    "principals_allowed",
    "submitted_by",
    "schema_version"
]

# Dependency order in which types are processed when updating from a directory
# of schema-named JSON files (e.g. user before file_format before file types).
_SCHEMA_ORDER = [  # See: smaht-portal/src/encoded/project/loadxl.py
    "access_key",
    "user",
    "consortium",
    "submission_center",
    "file_format",
    "quality_metric",
    "output_file",
    "reference_file",
    "reference_genome",
    "software",
    "tracking_item",
    "workflow",
    "workflow_run",
    "meta_workflow",
    "meta_workflow_run",
    "image",
    "document",
    "static_section",
    "page",
    "filter_set",
    "higlass_view_config",
    "ingestion_submission",
    "ontology_term",
    "protocol",
    "donor",
    "demographic",
    "medical_history",
    "diagnosis",
    "exposure",
    "family_history",
    "medical_treatment",
    "death_circumstances",
    "tissue_collection",
    "tissue",
    "histology",
    "cell_line",
    "cell_culture",
    "cell_culture_mixture",
    "preparation_kit",
    "treatment",
    "sample_preparation",
    "tissue_sample",
    "cell_culture_sample",
    "cell_sample",
    "analyte",
    "analyte_preparation",
    "assay",
    "library",
    "library_preparation",
    "sequencer",
    "basecalling",
    "sequencing",
    "file_set",
    "unaligned_reads",
    "aligned_reads",
    "variant_calls",
]
108
+
109
+
110
def main():
    """Command-line driver: parse arguments, create the Portal connection, and
    dispatch to post/patch/upsert/delete/purge actions as requested."""

    # NOTE(review): description string appears copied from the companion
    # view-portal-object script.
    parser = argparse.ArgumentParser(description="View Portal object.")
    parser.add_argument("--env", "-e", type=str, required=False, default=None,
                        help=f"Environment name (key from ~/.smaht-keys.json).")
    parser.add_argument("--app", type=str, required=False, default=None,
                        help=f"Application name (one of: smaht, cgap, fourfront).")
    parser.add_argument("--schema", type=str, required=False, default=None,
                        help="Use named schema rather than infer from post/patch/upsert file name.")
    parser.add_argument("--post", type=str, required=False, default=None, help="POST data.")
    parser.add_argument("--patch", type=str, required=False, default=None, help="PATCH data.")
    parser.add_argument("--upsert", type=str, required=False, default=None, help="Upsert data.")
    parser.add_argument("--delete", type=str, required=False, default=None, help="Delete data.")
    parser.add_argument("--purge", type=str, required=False, default=None, help="Purge data.")
    parser.add_argument("--confirm", action="store_true", required=False, default=False, help="Confirm before action.")
    parser.add_argument("--verbose", action="store_true", required=False, default=False, help="Verbose output.")
    parser.add_argument("--quiet", action="store_true", required=False, default=False, help="Quiet output.")
    parser.add_argument("--debug", action="store_true", required=False, default=False, help="Debugging output.")
    args = parser.parse_args()

    def usage(message: Optional[str] = None) -> None:
        # Print the optional error message, then help, then exit non-zero.
        nonlocal parser
        _print(message) if isinstance(message, str) else None
        parser.print_help()
        sys.exit(1)

    # Validate the app name (retrying lower-cased); default to smaht.
    if app := args.app:
        if (app not in ORCHESTRATED_APPS) and ((app := app.lower()) not in ORCHESTRATED_APPS):
            usage(f"ERROR: Unknown app name; must be one of: {' | '.join(ORCHESTRATED_APPS)}")
    else:
        app = APP_SMAHT

    portal = _create_portal(env=args.env, app=app, verbose=args.verbose, debug=args.debug)

    # If --schema was given, resolve it now so an unknown name fails fast;
    # the canonical schema name is forwarded to the update functions.
    if explicit_schema_name := args.schema:
        schema, explicit_schema_name = _get_schema(portal, explicit_schema_name)
        if not schema:
            usage(f"ERROR: Unknown schema name: {args.schema}")

    # At least one action option is required.
    if not (args.post or args.patch or args.upsert or args.delete or args.purge):
        usage()

    if args.post:
        _post_or_patch_or_upsert(portal=portal,
                                 file_or_directory=args.post,
                                 explicit_schema_name=explicit_schema_name,
                                 update_function=post_data,
                                 update_action_name="POST",
                                 confirm=args.confirm, verbose=args.verbose, quiet=args.quiet, debug=args.debug)
    if args.patch:
        _post_or_patch_or_upsert(portal=portal,
                                 file_or_directory=args.patch,
                                 explicit_schema_name=explicit_schema_name,
                                 update_function=patch_data,
                                 update_action_name="PATCH",
                                 confirm=args.confirm, verbose=args.verbose, quiet=args.quiet, debug=args.debug)
    if args.upsert:
        _post_or_patch_or_upsert(portal=portal,
                                 file_or_directory=args.upsert,
                                 explicit_schema_name=explicit_schema_name,
                                 update_function=upsert_data,
                                 update_action_name="UPSERT",
                                 confirm=args.confirm, verbose=args.verbose, quiet=args.quiet, debug=args.debug)

    # Delete/purge take an object id (not a file) and always confirm interactively.
    if args.delete:
        if not portal.get_metadata(args.delete, raise_exception=False):
            _print(f"Cannot find given object: {args.delete}")
            sys.exit(1)
        if yes_or_no(f"Do you really want to delete this item: {args.delete} ?"):
            portal.delete_metadata(args.delete)

    if args.purge:
        if not portal.get_metadata(args.purge, raise_exception=False):
            _print(f"Cannot find given object: {args.purge}")
            sys.exit(1)
        if yes_or_no(f"Do you really want to purge this item: {args.purge} ?"):
            # Purge requires a prior delete; do both.
            portal.delete_metadata(args.purge)
            portal.purge_metadata(args.purge)
188
+
189
+
190
def _post_or_patch_or_upsert(portal: Portal, file_or_directory: str,
                             explicit_schema_name: str,
                             update_function: Callable, update_action_name: str,
                             confirm: bool = False, verbose: bool = False,
                             quiet: bool = False, debug: bool = False) -> None:
    """Apply update_function (post_data/patch_data/upsert_data) to one JSON
    file, or to every .json file in a directory (ordered per _SCHEMA_ORDER).

    A file may contain: a single object (dict) of a known/explicit schema type,
    a dict keyed by schema names whose values are lists of objects, or a list
    of objects of a single schema type.
    """

    def is_schema_name_list(portal: Portal, keys: list) -> bool:
        # True iff every key names a known schema in the portal.
        if isinstance(keys, list):
            for key in keys:
                if portal.get_schema(key) is None:
                    return False
            return True
        return False

    def post_or_patch_or_upsert(portal: Portal, file: str, schema_name: Optional[str],
                                confirm: bool = False, verbose: bool = False,
                                quiet: bool = False, debug: bool = False) -> None:
        # Process a single JSON file, dispatching each contained item to
        # update_function; schema_name may be None for schema-keyed files.

        nonlocal update_function, update_action_name
        if not quiet:
            _print(f"Processing {update_action_name} file: {file}")
        if data := _read_json_from_file(file):
            if isinstance(data, dict):
                if isinstance(schema_name, str) and schema_name:
                    # Single object of the given/inferred type.
                    if debug:
                        _print(f"DEBUG: File ({file}) contains an object of type: {schema_name}")
                    update_function(portal, data, schema_name, confirm=confirm,
                                    file=file, verbose=verbose, debug=debug)
                elif is_schema_name_list(portal, list(data.keys())):
                    # Dict keyed by schema name -> list of objects of that type.
                    if debug:
                        _print(f"DEBUG: File ({file}) contains a dictionary of schema names.")
                    for schema_name in data:
                        if isinstance(schema_data := data[schema_name], list):
                            if debug:
                                _print(f"DEBUG: Processing {update_action_name}s for type: {schema_name}")
                            for index, item in enumerate(schema_data):
                                update_function(portal, item, schema_name, confirm=confirm,
                                                file=file, index=index, verbose=verbose, debug=debug)
                        else:
                            _print(f"WARNING: File ({file}) contains schema item which is not a list: {schema_name}")
                else:
                    _print(f"WARNING: File ({file}) contains unknown item type.")
            elif isinstance(data, list):
                # List of objects of the given/inferred type.
                if debug:
                    _print(f"DEBUG: File ({file}) contains a list of objects of type: {schema_name}")
                for index, item in enumerate(data):
                    update_function(portal, item, schema_name, confirm=confirm,
                                    file=file, index=index, verbose=verbose, debug=debug)
        if debug:
            _print(f"DEBUG: Processing {update_action_name} file done: {file}")

    if os.path.isdir(file_or_directory):
        # Directory: process all .json files in loadxl (dependency) order.
        if ((files := glob.glob(os.path.join(file_or_directory, "*.json"))) and
            (files_and_schemas := _file_names_to_ordered_file_and_schema_names(portal, files))):  # noqa
            for file_and_schema in files_and_schemas:
                if not (file := file_and_schema[0]):
                    continue
                # Schema comes from the file name, else from --schema.
                if not (schema_name := file_and_schema[1]) and not (schema_name := explicit_schema_name):
                    _print(f"ERROR: Schema cannot be inferred from file name and --schema not specified: {file}")
                    continue
                post_or_patch_or_upsert(portal, file_and_schema[0], schema_name=schema_name,
                                        confirm=confirm, quiet=quiet, verbose=verbose, debug=debug)
    elif os.path.isfile(file := file_or_directory):
        if ((schema_name := _get_schema_name_from_schema_named_json_file_name(portal, file)) or
            (schema_name := explicit_schema_name)):  # noqa
            post_or_patch_or_upsert(portal, file, schema_name=schema_name,
                                    confirm=confirm, quiet=quiet, verbose=verbose, debug=debug)
        else:
            # No schema inferable: pass schema_name (None) through anyway —
            # the file may itself be a dict keyed by schema names.
            post_or_patch_or_upsert(portal, file, schema_name=schema_name,
                                    confirm=confirm, quiet=quiet, verbose=verbose, debug=debug)
            # _print(f"ERROR: Schema cannot be inferred from file name and --schema not specified: {file}")
            # return
    else:
        _print(f"ERROR: Cannot find file or directory: {file_or_directory}")
264
+
265
+
266
def post_data(portal: Portal, data: dict, schema_name: str, confirm: bool = False,
              file: Optional[str] = None, index: int = 0,
              verbose: bool = False, debug: bool = False) -> None:
    """Create (POST) the given item in the portal as type schema_name.

    Refuses to POST (with an error message) when the item has no identifying
    property or when an item already exists at its identifying path; when
    confirm is True, asks interactively first.
    """
    identifying_path = portal.get_identifying_path(data, portal_type=schema_name)
    if not identifying_path:
        if isinstance(file, str) and isinstance(index, int):
            _print(f"ERROR: Item for POST has no identifying property: {file} (#{index + 1})")
        else:
            _print(f"ERROR: Item for POST has no identifying property.")
        return
    if portal.get_metadata(identifying_path, raise_exception=False):
        _print(f"ERROR: Item for POST already exists: {identifying_path}")
        return
    if (confirm is True) and not yes_or_no(f"POST data for: {identifying_path} ?"):
        return
    if verbose:
        _print(f"POST {schema_name} item: {identifying_path}")
    try:
        portal.post_metadata(schema_name, data)
        if debug:
            _print(f"DEBUG: POST {schema_name} item done: {identifying_path}")
    except Exception as e:
        _print(f"ERROR: Cannot POST {schema_name} item: {identifying_path}")
        _print(get_error_message(e))
        return
290
+
291
+
292
def patch_data(portal: Portal, data: dict, schema_name: str, confirm: bool = False,
               file: Optional[str] = None, index: int = 0,
               verbose: bool = False, debug: bool = False) -> None:
    """Update (PATCH) an existing portal item of type schema_name.

    Refuses to PATCH (with an error message) when the item has no identifying
    property or when no item exists at its identifying path; when confirm is
    True, asks interactively first.
    """
    if not (identifying_path := portal.get_identifying_path(data, portal_type=schema_name)):
        if isinstance(file, str) and isinstance(index, int):
            _print(f"ERROR: Item for PATCH has no identifying property: {file} (#{index + 1})")
        else:
            _print(f"ERROR: Item for PATCH has no identifying property.")
        return
    if not portal.get_metadata(identifying_path, raise_exception=False):
        _print(f"ERROR: Item for PATCH does not already exist: {identifying_path}")
        return
    # Consistency fix: trailing " ?" added to match post_data/upsert_data prompts.
    if (confirm is True) and not yes_or_no(f"PATCH data for: {identifying_path} ?"):
        return
    if verbose:
        _print(f"PATCH {schema_name} item: {identifying_path}")
    try:
        portal.patch_metadata(identifying_path, data)
        if debug:
            _print(f"DEBUG: PATCH {schema_name} item OK: {identifying_path}")
    except Exception as e:
        _print(f"ERROR: Cannot PATCH {schema_name} item: {identifying_path}")
        # Consistency fix: format the exception like post_data does (was: _print(e)).
        _print(get_error_message(e))
        return
316
+
317
+
318
def upsert_data(portal: Portal, data: dict, schema_name: str, confirm: bool = False,
                file: Optional[str] = None, index: int = 0,
                verbose: bool = False, debug: bool = False) -> None:
    """POST the item if no item exists at its identifying path, else PATCH it.

    Refuses to act (with an error message) when the item has no identifying
    property; when confirm is True, asks interactively first.
    """
    if not (identifying_path := portal.get_identifying_path(data, portal_type=schema_name)):
        if isinstance(file, str) and isinstance(index, int):
            _print(f"ERROR: Item for UPSERT has no identifying property: {file} (#{index + 1})")
        else:
            _print(f"ERROR: Item for UPSERT has no identifying property.")
        return
    exists = portal.get_metadata(identifying_path, raise_exception=False)
    if (confirm is True) and not yes_or_no(f"{'PATCH' if exists else 'POST'} data for: {identifying_path} ?"):
        return
    if verbose:
        _print(f"{'PATCH' if exists else 'POST'} {schema_name} item: {identifying_path}")
    try:
        # Idiom fix: plain if/else rather than a conditional expression used
        # purely for its side effects.
        if exists:
            portal.patch_metadata(identifying_path, data)
        else:
            portal.post_metadata(schema_name, data)
        if debug:
            _print(f"DEBUG: UPSERT {schema_name} item OK: {identifying_path}")
    except Exception as e:
        _print(f"ERROR: Cannot UPSERT {schema_name} item: {identifying_path}")
        # Consistency fix: format the exception like post_data does (was: _print(e)).
        _print(get_error_message(e))
        return
340
+
341
+
342
def _create_portal(env: Optional[str] = None, app: Optional[str] = None,
                   verbose: bool = False, debug: bool = False) -> Optional[Portal]:
    """Create and return a Portal for the given environment/app, or None when
    neither is available.

    If env is not given and the app is smaht, falls back to the SMAHT_ENV
    environment variable. With verbose, prints connection details.
    """
    env_from_environ = False  # True when env came from the SMAHT_ENV environment variable.
    if not env and (app == APP_SMAHT):
        if env := os.environ.get(_SMAHT_ENV_ENVIRON_NAME):
            env_from_environ = True
    if not (portal := Portal(env, app=app) if env or app else None):
        return None
    if verbose:
        # BUG FIX: os.environ is a mapping, not callable; the original called
        # os.environ(_SMAHT_ENV_ENVIRON_NAME), which raises TypeError whenever
        # portal.env is falsy. Also dropped the unused walrus rebindings of env.
        if portal.env or os.environ.get(_SMAHT_ENV_ENVIRON_NAME):
            _print(f"Portal environment"
                   f"{f' (from {_SMAHT_ENV_ENVIRON_NAME})' if env_from_environ else ''}: {portal.env}")
        if portal.keys_file:
            _print(f"Portal keys file: {portal.keys_file}")
        if portal.key_id:
            _print(f"Portal key prefix: {portal.key_id[0:2]}******")
        if portal.server:
            _print(f"Portal server: {portal.server}")
    return portal
362
+
363
+
364
+ def _read_json_from_file(file: str) -> Optional[dict]:
365
+ try:
366
+ if not os.path.exists(file):
367
+ return None
368
+ with io.open(file, "r") as f:
369
+ try:
370
+ return json.load(f)
371
+ except Exception:
372
+ _print(f"ERROR: Cannot load JSON from file: {file}")
373
+ return None
374
+ except Exception:
375
+ _print(f"ERROR: Cannot open file: {file}")
376
+ return None
377
+
378
+
379
def _file_names_to_ordered_file_and_schema_names(portal: Portal,
                                                 files: Union[List[str], str]) -> List[Tuple[str, Optional[str]]]:
    """Pair each file with its schema name (inferred from the file name) and
    order the pairs per _SCHEMA_ORDER (loadxl dependency order); pairs whose
    schema is not in _SCHEMA_ORDER are appended at the end."""
    if isinstance(files, str):
        files = [files]
    if not isinstance(files, list):
        return []
    pairs = [(name, _get_schema_name_from_schema_named_json_file_name(portal, name))
             for name in files if isinstance(name, str) and name]
    ordered = []
    for raw_schema_name in _SCHEMA_ORDER:
        canonical = portal.schema_name(raw_schema_name)
        matched = next((pair for pair in pairs if pair[1] == canonical), None)
        if matched:
            ordered.append(matched)
            pairs.remove(matched)
    if pairs:
        ordered.extend(pairs)
    return ordered
397
+
398
+
399
+ def _get_schema_name_from_schema_named_json_file_name(portal: Portal, value: str) -> Optional[str]:
400
+ try:
401
+ if not value.endswith(".json"):
402
+ return None
403
+ _, schema_name = _get_schema(portal, os.path.basename(value[:-5]))
404
+ return schema_name
405
+ except Exception:
406
+ return False
407
+
408
+
409
@lru_cache(maxsize=1)
def _get_schemas(portal: Portal) -> Optional[dict]:
    """Return all schemas from the portal, cached for the process lifetime
    (maxsize=1: effectively one portal per run)."""
    return portal.get_schemas()
412
+
413
+
414
+ @lru_cache(maxsize=100)
415
+ def _get_schema(portal: Portal, name: str) -> Tuple[Optional[dict], Optional[str]]:
416
+ if portal and name and (name := name.replace("_", "").replace("-", "").strip().lower()):
417
+ if schemas := _get_schemas(portal):
418
+ for schema_name in schemas:
419
+ if schema_name.replace("_", "").replace("-", "").strip().lower() == name.lower():
420
+ return schemas[schema_name], schema_name
421
+ return None, None
422
+
423
+
424
def _print(*args, **kwargs) -> None:
    """Print via dcicutils PRINT and flush stdout immediately so output is
    not buffered (useful when piped/captured)."""
    PRINT(*args, **kwargs)
    sys.stdout.flush()
427
+
428
+
429
# Script entry point.
if __name__ == "__main__":
    main()
@@ -62,9 +62,10 @@ import json
62
62
  import pyperclip
63
63
  import os
64
64
  import sys
65
- from typing import Callable, List, Optional, Tuple
65
+ from typing import Callable, List, Optional, TextIO, Tuple, Union
66
66
  import yaml
67
67
  from dcicutils.captured_output import captured_output, uncaptured_output
68
+ from dcicutils.command_utils import yes_or_no
68
69
  from dcicutils.misc_utils import get_error_message, is_uuid, PRINT
69
70
  from dcicutils.portal_utils import Portal
70
71
 
@@ -78,11 +79,15 @@ _SCHEMAS_IGNORE_PROPERTIES = [
78
79
  "schema_version"
79
80
  ]
80
81
 
82
+ _output_file: TextIO = None
83
+
81
84
 
82
85
  def main():
83
86
 
87
+ global _output_file
88
+
84
89
  parser = argparse.ArgumentParser(description="View Portal object.")
85
- parser.add_argument("uuid", type=str,
90
+ parser.add_argument("uuid", nargs="?", type=str,
86
91
  help=f"The uuid (or path) of the object to fetch and view. ")
87
92
  parser.add_argument("--ini", type=str, required=False, default=None,
88
93
  help=f"Name of the application .ini file.")
@@ -97,11 +102,9 @@ def main():
97
102
  parser.add_argument("--all", action="store_true", required=False, default=False,
98
103
  help="Include all properties for schema usage.")
99
104
  parser.add_argument("--raw", action="store_true", required=False, default=False, help="Raw output.")
105
+ parser.add_argument("--inserts", action="store_true", required=False, default=False,
106
+ help="Format output for subsequent inserts.")
100
107
  parser.add_argument("--tree", action="store_true", required=False, default=False, help="Tree output for schemas.")
101
- parser.add_argument("--post", type=str, required=False, default=None,
102
- help="POST data of the main arg type with data from file specified with this option.")
103
- parser.add_argument("--patch", type=str, required=False, default=None,
104
- help="PATCH data of the main arg type with data from file specified with this option.")
105
108
  parser.add_argument("--database", action="store_true", required=False, default=False,
106
109
  help="Read from database output.")
107
110
  parser.add_argument("--bool", action="store_true", required=False,
@@ -109,118 +112,102 @@ def main():
109
112
  parser.add_argument("--yaml", action="store_true", required=False, default=False, help="YAML output.")
110
113
  parser.add_argument("--copy", "-c", action="store_true", required=False, default=False,
111
114
  help="Copy object data to clipboard.")
115
+ parser.add_argument("--output", required=False, help="Output file.", type=str)
112
116
  parser.add_argument("--indent", required=False, default=False, help="Indent output.", type=int)
113
- parser.add_argument("--details", action="store_true", required=False, default=False, help="Detailed output.")
114
- parser.add_argument("--more-details", action="store_true", required=False, default=False,
115
- help="More detailed output.")
117
+ parser.add_argument("--summary", action="store_true", required=False, default=False,
118
+ help="Summary output (for schema only).")
119
+ parser.add_argument("--terse", action="store_true", required=False, default=False, help="Terse output.")
116
120
  parser.add_argument("--verbose", action="store_true", required=False, default=False, help="Verbose output.")
117
121
  parser.add_argument("--debug", action="store_true", required=False, default=False, help="Debugging output.")
118
122
  args = parser.parse_args()
119
123
 
120
- if args.more_details:
121
- args.details = True
122
-
123
124
  portal = _create_portal(ini=args.ini, env=args.env or os.environ.get("SMAHT_ENV"),
124
125
  server=args.server, app=args.app, verbose=args.verbose, debug=args.debug)
125
126
 
126
- if args.uuid.lower() == "schemas" or args.uuid.lower() == "schema":
127
- _print_all_schema_names(portal=portal, details=args.details,
128
- more_details=args.more_details, all=args.all,
129
- tree=args.tree, raw=args.raw, raw_yaml=args.yaml)
127
+ if not args.uuid:
128
+ _print("UUID or schema or path required.")
129
+ _exit(1)
130
+
131
+ if args.output:
132
+ if os.path.exists(args.output):
133
+ if os.path.isdir(args.output):
134
+ _print(f"Specified output file already exists as a directory: {args.output}")
135
+ _exit(1)
136
+ elif os.path.isfile(args.output):
137
+ _print(f"Specified output file already exists: {args.output}")
138
+ if not yes_or_no(f"Do you want to overwrite this file?"):
139
+ _exit(0)
140
+ _output_file = io.open(args.output, "w")
141
+
142
+ if args.uuid and ((args.uuid.lower() == "schemas") or (args.uuid.lower() == "schema")):
143
+ _print_all_schema_names(portal=portal, terse=args.terse, all=args.all,
144
+ tree=args.tree, summary=args.summary, yaml=args.yaml)
130
145
  return
131
- elif args.uuid.lower() == "info": # TODO: need word for what consortiums and submission centers are collectively
146
+ elif args.uuid and (args.uuid.lower() == "info"):
132
147
  if consortia := portal.get_metadata("/consortia?limit=1000"):
133
- _print("Known Consortia:")
148
+ _print_output("Known Consortia:")
134
149
  consortia = sorted(consortia.get("@graph", []), key=lambda key: key.get("identifier"))
135
150
  for consortium in consortia:
136
151
  if ((consortium_name := consortium.get("identifier")) and
137
152
  (consortium_uuid := consortium.get("uuid"))): # noqa
138
- _print(f"- {consortium_name}: {consortium_uuid}")
153
+ _print_output(f"- {consortium_name}: {consortium_uuid}")
139
154
  if submission_centers := portal.get_metadata("/submission-centers?limit=1000"):
140
- _print("Known Submission Centers:")
155
+ _print_output("Known Submission Centers:")
141
156
  submission_centers = sorted(submission_centers.get("@graph", []), key=lambda key: key.get("identifier"))
142
157
  for submission_center in submission_centers:
143
158
  if ((submission_center_name := submission_center.get("identifier")) and
144
159
  (submission_center_uuid := submission_center.get("uuid"))): # noqa
145
- _print(f"- {submission_center_name}: {submission_center_uuid}")
160
+ _print_output(f"- {submission_center_name}: {submission_center_uuid}")
146
161
  try:
147
162
  if file_formats := portal.get_metadata("/file-formats?limit=1000"):
148
- _print("Known File Formats:")
163
+ _print_output("Known File Formats:")
149
164
  file_formats = sorted(file_formats.get("@graph", []), key=lambda key: key.get("identifier"))
150
165
  for file_format in file_formats:
151
166
  if ((file_format_name := file_format.get("identifier")) and
152
167
  (file_format_uuid := file_format.get("uuid"))): # noqa
153
- _print(f"- {file_format_name}: {file_format_uuid}")
168
+ _print_output(f"- {file_format_name}: {file_format_uuid}")
154
169
  except Exception:
155
- _print("Known File Formats: None")
170
+ _print_output("Known File Formats: None")
156
171
  return
157
172
 
158
173
  if _is_maybe_schema_name(args.uuid):
159
174
  args.schema = True
160
175
 
161
176
  if args.schema:
162
- if args.post:
163
- if post_data := _read_json_from_file(args.post):
164
- if args.verbose:
165
- _print(f"POSTing data from file ({args.post}) as type: {args.uuid}")
166
- if isinstance(post_data, dict):
167
- post_data = [post_data]
168
- elif not isinstance(post_data, list):
169
- _print(f"POST data neither list nor dictionary: {args.post}")
170
- for item in post_data:
171
- portal.post_metadata(args.uuid, item)
172
- if args.verbose:
173
- _print(f"Done POSTing data from file ({args.post}) as type: {args.uuid}")
174
177
  schema, schema_name = _get_schema(portal, args.uuid)
175
178
  if schema:
176
179
  if args.copy:
177
180
  pyperclip.copy(json.dumps(schema, indent=4))
178
- if not args.raw:
181
+ if args.summary:
179
182
  if parent_schema_name := _get_parent_schema_name(schema):
180
183
  if schema.get("isAbstract") is True:
181
- _print(f"{schema_name} | parent: {parent_schema_name} | abstract")
184
+ _print_output(f"{schema_name} | parent: {parent_schema_name} | abstract")
182
185
  else:
183
- _print(f"{schema_name} | parent: {parent_schema_name}")
186
+ _print_output(f"{schema_name} | parent: {parent_schema_name}")
184
187
  else:
185
- _print(schema_name)
186
- _print_schema(schema, details=args.details, more_details=args.details,
187
- all=args.all, raw=args.raw, raw_yaml=args.yaml)
188
+ _print_output(schema_name)
189
+ _print_schema(schema, terse=args.terse,
190
+ all=args.all, summary=args.summary, yaml=args.yaml)
188
191
  return
189
- elif args.patch:
190
- if patch_data := _read_json_from_file(args.patch):
191
- if args.verbose:
192
- _print(f"PATCHing data from file ({args.patch}) for object: {args.uuid}")
193
- if isinstance(patch_data, dict):
194
- patch_data = [patch_data]
195
- elif not isinstance(patch_data, list):
196
- _print(f"PATCH data neither list nor dictionary: {args.patch}")
197
- for item in patch_data:
198
- portal.patch_metadata(args.uuid, item)
199
- if args.verbose:
200
- _print(f"Done PATCHing data from file ({args.patch}) as type: {args.uuid}")
201
- return
202
- else:
203
- _print(f"No PATCH data found in file: {args.patch}")
204
- sys.exit(1)
205
192
 
206
- data = _get_portal_object(portal=portal, uuid=args.uuid, raw=args.raw,
193
+ data = _get_portal_object(portal=portal, uuid=args.uuid, raw=args.raw, inserts=args.inserts,
207
194
  database=args.database, check=args.bool, verbose=args.verbose)
208
195
  if args.bool:
209
196
  if data:
210
197
  _print(f"{args.uuid}: found")
211
- sys.exit(0)
198
+ _exit(0)
212
199
  else:
213
200
  _print(f"{args.uuid}: not found")
214
- sys.exit(1)
201
+ _exit(1)
215
202
  if args.copy:
216
203
  pyperclip.copy(json.dumps(data, indent=4))
217
204
  if args.yaml:
218
- _print(yaml.dump(data))
205
+ _print_output(yaml.dump(data))
219
206
  else:
220
207
  if args.indent > 0:
221
- _print(_format_json_with_indent(data, indent=args.indent))
208
+ _print_output(_format_json_with_indent(data, indent=args.indent))
222
209
  else:
223
- _print(json.dumps(data, default=str, indent=4))
210
+ _print_output(json.dumps(data, default=str, indent=4))
224
211
 
225
212
 
226
213
  def _format_json_with_indent(value: dict, indent: int = 0) -> Optional[str]:
@@ -254,7 +241,7 @@ def _create_portal(ini: str, env: Optional[str] = None,
254
241
 
255
242
 
256
243
  def _get_portal_object(portal: Portal, uuid: str,
257
- raw: bool = False, database: bool = False,
244
+ raw: bool = False, inserts: bool = False, database: bool = False,
258
245
  check: bool = False, verbose: bool = False) -> dict:
259
246
  response = None
260
247
  try:
@@ -262,7 +249,7 @@ def _get_portal_object(portal: Portal, uuid: str,
262
249
  path = f"/{uuid}"
263
250
  else:
264
251
  path = uuid
265
- response = portal.get(path, raw=raw, database=database)
252
+ response = portal.get(path, raw=raw or inserts, database=database)
266
253
  except Exception as e:
267
254
  if "404" in str(e) and "not found" in str(e).lower():
268
255
  _print(f"Portal object not found at {portal.server}: {uuid}")
@@ -278,7 +265,38 @@ def _get_portal_object(portal: Portal, uuid: str,
278
265
  if not response.json:
279
266
  _exit(f"Invalid JSON getting Portal object: {uuid}")
280
267
  response = response.json()
281
- if raw:
268
+ if inserts:
269
+ # Format results as suitable for inserts (e.g. via update-portal-object).
270
+ response.pop("schema_version", None)
271
+ if ((isinstance(results := response.get("@graph"), list) and results) and
272
+ (isinstance(results_type := response.get("@type"), list) and results_type) and
273
+ (isinstance(results_type := results_type[0], str) and results_type.endswith("SearchResults")) and
274
+ (results_type := results_type[0:-len("SearchResults")])): # noqa
275
+ # For search results, the type (from XyzSearchResults, above) may not be precisely correct for
276
+ # each of the results; it may be the supertype (e.g. QualityMetric vs QualityMetricWorkflowRun);
277
+ # so for types which are supertypes (gotten via Portal.get_schemas_super_type_map) we actually
278
+ # lookup each result individually to determine its actual precise type.
279
+ if not ((supertypes := portal.get_schemas_super_type_map()) and (subtypes := supertypes.get(results_type))):
280
+ subtypes = None
281
+ response = {}
282
+ for result in results:
283
+ result.pop("schema_version", None)
284
+ if (subtypes and
285
+ (result_uuid := result.get("uuid")) and
286
+ (individual_result := portal.get_metadata(result_uuid, raise_exception=False)) and
287
+ isinstance(result_type:= individual_result.get("@type"), list) and result_type and result_type[0]): # noqa
288
+ result_type = result_type[0]
289
+ else:
290
+ result_type = results_type
291
+ if response.get(result_type):
292
+ response[result_type].append(result)
293
+ else:
294
+ response[result_type] = [result]
295
+ # Get the result as non-raw so we can get its type.
296
+ elif ((response_cooked := portal.get(path, database=database)) and
297
+ (isinstance(response_type := response_cooked.json().get("@type"), list) and response_type)):
298
+ response = {f"{response_type[0]}": [response]}
299
+ elif raw:
282
300
  response.pop("schema_version", None)
283
301
  return response
284
302
 
@@ -292,7 +310,7 @@ def _get_schema(portal: Portal, name: str) -> Tuple[Optional[dict], Optional[str
292
310
  if portal and name and (name := name.replace("_", "").replace("-", "").strip().lower()):
293
311
  if schemas := _get_schemas(portal):
294
312
  for schema_name in schemas:
295
- if schema_name.replace("_", "").replace("-", "").strip().lower() == name:
313
+ if schema_name.replace("_", "").replace("-", "").strip().lower() == name.lower():
296
314
  return schemas[schema_name], schema_name
297
315
  return None, None
298
316
 
@@ -303,56 +321,80 @@ def _is_maybe_schema_name(value: str) -> bool:
303
321
  return False
304
322
 
305
323
 
306
- def _print_schema(schema: dict, details: bool = False, more_details: bool = False, all: bool = False,
307
- raw: bool = False, raw_yaml: bool = False) -> None:
308
- if raw:
309
- if raw_yaml:
310
- _print(yaml.dump(schema))
324
+ def _is_schema_name(portal: Portal, value: str) -> bool:
325
+ try:
326
+ return _get_schema(portal, value)[0] is not None
327
+ except Exception:
328
+ return False
329
+
330
+
331
+ def _is_schema_named_json_file_name(portal: Portal, value: str) -> bool:
332
+ try:
333
+ return value.endswith(".json") and _is_schema_name(portal, os.path.basename(value[:-5]))
334
+ except Exception:
335
+ return False
336
+
337
+
338
+ def _get_schema_name_from_schema_named_json_file_name(portal: Portal, value: str) -> Optional[str]:
339
+ try:
340
+ if not value.endswith(".json"):
341
+ return None
342
+ _, schema_name = _get_schema(portal, os.path.basename(value[:-5]))
343
+ return schema_name
344
+ except Exception:
345
+ return False
346
+
347
+
348
+ def _print_schema(schema: dict, terse: bool = False, all: bool = False,
349
+ summary: bool = False, yaml: bool = False) -> None:
350
+ if summary is not True:
351
+ if yaml:
352
+ _print_output(yaml.dump(schema))
311
353
  else:
312
- _print(json.dumps(schema, indent=4))
354
+ _print_output(json.dumps(schema, indent=4))
313
355
  return
314
- _print_schema_info(schema, details=details, more_details=more_details, all=all)
356
+ _print_schema_info(schema, terse=terse, all=all)
315
357
 
316
358
 
317
359
  def _print_schema_info(schema: dict, level: int = 0,
318
- details: bool = False, more_details: bool = False, all: bool = False,
360
+ terse: bool = False, all: bool = False,
319
361
  required: Optional[List[str]] = None) -> None:
320
362
  if not schema or not isinstance(schema, dict):
321
363
  return
322
364
  identifying_properties = schema.get("identifyingProperties")
323
365
  if level == 0:
324
366
  if required_properties := schema.get("required"):
325
- _print("- required properties:")
367
+ _print_output("- required properties:")
326
368
  for required_property in sorted(list(set(required_properties))):
327
369
  if not all and required_property in _SCHEMAS_IGNORE_PROPERTIES:
328
370
  continue
329
371
  if property_type := (info := schema.get("properties", {}).get(required_property, {})).get("type"):
330
372
  if property_type == "array" and (array_type := info.get("items", {}).get("type")):
331
- _print(f" - {required_property}: {property_type} of {array_type}")
373
+ _print_output(f" - {required_property}: {property_type} of {array_type}")
332
374
  else:
333
- _print(f" - {required_property}: {property_type}")
375
+ _print_output(f" - {required_property}: {property_type}")
334
376
  else:
335
- _print(f" - {required_property}")
377
+ _print_output(f" - {required_property}")
336
378
  if isinstance(any_of := schema.get("anyOf"), list):
337
379
  if ((any_of == [{"required": ["submission_centers"]}, {"required": ["consortia"]}]) or
338
380
  (any_of == [{"required": ["consortia"]}, {"required": ["submission_centers"]}])): # noqa
339
381
  # Very very special case.
340
- _print(f" - at least one of:")
341
- _print(f" - consortia: array of string")
342
- _print(f" - submission_centers: array of string")
382
+ _print_output(f" - at least one of:")
383
+ _print_output(f" - consortia: array of string")
384
+ _print_output(f" - submission_centers: array of string")
343
385
  required = required_properties
344
386
  if identifying_properties := schema.get("identifyingProperties"):
345
- _print("- identifying properties:")
387
+ _print_output("- identifying properties:")
346
388
  for identifying_property in sorted(list(set(identifying_properties))):
347
389
  if not all and identifying_property in _SCHEMAS_IGNORE_PROPERTIES:
348
390
  continue
349
391
  if property_type := (info := schema.get("properties", {}).get(identifying_property, {})).get("type"):
350
392
  if property_type == "array" and (array_type := info.get("items", {}).get("type")):
351
- _print(f" - {identifying_property}: {property_type} of {array_type}")
393
+ _print_output(f" - {identifying_property}: {property_type} of {array_type}")
352
394
  else:
353
- _print(f" - {identifying_property}: {property_type}")
395
+ _print_output(f" - {identifying_property}: {property_type}")
354
396
  else:
355
- _print(f" - {identifying_property}")
397
+ _print_output(f" - {identifying_property}")
356
398
  if properties := schema.get("properties"):
357
399
  reference_properties = []
358
400
  for property_name in properties:
@@ -362,16 +404,16 @@ def _print_schema_info(schema: dict, level: int = 0,
362
404
  if link_to := property.get("linkTo"):
363
405
  reference_properties.append({"name": property_name, "ref": link_to})
364
406
  if reference_properties:
365
- _print("- reference properties:")
407
+ _print_output("- reference properties:")
366
408
  for reference_property in sorted(reference_properties, key=lambda key: key["name"]):
367
- _print(f" - {reference_property['name']}: {reference_property['ref']}")
409
+ _print_output(f" - {reference_property['name']}: {reference_property['ref']}")
368
410
  if schema.get("additionalProperties") is True:
369
- _print(f" - additional properties are allowed")
370
- if not more_details:
411
+ _print_output(f" - additional properties are allowed")
412
+ if terse:
371
413
  return
372
414
  if properties := (schema.get("properties") if level == 0 else schema):
373
415
  if level == 0:
374
- _print("- properties:")
416
+ _print_output("- properties:")
375
417
  for property_name in sorted(properties):
376
418
  if not all and property_name in _SCHEMAS_IGNORE_PROPERTIES:
377
419
  continue
@@ -392,9 +434,8 @@ def _print_schema_info(schema: dict, level: int = 0,
392
434
  property_type = "open ended object"
393
435
  if property.get("calculatedProperty"):
394
436
  suffix += f" | calculated"
395
- _print(f"{spaces}- {property_name}: {property_type}{suffix}")
396
- _print_schema_info(object_properties, level=level + 1,
397
- details=details, more_details=more_details, all=all,
437
+ _print_output(f"{spaces}- {property_name}: {property_type}{suffix}")
438
+ _print_schema_info(object_properties, level=level + 1, terse=terse, all=all,
398
439
  required=property.get("required"))
399
440
  elif property_type == "array":
400
441
  suffix = ""
@@ -416,28 +457,28 @@ def _print_schema_info(schema: dict, level: int = 0,
416
457
  if property_type := property_items.get("type"):
417
458
  if property_type == "object":
418
459
  suffix = ""
419
- _print(f"{spaces}- {property_name}: array of object{suffix}")
460
+ _print_output(f"{spaces}- {property_name}: array of object{suffix}")
420
461
  _print_schema_info(property_items.get("properties"), level=level + 1,
421
- details=details, more_details=more_details, all=all,
462
+ terse=terse, all=all,
422
463
  required=property_items.get("required"))
423
464
  elif property_type == "array":
424
465
  # This (array-of-array) never happens to occur at this time (February 2024).
425
- _print(f"{spaces}- {property_name}: array of array{suffix}")
466
+ _print_output(f"{spaces}- {property_name}: array of array{suffix}")
426
467
  else:
427
- _print(f"{spaces}- {property_name}: array of {property_type}{suffix}")
468
+ _print_output(f"{spaces}- {property_name}: array of {property_type}{suffix}")
428
469
  else:
429
- _print(f"{spaces}- {property_name}: array{suffix}")
470
+ _print_output(f"{spaces}- {property_name}: array{suffix}")
430
471
  else:
431
- _print(f"{spaces}- {property_name}: array{suffix}")
472
+ _print_output(f"{spaces}- {property_name}: array{suffix}")
432
473
  if enumeration:
433
474
  nenums = 0
434
475
  maxenums = 15
435
476
  for enum in sorted(enumeration):
436
477
  if (nenums := nenums + 1) >= maxenums:
437
478
  if (remaining := len(enumeration) - nenums) > 0:
438
- _print(f"{spaces} - [{remaining} more ...]")
479
+ _print_output(f"{spaces} - [{remaining} more ...]")
439
480
  break
440
- _print(f"{spaces} - {enum}")
481
+ _print_output(f"{spaces} - {enum}")
441
482
  else:
442
483
  if isinstance(property_type, list):
443
484
  property_type = " or ".join(sorted(property_type))
@@ -479,31 +520,31 @@ def _print_schema_info(schema: dict, level: int = 0,
479
520
  suffix += f" | max length: {max_length}"
480
521
  if (min_length := property.get("minLength")) is not None:
481
522
  suffix += f" | min length: {min_length}"
482
- _print(f"{spaces}- {property_name}: {property_type}{suffix}")
523
+ _print_output(f"{spaces}- {property_name}: {property_type}{suffix}")
483
524
  if enumeration:
484
525
  nenums = 0
485
526
  maxenums = 15
486
527
  for enum in sorted(enumeration):
487
528
  if (nenums := nenums + 1) >= maxenums:
488
529
  if (remaining := len(enumeration) - nenums) > 0:
489
- _print(f"{spaces} - [{remaining} more ...]")
530
+ _print_output(f"{spaces} - [{remaining} more ...]")
490
531
  break
491
- _print(f"{spaces} - {enum}")
532
+ _print_output(f"{spaces} - {enum}")
492
533
  else:
493
- _print(f"{spaces}- {property_name}")
534
+ _print_output(f"{spaces}- {property_name}")
494
535
 
495
536
 
496
537
  def _print_all_schema_names(portal: Portal,
497
- details: bool = False, more_details: bool = False, all: bool = False,
498
- tree: bool = False, raw: bool = False, raw_yaml: bool = False) -> None:
538
+ terse: bool = False, all: bool = False,
539
+ tree: bool = False, summary: bool = False, yaml: bool = False) -> None:
499
540
  if not (schemas := _get_schemas(portal)):
500
541
  return
501
542
 
502
- if raw:
503
- if raw_yaml:
504
- _print(yaml.dump(schemas))
543
+ if summary is not True:
544
+ if yaml:
545
+ _print_output(yaml.dump(schemas))
505
546
  else:
506
- _print(json.dumps(schemas, indent=4))
547
+ _print_output(json.dumps(schemas, indent=4))
507
548
  return
508
549
 
509
550
  if tree:
@@ -513,16 +554,16 @@ def _print_all_schema_names(portal: Portal,
513
554
  for schema_name in sorted(schemas.keys()):
514
555
  if parent_schema_name := _get_parent_schema_name(schemas[schema_name]):
515
556
  if schemas[schema_name].get("isAbstract") is True:
516
- _print(f"{schema_name} | parent: {parent_schema_name} | abstract")
557
+ _print_output(f"{schema_name} | parent: {parent_schema_name} | abstract")
517
558
  else:
518
- _print(f"{schema_name} | parent: {parent_schema_name}")
559
+ _print_output(f"{schema_name} | parent: {parent_schema_name}")
519
560
  else:
520
561
  if schemas[schema_name].get("isAbstract") is True:
521
- _print(f"{schema_name} | abstract")
562
+ _print_output(f"{schema_name} | abstract")
522
563
  else:
523
- _print(schema_name)
524
- if details:
525
- _print_schema(schemas[schema_name], details=details, more_details=more_details, all=all)
564
+ _print_output(schema_name)
565
+ if not terse:
566
+ _print_schema(schemas[schema_name], terse=terse, all=all)
526
567
 
527
568
 
528
569
  def _get_parent_schema_name(schema: dict) -> Optional[str]:
@@ -559,8 +600,7 @@ def _print_schemas_tree(schemas: dict) -> None:
559
600
  def _print_tree(root_name: Optional[str],
560
601
  children_of: Callable,
561
602
  has_children: Optional[Callable] = None,
562
- name_of: Optional[Callable] = None,
563
- print: Callable = print) -> None:
603
+ name_of: Optional[Callable] = None) -> None:
564
604
  """
565
605
  Recursively prints as a tree structure the given root name and any of its
566
606
  children (again, recursively) as specified by the given children_of callable;
@@ -589,26 +629,26 @@ def _print_tree(root_name: Optional[str],
589
629
  if has_children(path):
590
630
  extension = branch if pointer == tee else space
591
631
  yield from tree_generator(path, prefix=prefix+extension)
592
- print(first + ((name_of(root_name) if callable(name_of) else root_name) or "root"))
632
+ _print_output(first + ((name_of(root_name) if callable(name_of) else root_name) or "root"))
593
633
  for line in tree_generator(root_name, prefix=" "):
594
- print(line)
634
+ _print_output(line)
595
635
 
596
636
 
597
637
  def _read_json_from_file(file: str) -> Optional[dict]:
598
638
  if not os.path.exists(file):
599
639
  _print(f"Cannot find file: {file}")
600
- sys.exit(1)
640
+ _exit(1)
601
641
  try:
602
642
  with io.open(file, "r") as f:
603
643
  try:
604
644
  return json.load(f)
605
645
  except Exception:
606
646
  _print(f"Cannot parse JSON in file: {file}")
607
- sys.exit(1)
647
+ _exit(1)
608
648
  except Exception as e:
609
- print(e)
649
+ _print(e)
610
650
  _print(f"Cannot open file: {file}")
611
- sys.exit(1)
651
+ _exit(1)
612
652
 
613
653
 
614
654
  def _print(*args, **kwargs):
@@ -617,10 +657,26 @@ def _print(*args, **kwargs):
617
657
  sys.stdout.flush()
618
658
 
619
659
 
620
- def _exit(message: Optional[str] = None) -> None:
621
- if message:
660
+ def _print_output(value: str):
661
+ global _output_file
662
+ if _output_file:
663
+ _output_file.write(value)
664
+ _output_file.write("\n")
665
+ else:
666
+ with uncaptured_output():
667
+ PRINT(value)
668
+ sys.stdout.flush()
669
+
670
+
671
+ def _exit(message: Optional[Union[str, int]] = None, status: Optional[int] = None) -> None:
672
+ global _output_file
673
+ if isinstance(message, str):
622
674
  _print(f"ERROR: {message}")
623
- sys.exit(1)
675
+ elif isinstance(message, int) and not isinstance(status, int):
676
+ status = message
677
+ if _output_file:
678
+ _output_file.close()
679
+ sys.exit(status if isinstance(status, int) else (0 if status is None else 1))
624
680
 
625
681
 
626
682
  if __name__ == "__main__":
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dcicutils
3
- Version: 8.13.2
3
+ Version: 8.13.3.1b1
4
4
  Summary: Utility package for interacting with the 4DN Data Portal and other 4DN resources
5
5
  Home-page: https://github.com/4dn-dcic/utils
6
6
  License: MIT
@@ -26,8 +26,8 @@ Requires-Dist: PyJWT (>=2.6.0,<3.0.0)
26
26
  Requires-Dist: PyYAML (>=6.0.1,<7.0.0)
27
27
  Requires-Dist: appdirs (>=1.4.4,<2.0.0)
28
28
  Requires-Dist: aws-requests-auth (>=0.4.2,<1)
29
- Requires-Dist: boto3 (>=1.34.136,<2.0.0)
30
- Requires-Dist: botocore (>=1.34.136,<2.0.0)
29
+ Requires-Dist: boto3 (>=1.34.144,<2.0.0)
30
+ Requires-Dist: botocore (>=1.34.144,<2.0.0)
31
31
  Requires-Dist: chardet (>=5.2.0,<6.0.0)
32
32
  Requires-Dist: docker (>=4.4.4,<5.0.0)
33
33
  Requires-Dist: elasticsearch (==7.13.4)
@@ -60,7 +60,8 @@ dcicutils/s3_utils.py,sha256=LauLFQGvZLfpBJ81tYMikjLd3SJRz2R_FrL1n4xSlyI,28868
60
60
  dcicutils/schema_utils.py,sha256=GmRm-XqZKJ6qine16SQF1txcby9WougDav_sYmKNs9E,12400
61
61
  dcicutils/scripts/publish_to_pypi.py,sha256=sMd4WASQGlxlh7uLrt2eGkFRXYgONVmvIg8mClMS5RQ,13903
62
62
  dcicutils/scripts/run_license_checker.py,sha256=z2keYnRDZsHQbTeo1XORAXSXNJK5axVzL5LjiNqZ7jE,4184
63
- dcicutils/scripts/view_portal_object.py,sha256=rnlIoblDpoLPJ-Bor7OHxLgrVZyvqoxA0jmHW8ogg3s,29805
63
+ dcicutils/scripts/update_portal_object.py,sha256=p9pFkoA3ZZOWvh-GMDpgR8qOfx_jQppOVNOjsuZndAU,18810
64
+ dcicutils/scripts/view_portal_object.py,sha256=h8COy0lcLNWF9b5spjrlQ28wfqyTTMqAeC_xpFXutus,32262
64
65
  dcicutils/secrets_utils.py,sha256=8dppXAsiHhJzI6NmOcvJV5ldvKkQZzh3Fl-cb8Wm7MI,19745
65
66
  dcicutils/sheet_utils.py,sha256=VlmzteONW5VF_Q4vo0yA5vesz1ViUah1MZ_yA1rwZ0M,33629
66
67
  dcicutils/snapshot_utils.py,sha256=YDeI3vD-MhAtHwKDzfEm2q-n3l-da2yRpRR3xp0Ah1M,23021
@@ -74,8 +75,8 @@ dcicutils/trace_utils.py,sha256=g8kwV4ebEy5kXW6oOrEAUsurBcCROvwtZqz9fczsGRE,1769
74
75
  dcicutils/validation_utils.py,sha256=cMZIU2cY98FYtzK52z5WUYck7urH6JcqOuz9jkXpqzg,14797
75
76
  dcicutils/variant_utils.py,sha256=2H9azNx3xAj-MySg-uZ2SFqbWs4kZvf61JnK6b-h4Qw,4343
76
77
  dcicutils/zip_utils.py,sha256=_Y9EmL3D2dUZhxucxHvrtmmlbZmK4FpSsHEb7rGSJLU,3265
77
- dcicutils-8.13.2.dist-info/LICENSE.txt,sha256=qnwSmfnEWMl5l78VPDEzAmEbLVrRqQvfUQiHT0ehrOo,1102
78
- dcicutils-8.13.2.dist-info/METADATA,sha256=eg6mLuD6nLgu-UeTL97H4ONCf9Y3VRrHYakT7UR1wDk,3442
79
- dcicutils-8.13.2.dist-info/WHEEL,sha256=7Z8_27uaHI_UZAc4Uox4PpBhQ9Y5_modZXWMxtUi4NU,88
80
- dcicutils-8.13.2.dist-info/entry_points.txt,sha256=51Q4F_2V10L0282W7HFjP4jdzW4K8lnWDARJQVFy_hw,270
81
- dcicutils-8.13.2.dist-info/RECORD,,
78
+ dcicutils-8.13.3.1b1.dist-info/LICENSE.txt,sha256=qnwSmfnEWMl5l78VPDEzAmEbLVrRqQvfUQiHT0ehrOo,1102
79
+ dcicutils-8.13.3.1b1.dist-info/METADATA,sha256=Qf_4Nxv0PpDGb0NuNqilIzyNY_tuJ8-sgXyN9YZZ1Zc,3446
80
+ dcicutils-8.13.3.1b1.dist-info/WHEEL,sha256=7Z8_27uaHI_UZAc4Uox4PpBhQ9Y5_modZXWMxtUi4NU,88
81
+ dcicutils-8.13.3.1b1.dist-info/entry_points.txt,sha256=W6kEWdUJk9tQ4myAgpehPdebcwvCAZ7UgB-wyPgDUMg,335
82
+ dcicutils-8.13.3.1b1.dist-info/RECORD,,
@@ -2,5 +2,6 @@
2
2
  publish-to-pypi=dcicutils.scripts.publish_to_pypi:main
3
3
  run-license-checker=dcicutils.scripts.run_license_checker:main
4
4
  show-contributors=dcicutils.contribution_scripts:show_contributors_main
5
+ update-portal-object=dcicutils.scripts.update_portal_object:main
5
6
  view-portal-object=dcicutils.scripts.view_portal_object:main
6
7