looper 1.7.1__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
looper/project.py CHANGED
@@ -3,6 +3,8 @@
 import itertools
 import os
 
+from yaml import safe_load
+
 try:
     from functools import cached_property
 except ImportError:
@@ -26,6 +28,7 @@ from .exceptions import *
 from .pipeline_interface import PipelineInterface
 from .processed_project import populate_project_paths, populate_sample_paths
 from .utils import *
+from .const import PipelineLevel
 
 __all__ = ["Project"]
 
@@ -126,6 +129,12 @@ class Project(peppyProject):
 
         self[EXTRA_KEY] = {}
 
+        try:
+            # For loading PEPs via CSV, Peppy cannot infer project name.
+            name = self.name
+        except NotImplementedError:
+            self.name = None
+
         # add sample pipeline interface to the project
         if kwargs.get(SAMPLE_PL_ARG):
             self.set_sample_piface(kwargs.get(SAMPLE_PL_ARG))
@@ -144,7 +153,7 @@ class Project(peppyProject):
         self.dcc = (
             None
             if divcfg_path is None
-            else ComputingConfiguration(filepath=divcfg_path)
+            else ComputingConfiguration.from_yaml_file(filepath=divcfg_path)
         )
         if DRY_RUN_KEY in self and not self[DRY_RUN_KEY]:
             _LOGGER.debug("Ensuring project directories exist")
@@ -300,7 +309,7 @@ class Project(peppyProject):
         :return list[looper.PipelineInterface]: list of pipeline interfaces
         """
         return [
-            PipelineInterface(pi, pipeline_type="project")
+            PipelineInterface(pi, pipeline_type=PipelineLevel.PROJECT.value)
             for pi in self.project_pipeline_interface_sources
         ]
 
@@ -343,7 +352,9 @@ class Project(peppyProject):
 
         :return bool: whether pipestat configuration is complete
         """
-        return self._check_if_pipestat_configured(project_level=True)
+        return self._check_if_pipestat_configured(
+            pipeline_type=PipelineLevel.PROJECT.value
+        )
 
     def get_sample_piface(self, sample_name):
         """
@@ -441,73 +452,91 @@ class Project(peppyProject):
         schema_set.update([schema_file])
         return list(schema_set)
 
-    def get_pipestat_managers(self, sample_name=None, project_level=False):
-        """
-        Get a collection of pipestat managers for the selected sample or project.
+    def _check_if_pipestat_configured(self, pipeline_type=PipelineLevel.SAMPLE.value):
 
-        The number of pipestat managers corresponds to the number of unique
-        output schemas in the pipeline interfaces specified by the sample or project.
+        # First check if pipestat key is in looper_config, if not return false
 
-        :param str sample_name: sample name to get pipestat managers for
-        :param bool project_level: whether the project PipestatManagers
-            should be returned
-        :return dict[str, pipestat.PipestatManager]: a mapping of pipestat
-            managers by pipeline interface name
-        """
-        pipestat_configs = self._get_pipestat_configuration(
-            sample_name=sample_name, project_level=project_level
-        )
-        return {
-            pipeline_name: PipestatManager(**pipestat_vars)
-            for pipeline_name, pipestat_vars in pipestat_configs.items()
-        }
+        if PIPESTAT_KEY not in self[EXTRA_KEY]:
+            return False
+        elif PIPESTAT_KEY in self[EXTRA_KEY]:
+            if self[EXTRA_KEY][PIPESTAT_KEY] is None:
+                return False
+            else:
+                # If pipestat key is available assume user desires pipestat usage
+                # This should return True OR raise an exception at this point.
+                return self._get_pipestat_configuration(pipeline_type)
 
-    def _check_if_pipestat_configured(self, project_level=False):
-        """
-        A helper method determining whether pipestat configuration is complete
+    def _get_pipestat_configuration(self, pipeline_type=PipelineLevel.SAMPLE.value):
 
-        :param bool project_level: whether the project pipestat config should be checked
-        :return bool: whether pipestat configuration is complete
-        """
-        try:
-            if project_level:
-                pipestat_configured = self._get_pipestat_configuration(
-                    sample_name=None, project_level=project_level
+        # First check if it already exists
+
+        if pipeline_type == PipelineLevel.SAMPLE.value:
+            for piface in self.pipeline_interfaces:
+
+                pipestat_config_path = self._check_for_existing_pipestat_config(piface)
+
+                if not pipestat_config_path:
+                    self._create_pipestat_config(piface)
+                else:
+                    piface.psm = PipestatManager(
+                        config_file=pipestat_config_path, multi_pipelines=True
+                    )
+
+        elif pipeline_type == PipelineLevel.PROJECT.value:
+            for prj_piface in self.project_pipeline_interfaces:
+                pipestat_config_path = self._check_for_existing_pipestat_config(
+                    prj_piface
                 )
-            else:
-                for s in self.samples:
-                    pipestat_configured = self._get_pipestat_configuration(
-                        sample_name=s.sample_name
+
+                if not pipestat_config_path:
+                    self._create_pipestat_config(prj_piface)
+                else:
+                    prj_piface.psm = PipestatManager(
+                        config_file=pipestat_config_path, multi_pipelines=True
                     )
-        except Exception as e:
-            context = (
-                f"Project '{self.name}'"
-                if project_level
-                else f"Sample '{s.sample_name}'"
-            )
-            _LOGGER.debug(
-                f"Pipestat configuration incomplete for {context}; "
-                f"caught exception: {getattr(e, 'message', repr(e))}"
-            )
-            return False
         else:
-            if pipestat_configured is not None and pipestat_configured != {}:
-                return True
-            else:
-                return False
+            _LOGGER.error(
+                msg="No pipeline type specified during pipestat configuration"
+            )
+
+        return True
 
-    def _get_pipestat_configuration(self, sample_name=None, project_level=False):
+    def _check_for_existing_pipestat_config(self, piface):
         """
-        Get all required pipestat configuration variables from looper_config file
+
+        config files should be in looper output directory and named as:
+
+        pipestat_config_pipelinename.yaml
+
         """
 
-        ret = {}
-        if not project_level and sample_name is None:
-            raise ValueError(
-                "Must provide the sample_name to determine the "
-                "sample to get the PipestatManagers for"
+        # Cannot do much if we cannot retrieve the pipeline_name
+        try:
+            pipeline_name = piface.data["pipeline_name"]
+        except KeyError:
+            raise Exception(
+                "To use pipestat, a pipeline_name must be set in the pipeline interface."
             )
 
+        config_file_name = f"pipestat_config_{pipeline_name}.yaml"
+        output_dir = expandpath(self.output_dir)
+
+        config_file_path = os.path.join(
+            # os.path.dirname(output_dir), config_file_name
+            output_dir,
+            config_file_name,
+        )
+
+        if os.path.exists(config_file_path):
+            return config_file_path
+        else:
+            return None
+
+    def _create_pipestat_config(self, piface):
+        """
+        Each piface needs its own config file and associated psm
+        """
+
         if PIPESTAT_KEY in self[EXTRA_KEY]:
             pipestat_config_dict = self[EXTRA_KEY][PIPESTAT_KEY]
         else:
@@ -521,13 +550,58 @@ class Project(peppyProject):
         # Expand paths in the event ENV variables were used in config files
         output_dir = expandpath(self.output_dir)
 
-        # Get looper user configured items first and update the pipestat_config_dict
+        pipestat_config_dict.update({"output_dir": output_dir})
+
+        if "output_schema" in piface.data:
+            schema_path = expandpath(piface.data["output_schema"])
+            if not os.path.isabs(schema_path):
+                # Get path relative to the pipeline_interface
+                schema_path = os.path.join(
+                    os.path.dirname(piface.pipe_iface_file), schema_path
+                )
+            pipestat_config_dict.update({"schema_path": schema_path})
+            try:
+                with open(schema_path, "r") as f:
+                    output_schema_data = safe_load(f)
+                    output_schema_pipeline_name = output_schema_data[
+                        PIPELINE_INTERFACE_PIPELINE_NAME_KEY
+                    ]
+            except Exception:
+                output_schema_pipeline_name = None
+        else:
+            output_schema_pipeline_name = None
+        if "pipeline_name" in piface.data:
+            pipeline_name = piface.data["pipeline_name"]
+            pipestat_config_dict.update({"pipeline_name": piface.data["pipeline_name"]})
+        else:
+            pipeline_name = None
+        if "pipeline_type" in piface.data:
+            pipestat_config_dict.update({"pipeline_type": piface.data["pipeline_type"]})
+
+        # Warn user if there is a mismatch in pipeline_names from sources!!!
+        if pipeline_name != output_schema_pipeline_name:
+            _LOGGER.warning(
+                msg=f"Pipeline name mismatch detected. Pipeline interface: {pipeline_name} Output schema: {output_schema_pipeline_name} Defaulting to pipeline_interface value."
+            )
+
         try:
             results_file_path = expandpath(pipestat_config_dict["results_file_path"])
-            if not os.path.exists(os.path.dirname(results_file_path)):
-                results_file_path = os.path.join(
-                    os.path.dirname(output_dir), results_file_path
-                )
+
+            if not os.path.isabs(results_file_path):
+                # e.g. user configures "results.yaml" as results_file_path
+                if "{record_identifier}" in results_file_path:
+                    # this is specifically to check if the user wishes tro generate a file for EACH record
+                    if not os.path.exists(os.path.dirname(results_file_path)):
+                        results_file_path = os.path.join(output_dir, results_file_path)
+                else:
+                    if not os.path.exists(os.path.dirname(results_file_path)):
+                        results_file_path = os.path.join(
+                            output_dir, f"{pipeline_name}/", results_file_path
+                        )
+            else:
+                # Do nothing because the user has given an absolute file path
+                pass
+
             pipestat_config_dict.update({"results_file_path": results_file_path})
         except KeyError:
             results_file_path = None
@@ -540,57 +614,20 @@ class Project(peppyProject):
         except KeyError:
             flag_file_dir = None
 
-        if sample_name:
-            pipestat_config_dict.update({"record_identifier": sample_name})
-
-        if project_level and "project_name" in pipestat_config_dict:
-            pipestat_config_dict.update(
-                {"project_name": pipestat_config_dict["project_name"]}
-            )
-
-        if project_level and "{record_identifier}" in results_file_path:
-            # if project level and using {record_identifier}, pipestat needs some sort of record_identifier during creation
-            pipestat_config_dict.update(
-                {"record_identifier": "default_project_record_identifier"}
-            )
-
-        pipestat_config_dict.update({"output_dir": output_dir})
-
-        pifaces = (
-            self.project_pipeline_interfaces
-            if project_level
-            else self._interfaces_by_sample[sample_name]
+        # Pipestat_dict_ is now updated from all sources and can be written to a yaml.
+        pipestat_config_path = os.path.join(
+            output_dir,
+            f"pipestat_config_{pipeline_name}.yaml",
         )
 
-        for piface in pifaces:
-            # We must also obtain additional pipestat items from the pipeline author's piface
-            if "output_schema" in piface.data:
-                schema_path = expandpath(piface.data["output_schema"])
-                if not os.path.isabs(schema_path):
-                    # Get path relative to the pipeline_interface
-                    schema_path = os.path.join(
-                        os.path.dirname(piface.pipe_iface_file), schema_path
-                    )
-                pipestat_config_dict.update({"schema_path": schema_path})
-            if "pipeline_name" in piface.data:
-                pipestat_config_dict.update(
-                    {"pipeline_name": piface.data["pipeline_name"]}
-                )
-            if "pipeline_type" in piface.data:
-                pipestat_config_dict.update(
-                    {"pipeline_type": piface.data["pipeline_type"]}
-                )
+        # Two end goals, create a config file
+        write_pipestat_config(pipestat_config_path, pipestat_config_dict)
 
-            # Pipestat_dict_ is now updated from all sources and can be written to a yaml.
-            looper_pipestat_config_path = os.path.join(
-                os.path.dirname(output_dir), "looper_pipestat_config.yaml"
-            )
-            write_pipestat_config(looper_pipestat_config_path, pipestat_config_dict)
+        piface.psm = PipestatManager(
+            config_file=pipestat_config_path, multi_pipelines=True
+        )
 
-            ret[piface.pipeline_name] = {
-                "config_file": looper_pipestat_config_path,
-            }
-        return ret
+        return None
 
     def populate_pipeline_outputs(self):
         """
@@ -657,7 +694,7 @@ class Project(peppyProject):
         pifaces_by_sample = {}
         for source, sample_names in self._samples_by_interface.items():
             try:
-                pi = PipelineInterface(source, pipeline_type="sample")
+                pi = PipelineInterface(source, pipeline_type=PipelineLevel.SAMPLE.value)
             except PipelineInterfaceConfigError as e:
                 _LOGGER.debug(f"Skipping pipeline interface creation: {e}")
             else:
@@ -708,7 +745,9 @@ class Project(peppyProject):
                 for source in piface_srcs:
                     source = self._resolve_path_with_cfg(source)
                     try:
-                        PipelineInterface(source, pipeline_type="sample")
+                        PipelineInterface(
+                            source, pipeline_type=PipelineLevel.SAMPLE.value
+                        )
                     except (
                         ValidationError,
                         IOError,
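
In summary, the project.py changes above replace the single shared looper_pipestat_config.yaml (previously written next to the output directory) with one pipestat config file per pipeline interface, written inside the looper output directory and attached to each interface as piface.psm. A minimal sketch of the resulting naming convention, using a hypothetical helper that is not part of looper itself:

    import os

    def pipestat_config_path(output_dir: str, pipeline_name: str) -> str:
        # Mirrors the convention used by _check_for_existing_pipestat_config and
        # _create_pipestat_config: one config file per pipeline interface,
        # keyed by its pipeline_name.
        return os.path.join(output_dir, f"pipestat_config_{pipeline_name}.yaml")

    print(pipestat_config_path("/tmp/looper_output", "example_pipeline"))
    # /tmp/looper_output/pipestat_config_example_pipeline.yaml
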
looper/utils.py CHANGED
@@ -16,9 +16,10 @@ from peppy import Project as peppyProject
 from peppy.const import *
 from ubiquerg import convert_value, expandpath, parse_registry_path
 from pephubclient.constants import RegistryPath
-from pydantic.error_wrappers import ValidationError
+from pydantic import ValidationError
 
 from .const import *
+from .command_models.commands import SUPPORTED_COMMANDS
 from .exceptions import MisconfigurationException, RegistryPathException
 
 _LOGGER = getLogger(__name__)
@@ -94,7 +95,9 @@ def fetch_sample_flags(prj, sample, pl_name, flag_dir=None):
     return [
         x
         for x in folder_contents
-        if os.path.splitext(x)[1] == ".flag" and os.path.basename(x).startswith(pl_name)
+        if os.path.splitext(x)[1] == ".flag"
+        and os.path.basename(x).startswith(pl_name)
+        and sample.sample_name in x
     ]
 
 
@@ -250,19 +253,20 @@ def read_yaml_file(filepath):
     return data
 
 
-def enrich_args_via_cfg(parser_args, aux_parser, test_args=None):
+def enrich_args_via_cfg(subcommand_name, parser_args, aux_parser, test_args=None):
     """
     Read in a looper dotfile and set arguments.
 
     Priority order: CLI > dotfile/config > parser default
 
+    :param subcommand name: the name of the command used
     :param argparse.Namespace parser_args: parsed args by the original parser
     :param argparse.Namespace aux_parser: parsed args by the a parser
         with defaults suppressed
     :return argparse.Namespace: selected argument values
     """
     cfg_args_all = (
-        _get_subcommand_args(parser_args)
+        _get_subcommand_args(subcommand_name, parser_args)
         if os.path.exists(parser_args.config_file)
         else dict()
     )
@@ -273,23 +277,42 @@ def enrich_args_via_cfg(parser_args, aux_parser, test_args=None):
     else:
         cli_args, _ = aux_parser.parse_known_args()
 
-    for dest in vars(parser_args):
-        if dest not in POSITIONAL or not hasattr(result, dest):
-            if dest in cli_args:
-                x = getattr(cli_args, dest)
-                r = convert_value(x) if isinstance(x, str) else x
-            elif cfg_args_all is not None and dest in cfg_args_all:
-                if isinstance(cfg_args_all[dest], list):
-                    r = [convert_value(i) for i in cfg_args_all[dest]]
+    def set_single_arg(argname, default_source_namespace, result_namespace):
+        if argname not in POSITIONAL or not hasattr(result, argname):
+            if argname in cli_args:
+                cli_provided_value = getattr(cli_args, argname)
+                r = (
+                    convert_value(cli_provided_value)
+                    if isinstance(cli_provided_value, str)
+                    else cli_provided_value
+                )
+            elif cfg_args_all is not None and argname in cfg_args_all:
+                if isinstance(cfg_args_all[argname], list):
+                    r = [convert_value(i) for i in cfg_args_all[argname]]
                 else:
-                    r = convert_value(cfg_args_all[dest])
+                    r = convert_value(cfg_args_all[argname])
             else:
-                r = getattr(parser_args, dest)
-            setattr(result, dest, r)
+                r = getattr(default_source_namespace, argname)
+            setattr(result_namespace, argname, r)
+
+    for top_level_argname in vars(parser_args):
+        if top_level_argname not in [cmd.name for cmd in SUPPORTED_COMMANDS]:
+            # this argument is a top-level argument
+            set_single_arg(top_level_argname, parser_args, result)
+        else:
+            # this argument actually is a subcommand
+            enriched_command_namespace = argparse.Namespace()
+            command_namespace = getattr(parser_args, top_level_argname)
+            if command_namespace:
+                for argname in vars(command_namespace):
+                    set_single_arg(
+                        argname, command_namespace, enriched_command_namespace
+                    )
+            setattr(result, top_level_argname, enriched_command_namespace)
     return result
 
 
-def _get_subcommand_args(parser_args):
+def _get_subcommand_args(subcommand_name, parser_args):
     """
     Get the union of values for the subcommand arguments from
     Project.looper, Project.looper.cli.<subcommand> and Project.looper.cli.all.
@@ -321,8 +344,8 @@ def _get_subcommand_args(parser_args):
             else dict()
         )
         args.update(
-            cfg_args[parser_args.command] or dict()
-            if parser_args.command in cfg_args
+            cfg_args[subcommand_name] or dict()
+            if subcommand_name in cfg_args
             else dict()
         )
     except (TypeError, KeyError, AttributeError, ValueError) as e:
@@ -449,7 +472,7 @@ def initiate_looper_config(
         return False
 
     if pep_path:
-        if is_registry_path(pep_path):
+        if is_pephub_registry_path(pep_path):
             pass
         else:
             pep_path = expandpath(pep_path)
@@ -537,12 +560,25 @@ def read_looper_config_file(looper_config_path: str) -> dict:
 
     # Expand paths in case ENV variables are used
     for k, v in return_dict.items():
+        if k == SAMPLE_PL_ARG or k == PROJECT_PL_ARG:
+            # Pipeline interfaces are resolved at a later point. Do it there only to maintain consistency. #474
+            pass
         if isinstance(v, str):
             v = expandpath(v)
-            if not os.path.isabs(v) and not is_registry_path(v):
-                return_dict[k] = os.path.join(config_dir_path, v)
-            else:
+            # TODO this is messy because is_pephub_registry needs to fail on anything NOT a pephub registry path
+            # https://github.com/pepkit/ubiquerg/issues/43
+            if is_PEP_file_type(v):
+                if not os.path.isabs(v):
+                    return_dict[k] = os.path.join(config_dir_path, v)
+                else:
+                    return_dict[k] = v
+            elif is_pephub_registry_path(v):
                 return_dict[k] = v
+            else:
+                if not os.path.isabs(v):
+                    return_dict[k] = os.path.join(config_dir_path, v)
+                else:
+                    return_dict[k] = v
 
     return return_dict
 
@@ -575,19 +611,23 @@ def dotfile_path(directory=os.getcwd(), must_exist=False):
         cur_dir = parent_dir
 
 
-def is_registry_path(input_string: str) -> bool:
+def is_PEP_file_type(input_string: str) -> bool:
+    """
+    Determines if the provided path is actually a file type that Looper can use for loading PEP
+    """
+
+    PEP_FILE_TYPES = ["yaml", "csv"]
+
+    res = list(filter(input_string.endswith, PEP_FILE_TYPES)) != []
+    return res
+
+
+def is_pephub_registry_path(input_string: str) -> bool:
     """
     Check if input is a registry path to pephub
     :param str input_string: path to the PEP (or registry path)
     :return bool: True if input is a registry path
     """
-    try:
-        if input_string.endswith(".yaml"):
-            return False
-    except AttributeError:
-        raise RegistryPathException(
-            msg=f"Malformed registry path. Unable to parse {input_string} as a registry path."
-        )
     try:
         registry_path = RegistryPath(**parse_registry_path(input_string))
     except (ValidationError, TypeError):
@@ -767,3 +807,15 @@ def write_submit_script(fp, content, data):
     with open(fp, "w") as f:
         f.write(content)
     return fp
+
+
+def inspect_looper_config_file(looper_config_dict) -> None:
+    """
+    Inspects looper config by printing it to terminal.
+    param dict looper_config_dict: dict representing looper_config
+
+    """
+    # Simply print this to terminal
+    print("LOOPER INSPECT")
+    for key, value in looper_config_dict.items():
+        print(f"{key} {value}")
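
The utils.py changes split the old is_registry_path check in two: is_PEP_file_type matches .yaml/.csv values, which read_looper_config_file anchors to the looper config directory when they are relative, while is_pephub_registry_path values pass through unchanged. A rough standalone sketch of that resolution logic, using hypothetical helper names rather than looper's actual functions:

    import os

    PEP_FILE_TYPES = ["yaml", "csv"]  # the extensions is_PEP_file_type accepts

    def looks_like_pep_file(value: str) -> bool:
        # Same endswith-based test as is_PEP_file_type above.
        return any(value.endswith(ext) for ext in PEP_FILE_TYPES)

    def resolve_config_value(value: str, config_dir: str) -> str:
        # Relative .yaml/.csv paths are joined to the config directory;
        # everything else (e.g. a pephub registry path) is left as-is in this sketch.
        if looks_like_pep_file(value) and not os.path.isabs(value):
            return os.path.join(config_dir, value)
        return value

    print(resolve_config_value("project/pep.yaml", "/home/user/cfg"))
    # /home/user/cfg/project/pep.yaml
    print(resolve_config_value("databio/example:default", "/home/user/cfg"))
    # databio/example:default
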
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: looper
-Version: 1.7.1
+Version: 1.8.0
 Summary: A pipeline submission engine that parses sample inputs and submits pipelines for each sample.
 Home-page: https://github.com/pepkit/looper
 Author: Nathan Sheffield, Vince Reuter, Michal Stolarczyk, Johanna Klughammer, Andre Rendeiro
@@ -21,13 +21,14 @@ Requires-Dist: eido >=0.2.1
 Requires-Dist: jinja2
 Requires-Dist: logmuse >=0.2.0
 Requires-Dist: pandas >=2.0.2
-Requires-Dist: pephubclient >=0.1.2
-Requires-Dist: peppy >=0.40.0
-Requires-Dist: pipestat <0.9.0,>=0.8.0
+Requires-Dist: pephubclient >=0.4.0
+Requires-Dist: pipestat >=0.9.2
+Requires-Dist: peppy >=0.40.2
 Requires-Dist: pyyaml >=3.12
 Requires-Dist: rich >=9.10.0
 Requires-Dist: ubiquerg >=0.5.2
-Requires-Dist: yacman >=0.9.2
+Requires-Dist: yacman ==0.9.3
+Requires-Dist: pydantic2-argparse >=0.9.2
 
 # <img src="docs/img/looper_logo.svg" alt="looper logo" height="70">
 
@@ -1,19 +1,24 @@
 looper/__init__.py,sha256=f_z9YY4ibOk7eyWoaViH_VaCXMlPQeiftbnibSFj-3E,1333
-looper/__main__.py,sha256=8CX2ae8mUQNI_Z8pdBT4i5UFqROFX1awyFnuYCKuYXg,238
-looper/_version.py,sha256=rHqoOqa3LxWXvrBmnnm2LDmV9IlmMEb5qOmw5doj3fk,22
+looper/__main__.py,sha256=OOCmI-dPUvInnJHkHNMf54cblNJ3Yl9ELOwZcfOXmD8,240
+looper/_version.py,sha256=Oc_xF94AMAHKZkZlB5rBt1iO0TXWFalg65MP4T2qt-A,22
 looper/cli_divvy.py,sha256=J07x83sqC4jJeu3_yS6KOARPWmwKGAV7JvN33T5zDac,5907
-looper/cli_looper.py,sha256=se-EbQ4nucWxMiU0VLnmV0Kss-JMjSmWih6vHaOiLi0,26367
-looper/conductor.py,sha256=9k0r_vsHCP25MrDjz7GR16_EEdqP9JZELYZJVm6ny1g,30777
-looper/const.py,sha256=bPj4lTuj2l6gwHROWqj16iHfJFo9ghZAz8THNREWW4U,8558
-looper/divvy.py,sha256=qa1ebbQTfNupAyDfhfEJ6mbZ_V3zk-D_E-Tck7miJ38,15688
+looper/cli_pydantic.py,sha256=QKG-rvy7ORkTivDIkrk4tWpVAcuxmUSZePV35hvag0k,13541
+looper/conductor.py,sha256=WAEtzZFElCK_mvsnaiGKnu5x6quYKsDMno6j6TuNG-g,31448
+looper/const.py,sha256=KbQD-Q62g61pUroEF4ogQerYhJE-xbt4cX9m15oYHTo,8682
+looper/divvy.py,sha256=SPoC7fpWHjC82NEKkpnObV1Koamt3M2tCmfAXpjdGRM,15399
 looper/exceptions.py,sha256=r6SKKt-m8CXQnXGDnuiwoA6zBJhIZflygBKjX4RCloI,3419
-looper/looper.py,sha256=U3mNqXRDZRzmFXV26TcPQYKPdY1plPvMraNHTnps4mQ,30448
+looper/looper.py,sha256=XomJIis4sFHOO5qEwzmyKT85X-lKNURX8yhOftTFmbY,30099
 looper/parser_types.py,sha256=d3FHt54f9jo9VZMr5SQkbghcAdABqiYZW2JBGO5EBnw,2327
 looper/pipeline_interface.py,sha256=y46tB1_73d1FX8N1w4-GGvRBJ7rqhenuUYVtUfIhK5s,14974
 looper/plugins.py,sha256=MaMdPmK9U_4FkNJE5kccohBbY1i2qj1NTEucubFOJek,5747
 looper/processed_project.py,sha256=jZxoMYafvr-OHFxylc5ivGty1VwXBZhl0kgoFkY-174,9837
-looper/project.py,sha256=nkNP7ftVs82Tnk2Yn6FUp60_D6bjE9sSvySE0SHqsmg,36171
-looper/utils.py,sha256=i7srIXPEnQjtNaoP0ziRpdYfB7HNY5_3rW5LoKIM15I,27257
+looper/project.py,sha256=vgJVk_H7DahMrmcGyKv8tcb6rR1JTk2RcRRO1JV866E,37290
+looper/utils.py,sha256=TS7w46XwYzb6g3mA26xOUwDAuwBnJz3IHgAJRGHZaH8,29483
+looper/command_models/DEVELOPER.md,sha256=eRxnrO-vqNJjExzamXKEq5wr_-Zw6PQEwkS9RPinYrk,2775
+looper/command_models/README.md,sha256=3RGegeZlTZYnhcHXRu6bdI_81WZom2q7QYMV-KGYY7U,588
+looper/command_models/__init__.py,sha256=6QWC2TewowEL7dATli5YpMmFWuXaLEPktofJCXkYUBI,187
+looper/command_models/arguments.py,sha256=emK7gc_fVgrSPHE2cShxJX05VrgOEn4H7szU8DBev7Q,8808
+looper/command_models/commands.py,sha256=ZZSI1mSDKejsYTr_q557MQRFrQZI8QTrXIxozxGs078,9724
 looper/default_config/divvy_config.yaml,sha256=wK5kLDGBV2wwoyqg2rl3X8SXjds4x0mwBUjUzF1Ln7g,1705
 looper/default_config/divvy_templates/localhost_bulker_template.sub,sha256=yn5VB9Brt7Hck9LT17hD2o8Kn-76gYJQk_A-8C1Gr4k,164
 looper/default_config/divvy_templates/localhost_docker_template.sub,sha256=XRr7AlR7-TP1L3hyBMfka_RgWRL9vzOlS5Kd1xSNwT0,183
@@ -55,9 +60,9 @@ looper/schemas/divvy_config_schema.yaml,sha256=7GJfKLc3VX4RGjHnOE1zxwsHXhj_ur9za
 looper/schemas/pipeline_interface_schema_generic.yaml,sha256=D16Rkpj03H9WnvA_N18iNU-hH_HwOuyESJ8Hk5hZSXc,1518
 looper/schemas/pipeline_interface_schema_project.yaml,sha256=-ZWyA0lKXWik3obuLNVk3IsAZYfbLVbCDvJnD-Fcluo,1567
 looper/schemas/pipeline_interface_schema_sample.yaml,sha256=x0OwVnijJpvm50DscvvJujdK4UAI7d71pqVemQS-D-0,1564
-looper-1.7.1.dist-info/LICENSE.txt,sha256=oB6ZGDa4kcznznJKJsLLFFcOZyi8Y6e2Jv0rJozgp-I,1269
-looper-1.7.1.dist-info/METADATA,sha256=RRHlAK9WkJv1b9KtkZpMXHk2xyATbe5DJWCcRZMq7Wk,1740
-looper-1.7.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-looper-1.7.1.dist-info/entry_points.txt,sha256=AEL1eb0gPLYvAEUewM35Ng4scXGZIWJK4Mxdj3Hm8Fw,83
-looper-1.7.1.dist-info/top_level.txt,sha256=I0Yf7djsoQAMzwHBbDiQi9hGtq4Z41_Ma5CX8qXG8Y8,7
-looper-1.7.1.dist-info/RECORD,,
+looper-1.8.0.dist-info/LICENSE.txt,sha256=oB6ZGDa4kcznznJKJsLLFFcOZyi8Y6e2Jv0rJozgp-I,1269
+looper-1.8.0.dist-info/METADATA,sha256=JNbHNlDh1i0dMn7wee_Mix6Hagjqq1iNRfvK6fe5lH8,1775
+looper-1.8.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+looper-1.8.0.dist-info/entry_points.txt,sha256=ejZpghZG3OoTK69u9rTW-yLyI6SC63bBTUb-Vw26HG4,87
+looper-1.8.0.dist-info/top_level.txt,sha256=I0Yf7djsoQAMzwHBbDiQi9hGtq4Z41_Ma5CX8qXG8Y8,7
+looper-1.8.0.dist-info/RECORD,,
@@ -1,3 +1,3 @@
 [console_scripts]
 divvy = looper.__main__:divvy_main
-looper = looper.__main__:main
+looper = looper.cli_pydantic:main