pipemake 0.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pipemake-0.6/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2023 Kocher Lab
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
pipemake-0.6/PKG-INFO ADDED
@@ -0,0 +1,18 @@
1
+ Metadata-Version: 2.1
2
+ Name: pipemake
3
+ Version: 0.6
4
+ Summary: Pipemake: A pipeline creation tool using Snakemake
5
+ Home-page: https://github.com/kocherlab/pipemake
6
+ License: MIT
7
+ Project-URL: Documentation, https://pipemake.readthedocs.io/en/latest/
8
+ Project-URL: Code, https://github.com/kocherlab/pipemake
9
+ Project-URL: Issue tracker, https://github.com/kocherlab/pipemake/issues
10
+ Requires-Python: >=3.7
11
+ Description-Content-Type: text/x-rst
12
+ License-File: LICENSE
13
+ Requires-Dist: snakemake
14
+
15
+ PipeMake
16
+ ========
17
+
18
+ Snakemake-based pipeline platform
@@ -0,0 +1,4 @@
1
+ PipeMake
2
+ ========
3
+
4
+ Snakemake-based pipeline platform
@@ -0,0 +1,16 @@
1
"""Package metadata for pipemake (version, URLs, license, authors)."""

import sys  # NOTE(review): 'sys' appears unused in this module — confirm before removing

# Basic Information
# NOTE(review): assigning __name__ at module level overrides the module's real
# import name; presumably intentional metadata style, but verify tooling that
# reads module.__name__ is unaffected.
__name__ = "pipemake"
__version__ = "0.6"
__summary__ = "Pipemake: A pipeline creation tool using Snakemake"
__url__ = "https://github.com/kocherlab/pipemake"
__code__ = "https://github.com/kocherlab/pipemake"
__issue__ = "https://github.com/kocherlab/pipemake/issues"
__docs__ = "https://pipemake.readthedocs.io/en/latest/"
__license__ = "MIT"
__copyright__ = "2023"

# Author Information
__authors__ = "Andrew Webb and Sarah Kocher"
__email__ = "19213578+aewebb80@users.noreply.github.com"
@@ -0,0 +1,136 @@
1
+ import os
2
+ import copy
3
+ import yaml
4
+
5
+ from collections import defaultdict
6
+
7
+ from pipemake.processIO import standardizeInput, returnSamples, returnPaths
8
+ from pipemake.snakemakeIO import SnakePipelineIO
9
+
10
def loadPipelineConfigs (directory):
    """Load every pipeline YAML config found in ``<directory>/configs``.

    Each config file must define 'pipeline', 'parser', 'setup',
    'command-line', and 'snakefiles' keys. Returns four mappings keyed by
    pipeline name: parser args, setup, command-line, and snakefiles.

    Raises an Exception if the configs directory is missing, a YAML file
    cannot be parsed, or two configs declare the same pipeline name.
    """

    # Create dicts to store the relevant yaml data
    pipeline_config_args = {}
    pipeline_setup = {}
    pipeline_cmd_line = {}
    pipeline_snakefiles = defaultdict(list)

    # Check the config directory exists
    config_dir = os.path.join(directory, 'configs')
    if not os.path.isdir(config_dir): raise Exception(f'Unable to find pipeline directory: {config_dir}')

    # Loop the configs
    for config_filename in os.listdir(config_dir):
        config_file = os.path.join(config_dir, config_filename)

        # Confirm the path is a file
        if not os.path.isfile(config_file): continue

        with open(config_file, "r") as config_stream:
            # Only trap YAML parsing problems; a bare except would also
            # swallow I/O errors and KeyboardInterrupt. Chain the cause so
            # the offending file/line is preserved in the traceback.
            try: config_yaml = yaml.safe_load(config_stream)
            except yaml.YAMLError as yaml_error: raise Exception('Error opening YAML') from yaml_error

        # Check pipeline_names for repeats, and if found report error
        if config_yaml['pipeline'] in pipeline_config_args:
            raise Exception(f"Pipeline {config_yaml['pipeline']} has already been assigned. Please check pipeline configs")

        pipeline_config_args[config_yaml['pipeline']] = config_yaml['parser']
        pipeline_setup[config_yaml['pipeline']] = config_yaml['setup']
        pipeline_cmd_line[config_yaml['pipeline']] = config_yaml['command-line']
        pipeline_snakefiles[config_yaml['pipeline']] = config_yaml['snakefiles']

    return pipeline_config_args, pipeline_setup, pipeline_cmd_line, pipeline_snakefiles
43
+
44
def processPipelineSetup (pipeline_setup, pipeline_args):
    """Run the setup methods of the selected pipeline.

    For each setup method whose input arguments are all present and
    non-empty in ``pipeline_args``, optionally standardizes the input
    (writing files as a side effect via standardizeInput) and collects:
    - 'singularity-args': a defaultdict(list) with a 'bind' list of paths
      that singularity must be able to access
    - 'samples': the sample names returned by the method

    Returns the collected dict (may be empty if no method applied).
    """

    # Create a dict to store the processed setup results
    process_dict = {}

    for setup_name, setup_methods in pipeline_setup.items():
        for method_args in setup_methods.values():

            # Assign the arg groups
            input_args = method_args['input']

            # Check for missing arguments
            # (command-line dashes map to underscore keys in pipeline_args)
            for input_arg in input_args['args']:
                if input_arg.replace('-', '_') not in pipeline_args: raise Exception(f'Setup argument {input_arg} not found among pipeline argument')

            # Collect expected args the user did not specify (falsy values)
            method_missing_args = [_a for _a in input_args['args'] if not pipeline_args[_a.replace('-', '_')]]

            # Skip this method if any of its arguments are missing
            if method_missing_args: continue

            if 'standardize' in method_args:

                # Deep-copy so template substitution does not mutate the config
                std_args = copy.deepcopy(method_args['standardize'])

                # Substitute pipeline arg values into string templates
                # NOTE(review): replace('-', '_') rewrites the whole string,
                # not just the {placeholder} names — confirm literal dashes
                # never need to survive here
                for std_arg, arg_params in std_args['args'].items():
                    if not isinstance(arg_params, str): continue
                    std_args['args'][std_arg] = arg_params.replace('-', '_').format(**pipeline_args)

                # Standardize the input (side effect: writes standardized files)
                standardizeInput(**std_args)

                # Assign the method paths
                method_paths = returnPaths(**std_args)

                # Check for method paths
                if len(method_paths) > 0:

                    # Check if the args have already been created
                    if 'singularity-args' not in process_dict:
                        process_dict['singularity-args'] = defaultdict(list)

                    # Record the paths singularity must bind-mount
                    process_dict['singularity-args']['bind'].extend(method_paths)

            if 'samples' in method_args:

                # Deep-copy so template substitution does not mutate the config
                samples_args = copy.deepcopy(method_args['samples'])

                # Substitute pipeline arg values into string templates
                for samples_arg, arg_params in samples_args['args'].items():
                    if not isinstance(arg_params, str): continue
                    samples_args['args'][samples_arg] = arg_params.replace('-', '_').format(**pipeline_args)

                # Assign the samples from the method
                method_samples = returnSamples(**samples_args)

                # Confirm the samples are not already assigned
                if method_samples and 'samples' in process_dict: raise Exception(f'Samples already assigned')

                # Store the samples
                process_dict['samples'] = method_samples

    return process_dict
111
+
112
def processPipelineCmdLine (pipeline_cmd_line, pipeline_args):
    """Build the snakemake command line for the selected pipeline.

    pipeline_cmd_line maps option names to values: booleans become flags,
    anything else becomes ``--option value``. When 'use-singularity' is
    enabled and pipeline_args contains 'singularity-args' (a mapping of
    option name to a list of values), a quoted ``--singularity-args``
    string is appended. Returns the full command as a single string.
    """

    # Create list to store the command line arguments
    cmd_line_list = ['snakemake']

    # Process the command line
    for cmd_arg, cmd_value in pipeline_cmd_line.items():

        # Boolean values are flags: emit the flag only when True.
        # (Bug fix: previously a False value still emitted the flag.)
        if isinstance(cmd_value, bool):
            if cmd_value: cmd_line_list.append(f'--{cmd_arg}')
        else: cmd_line_list.extend([f'--{cmd_arg}', cmd_value])

        # Check if using singularity
        if cmd_arg == 'use-singularity' and cmd_value and 'singularity-args' in pipeline_args:

            # Assign the singularity args
            singularity_args_list = []
            for singularity_arg, singularity_value in pipeline_args['singularity-args'].items():
                singularity_args_list.append(f'--{singularity_arg} ' + ','.join(singularity_value))

            # Separate distinct singularity options with spaces; commas only
            # separate multiple values of a single option (e.g. --bind a,b).
            # (Bug fix: options were previously comma-joined together.)
            singularity_args_str = ' '.join(singularity_args_list)
            cmd_line_list.extend(['--singularity-args', f'"{singularity_args_str}"'])

    return ' '.join(map(str, cmd_line_list))
@@ -0,0 +1,51 @@
1
+ import logging
2
+ import sys
3
+
4
def startLogger (log_filename = None, filemode = 'w'):
    """Configure root logging for a pipemake run.

    INFO-and-above records go to log_filename (or stdout when no filename
    is given); a second console handler echoes WARNING-and-above. Also
    installs a sys.excepthook that logs uncaught exceptions.
    """

    # Detach and close any handlers left over from a previous configuration
    for old_handler in list(logging.root.handlers):
        old_handler.close()
        logging.root.removeHandler(old_handler)

    # Shared record layout for the primary INFO-level destination
    shared_config = {
        'level': 'INFO',
        'format': '%(asctime)s - %(levelname)s: %(message)s',
        'datefmt': '%Y-%m-%d %H:%M:%S',
    }

    # Route the primary log to a file when given, otherwise to stdout
    if log_filename:
        logging.basicConfig(filename = log_filename, filemode = filemode, **shared_config)
    else:
        logging.basicConfig(stream = sys.stdout, **shared_config)

    # Secondary console handler restricted to warnings and errors
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.WARNING)
    console_handler.setFormatter(logging.Formatter('%(funcName)s - %(levelname)s: %(message)s'))
    logging.getLogger('').addHandler(console_handler)

    # Route uncaught exceptions through the logger before the process dies
    def expHandler(etype,val,tb):

        # Log the error
        logging.error("%s" % (val), exc_info=(etype,val,tb))

    sys.excepthook = expHandler
38
+
39
def logArgDict (arg_dict, print_undefined = False, omit = None):
    """Log each argument/value pair in arg_dict at INFO level.

    Arguments whose value is None are skipped unless print_undefined is
    True; arguments named in omit are always skipped.
    """

    # Avoid the shared-mutable-default pitfall (omit=[] was shared
    # across calls); an empty tuple is an equivalent immutable default
    omit = () if omit is None else omit

    # Loop the arguments
    for arg, value in arg_dict.items():

        # Skip arg if in omit list
        if arg in omit: continue

        # Report only defined arguments, unless print_undefined is True
        if value is None and not print_undefined: continue

        # Lazy %-style args avoid formatting work when INFO is disabled
        logging.info('Argument %s: %s', arg, value)
@@ -0,0 +1,289 @@
1
+ #!/usr/bin/env python
2
+
3
+ import os
4
+ import sys
5
+ import yaml
6
+ import random
7
+ import string
8
+ import logging
9
+ import argparse
10
+ import datetime
11
+
12
+ from pydoc import locate
13
+ from collections import defaultdict
14
+
15
+ from pipemake.logger import *
16
+ from pipemake.config import *
17
+ from pipemake.snakemakeIO import SnakePipelineIO
18
+
19
def jobRandomString (num_chars = 4):
    """Return a cached random job ID of num_chars digits/uppercase letters.

    The string is generated once per process; subsequent calls return the
    cached value (num_chars is ignored after the first generation).
    """
    global random_string

    # Generate the ID only on the first call; reuse the cached value after
    if not random_string:
        id_alphabet = string.digits + string.ascii_uppercase
        random_string = ''.join(random.choices(id_alphabet, k = num_chars))

    return random_string
26
+
27
def jobTimeStamp ():
    """Return a cached YYYY-MM-DD timestamp for the current job.

    The timestamp is captured once per process and reused afterwards.
    """
    global time_stamp

    # Capture the date only on the first call; reuse the cached value after
    if not time_stamp:
        time_stamp = datetime.datetime.now().strftime("%Y-%m-%d")

    return time_stamp
34
+
35
def pipeline_parser (config_parser_pipelines):
    """Build and run the pipemake command-line parser.

    config_parser_pipelines maps each pipeline name to its parser
    configuration ('help' text and 'arg-groups'). One subcommand is
    created per pipeline, shared optional arguments are appended, and the
    parsed command line (from sys.argv) is returned as a dict.
    """

    # Change default behavior on error: print help and exit with status 2
    class MyParser(argparse.ArgumentParser):
        def error(self, message):
            sys.stderr.write('error: %s\n\n' % message)
            self.print_help()
            sys.exit(2)

    # Remove the metavar line argparse prints above the subcommand listing
    class SubcommandHelpFormatter(argparse.RawDescriptionHelpFormatter):
        def _format_action(self, action):
            parts = super(argparse.RawDescriptionHelpFormatter, self)._format_action(action)
            if action.nargs == argparse.PARSER: parts = "\n".join(parts.split("\n")[1:])
            return parts

    def confirmDir ():
        '''Custom action to confirm directory exists'''
        class customAction(argparse.Action):
            def __call__(self, parser, args, value, option_string=None):
                if not os.path.isdir(value):
                    raise IOError(f'Unable to find directory: {value}')
                setattr(args, self.dest, value)
        return customAction

    def confirmFile ():
        '''Custom action to confirm file exists'''
        class customAction(argparse.Action):
            def __call__(self, parser, args, value, option_string=None):
                if not os.path.isfile(value):
                    raise IOError(f'Unable to find file: {value}')
                setattr(args, self.dest, value)
        return customAction

    # NOTE(review): createArgGroup is never called below — argument groups
    # are created directly. Kept as-is pending confirmation it can be removed.
    def createArgGroup (parser, group_args):
        '''Create the argument group'''

        # Set the group argument, if not given
        if 'group' not in group_args: group_args['group'] = {}

        # Set the argument type, if not given
        if 'type' not in group_args['group']: group_args['group']['type'] = 'argument_group'

        # Set if the group is required, if not given
        if 'required' not in group_args['group']: group_args['group']['required'] = False

        # Set the group label, if not given
        if 'label' not in group_args['group']: group_args['group']['label'] = pipeline_arg_group

        # Create the argument group
        if group_args['group']['type'] == 'argument_group':
            return parser.add_argument_group(f"{pipeline_name} {group_args['group']['label']} arguments")

        # Create the mutually exclusive group
        elif group_args['group']['type'] == 'mutually_exclusive':
            arg_group = parser.add_argument_group(f"{pipeline_name} {group_args['group']['label']} arguments")
            return arg_group.add_mutually_exclusive_group(required = group_args['group']['required'])

        # Report an error if the group type is not supported
        # (Bug fix: previously read the missing key group_args["type"])
        else: raise Exception(f"Group type not supported: {group_args['group']['type']}")

    # Create the pipeline selection parser
    pipeline_parser = MyParser(formatter_class = SubcommandHelpFormatter)
    pipeline_parser._positionals.title = "Pipeline selection argument options (positional)"

    # Create the subparsers
    pipeline_subparsers = pipeline_parser.add_subparsers(dest = 'pipeline', required = True)

    # Assign the arguments for each pipeline
    for pipeline_name, pipeline_params in config_parser_pipelines.items():

        # Create the subparser
        pipeline_subparser = pipeline_subparsers.add_parser(pipeline_name, help = pipeline_params['help'], add_help = False)

        pipeline_arg_groups = {}

        # Add the required argument group
        pipeline_arg_groups['required'] = pipeline_subparser.add_argument_group(f"{pipeline_name} required arguments")

        # Loop the pipeline groups
        for pipeline_arg_group in pipeline_params['arg-groups']:

            # Create the argument group, if not the basic subparsers
            if pipeline_arg_group != 'basic':

                # Check if the group already exists, then raise an exception if it does
                if pipeline_arg_group in pipeline_arg_groups: raise Exception(f'Cannot create group {pipeline_arg_group}. Group already exists.')

                # Create the argument group
                pipeline_arg_groups[pipeline_arg_group] = pipeline_subparser.add_argument_group(f"{pipeline_name} {pipeline_arg_group} arguments")

        # Add the optional argument group
        pipeline_arg_groups['optional'] = pipeline_subparser.add_argument_group(f"{pipeline_name} optional arguments")

        # Loop the pipeline mutually exclusive groups
        for pipeline_arg_group, group_args in pipeline_params['arg-groups'].items():
            if 'mutually-exclusive-groups' not in group_args: continue

            # Loop the mutually exclusive groups
            for me_group_name, me_group_args in group_args['mutually-exclusive-groups'].items():

                # Confirm the mutually exclusive group has not been created
                if me_group_name in pipeline_arg_groups: raise Exception(f'Cannot create mutually exclusive group {me_group_name}. Group already exists.')

                # Check if the group is required, if not given
                if 'required' not in me_group_args: me_group_args['required'] = False

                # Create the mutually exclusive group: required basic args go
                # in the required group, other basic args in optional, and
                # non-basic args in their own named group
                if pipeline_arg_group == 'basic' and me_group_args['required']:
                    pipeline_arg_groups[me_group_name] = pipeline_arg_groups['required'].add_mutually_exclusive_group(required = me_group_args['required'])
                elif pipeline_arg_group == 'basic':
                    pipeline_arg_groups[me_group_name] = pipeline_arg_groups['optional'].add_mutually_exclusive_group(required = me_group_args['required'])
                elif pipeline_arg_group != 'basic' and pipeline_arg_group in pipeline_arg_groups:
                    # Bug fix: previously attached to the 'optional' group,
                    # so the group's arguments displayed under the wrong heading
                    pipeline_arg_groups[me_group_name] = pipeline_arg_groups[pipeline_arg_group].add_mutually_exclusive_group(required = me_group_args['required'])
                else: raise Exception(f'Unable to assign mutually exclusive group: {me_group_name}')

        # Loop the pipeline arguments
        for pipeline_arg_group, group_args in pipeline_params['arg-groups'].items():

            '''2. Add the arguments to the argument subparser'''

            # Loop the arguments in the group
            for pipeline_arg_name, arg_args in group_args['args'].items():

                # Resolve the datatype name to the actual type.
                # Bug fix: locate() returns None for unknown names rather than
                # raising, so the old try/except silently produced type=None.
                if 'type' in arg_args:
                    located_type = locate(arg_args['type'])
                    if located_type is None: raise Exception(f"Unable to locate type: {arg_args['type']}")
                    arg_args['type'] = located_type

                # Configure the action parameter, if specified
                if 'action' in arg_args:
                    if arg_args['action'] == 'confirmDir': arg_args['action'] = confirmDir()
                    elif arg_args['action'] == 'confirmFile': arg_args['action'] = confirmFile()

                # Configure the default params, if specified
                if 'default' in arg_args and isinstance(arg_args['default'], dict):
                    default_str = arg_args['default']['str']
                    if 'suffix' in arg_args['default']:
                        if isinstance(arg_args['default']['suffix'], str): arg_args['default']['suffix'] = [arg_args['default']['suffix']]
                        for suffix in arg_args['default']['suffix']:

                            # Add underscores if needed
                            if default_str: default_str += '_'

                            # Process suffix strings
                            if isinstance(suffix, str): default_str += suffix

                            # Process suffix dicts
                            elif isinstance(suffix, dict):

                                # Convert the suffix dict to lowercase keys
                                suffix_dict = {_k.lower():_v for _k, _v in suffix.items()}

                                if 'function' not in suffix_dict: raise Exception(f"Suffix dict not supported: {suffix_dict}")

                                # Bug fix: read from suffix_dict (lowercased
                                # keys), not the original suffix mapping
                                if suffix_dict['function'] == 'jobTimeStamp': default_str += jobTimeStamp()
                                elif suffix_dict['function'] == 'jobRandomString': default_str += jobRandomString()
                                else: raise Exception(f"Function not supported: {suffix_dict['function']}")

                    arg_args['default'] = default_str

                # Assign the argument to a mutually exclusive group, if applicable
                if 'mutually-exclusive' in arg_args:
                    me_group = arg_args['mutually-exclusive']
                    del arg_args['mutually-exclusive']
                    pipeline_arg_groups[me_group].add_argument(f'--{pipeline_arg_name}', **arg_args)

                # Assign the argument to its respective group, if applicable
                elif pipeline_arg_group != 'basic' and pipeline_arg_group in pipeline_arg_groups:
                    pipeline_arg_groups[pipeline_arg_group].add_argument(f'--{pipeline_arg_name}', **arg_args)

                # Assign the argument to the required group, if basic and required
                elif pipeline_arg_group == 'basic' and 'required' in arg_args:
                    pipeline_arg_groups['required'].add_argument(f'--{pipeline_arg_name}', **arg_args)

                # Assign the argument to the optional group, if basic and not required
                elif pipeline_arg_group == 'basic':
                    pipeline_arg_groups['optional'].add_argument(f'--{pipeline_arg_name}', **arg_args)

                # Report an error if the group is not supported
                else:
                    raise Exception(f'Argument group not supported: {pipeline_arg_group}')

        # Add the common optional arguments, but at the end of the list
        pipeline_arg_groups['optional'].add_argument('--scale-threads', help = 'Scale the threads for each task', type = float, default = 1.0)
        pipeline_arg_groups['optional'].add_argument('--scale-mem', help = 'Scale the memory (RAM) for each task', type = float, default = 1.0)
        pipeline_arg_groups['optional'].add_argument('--resource-yml', help = 'Create a separate resource yaml', action = 'store_true')
        pipeline_arg_groups['optional'].add_argument('--singularity-dir', help = 'Assign different directory of singularity images', type = str, default = '/Genomics/argo/users/aewebb/.local/images/')
        pipeline_arg_groups['optional'].add_argument('-h', '--help', action = 'help', help = 'show this help message and exit')

    return vars(pipeline_parser.parse_args())
226
+
227
# Module-level caches used by jobRandomString() and jobTimeStamp() so the
# job ID and timestamp are generated only once per process
random_string = None
time_stamp = None
230
+
231
def main():
    """Entry point: build a snakemake pipeline from the selected config.

    Loads pipeline configs (from $KPDIR or ./pipelines), parses the command
    line, runs the pipeline setup, writes the snakemake pipeline/config
    files, and prints the snakemake command to run.
    """

    # Assign the pipeline directory (environment override, else ./pipelines)
    if os.environ.get('KPDIR'): pipeline_storage_dir = os.environ.get('KPDIR')
    else: pipeline_storage_dir = 'pipelines'

    # Confirm the pipeline directory exists
    if not os.path.isdir(pipeline_storage_dir): raise Exception(f'Unable to find pipeline directory: {pipeline_storage_dir}')

    # Load the configs
    pipeline_config_args, pipeline_setup, pipeline_cmd_line, pipeline_snakefiles = loadPipelineConfigs(pipeline_storage_dir)

    # Parse the arguments from the configs
    pipeline_args = pipeline_parser(pipeline_config_args)

    # Update the pipeline args with the pipeline directory
    pipeline_args['pipeline_storage_dir'] = pipeline_storage_dir

    # Check that a pipeline was assigned
    if not pipeline_args['pipeline']: raise Exception(f'No pipeline specified')

    # Create the working directory
    # NOTE(review): assumes every pipeline defines a work-dir argument
    if not os.path.exists(pipeline_args['work_dir']): os.makedirs(pipeline_args['work_dir'])

    # Create the (hidden) pipeline job directory
    pipeline_args['pipeline_job_dir'] = os.path.join(pipeline_args['work_dir'], f'.pipeline')
    if not os.path.exists(pipeline_args['pipeline_job_dir']): os.makedirs(pipeline_args['pipeline_job_dir'])

    # Start logger and log the arguments
    startLogger(os.path.join(pipeline_args['pipeline_job_dir'], f'pipeline.log'))
    logArgDict(pipeline_args, omit = ['pipeline_job_dir'])

    # Process the pipeline setup (may standardize inputs as a side effect)
    setup_arg_dict = processPipelineSetup(pipeline_setup[pipeline_args['pipeline']], pipeline_args)

    # Update the pipeline args if the setup created new args
    if setup_arg_dict: pipeline_args.update(setup_arg_dict)

    # Create the snakemake pipeline
    snakemake_pipeline = SnakePipelineIO.open(**pipeline_args)

    # Add the snakemake modules to the pipeline
    for smkm_filename in pipeline_snakefiles[pipeline_args['pipeline']]:
        snakemake_pipeline.addModule(smkm_filename)

    # Create the snakemake config file
    snakemake_pipeline.writeConfig(pipeline_args)

    # Create the snakemake pipeline file
    snakemake_pipeline.writePipeline()

    # Close the snakemake pipeline
    snakemake_pipeline.close()

    # Print the snakemake command line for the user to run
    print(processPipelineCmdLine(pipeline_cmd_line[pipeline_args['pipeline']], pipeline_args))
287
+
288
# Script entry point
if __name__ == '__main__':
    main()
@@ -0,0 +1,66 @@
1
+ from pipemake.seqIO import SeqFileIO, SeqTableIO
2
+ from pipemake.wildcardIO import WildcardIO
3
+
4
class ProcessIO ():
    """Adapter exposing a uniform interface over the IO backends.

    Wraps a WildcardIO, SeqTableIO, or SeqFileIO instance and delegates
    standardize/returnSamples/returnPaths to it; standardize_func names
    the backend method used for standardization.
    """

    def __init__ (self, processIO, standardize_func = None, **kwargs):
        # Fail early with a readable message; getattr(obj, None) would
        # otherwise raise an opaque "attribute name must be string" TypeError
        if standardize_func is None:
            raise TypeError('standardize_func must name a method of the processIO object')

        self.processIO = processIO
        self.standardize_call = getattr(self.processIO, standardize_func)
        self.samples = []

    @classmethod
    def fromWildcardStr (cls, wildcard_str = '', **kwargs):
        """Wrap a WildcardIO built from a wildcard string."""
        return cls(WildcardIO.fromStr(wildcard_str, **kwargs), standardize_func = 'standardizedFiles')

    @classmethod
    def fromTableFile (cls, table_filename = '', **kwargs):
        """Wrap a SeqTableIO built from a table filename."""
        return cls(SeqTableIO.fromFilenameStr(table_filename, **kwargs), standardize_func = 'standardizedFiles')

    @classmethod
    def fromFileStr (cls, input_filename = '', **kwargs):
        """Wrap a SeqFileIO built from a single input filename."""
        return cls(SeqFileIO.create(input_filename, **kwargs), standardize_func = 'standardize')

    def standardize (self, standardized_filename = '', **kwargs):
        """Standardize the wrapped input to standardized_filename."""
        self.standardize_call(standardized_filename, **kwargs)

    def returnSamples (self, **kwargs):
        """Return the samples reported by the backend (kwargs are ignored)."""
        return self.processIO.returnSamples()

    def returnPaths (self, **kwargs):
        """Return the filesystem paths reported by the backend."""
        return self.processIO.returnPaths(**kwargs)
30
+
31
+
32
def standardizeInput (method = '', args = None):
    """Standardize an input via the backend selected by ``method``.

    method must be one of 'wildcard-str', 'table-file', or 'file-str';
    args holds the keyword arguments for both construction and
    standardization. Raises an Exception for any other method.
    """

    # Avoid the shared-mutable-default pitfall (args={} was shared across calls)
    args = {} if args is None else args

    # Create the standardization call
    if method == 'wildcard-str': standardize_input_call = ProcessIO.fromWildcardStr(**args)
    elif method == 'table-file': standardize_input_call = ProcessIO.fromTableFile(**args)
    elif method == 'file-str': standardize_input_call = ProcessIO.fromFileStr(**args)
    else: raise Exception(f'No standardization method given for: {method}')

    # Standardize the input
    standardize_input_call.standardize(**args)
42
+
43
def returnPaths (method = '', args = None):
    """Return the filesystem paths for the backend selected by ``method``.

    method must be one of 'wildcard-str', 'table-file', or 'file-str';
    args holds the keyword arguments for construction and the path query.
    Raises an Exception for any other method.
    """

    # Avoid the shared-mutable-default pitfall (args={} was shared across calls)
    args = {} if args is None else args

    # Create the standardization call
    if method == 'wildcard-str': return_path_call = ProcessIO.fromWildcardStr(**args)
    elif method == 'table-file': return_path_call = ProcessIO.fromTableFile(**args)
    elif method == 'file-str': return_path_call = ProcessIO.fromFileStr(**args)
    else: raise Exception(f'No standardization method given for: {method}')

    return return_path_call.returnPaths(**args)
52
+
53
def returnSamples (method = '', args = None):
    """Return the sample names for the backend selected by ``method``.

    method must be 'wildcard-str' or 'table-file' ('file-str' is not
    implemented); args holds the keyword arguments for construction.
    Raises an Exception for any other method.
    """

    # Avoid the shared-mutable-default pitfall (args={} was shared across calls)
    args = {} if args is None else args

    # Create the return samples call
    if method == 'wildcard-str': return_samples_call = ProcessIO.fromWildcardStr(**args)
    elif method == 'table-file': return_samples_call = ProcessIO.fromTableFile(**args)
    elif method == 'file-str': raise Exception('Not implemented')
    else: raise Exception(f'No standardization method given for: {method}')

    return return_samples_call.returnSamples()
62
+
63
+
64
+
65
+
66
+