uuPythonlab 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pythonlab/__init__.py ADDED
@@ -0,0 +1,5 @@
1
+ """Top-level package for pythonLab."""
2
+
3
+ __author__ = """mark doerr"""
4
+ __email__ = "mark.doerr@uni-greifswald.de"
5
+ __version__ = "0.2.4"
pythonlab/process.py ADDED
@@ -0,0 +1,141 @@
1
+ """_____________________________________________________________________
2
+
3
+ :PROJECT: pythonLab
4
+
5
+ *process base class*
6
+
7
+ :details:
8
+
9
+ :authors: mark doerr (mark@uni-greifswald.de)
10
+ Stefan Maak
11
+
12
+ :date: (creation) 20210410
13
+
14
+ ________________________________________________________________________
15
+
16
+ """
17
+
18
+ import logging
19
+ from enum import Enum
20
+ from typing import List
21
+
22
+ from abc import ABC, abstractmethod
23
+
24
+ # should be done through plugin system
25
+ from pythonlab.resource import ServiceResource, LabwareResource
26
+
27
+
28
class ExecutionConstraint(Enum):
    """Constraint on when a process step may be executed."""

    immediate = 1
    parallel = 2
31
+
32
+
33
class PLProcess(ABC):
    """Abstract base class for a pythonLab process.

    A concrete process implements :meth:`create_resources`,
    :meth:`init_service_resources` and :meth:`process`. Resources register
    themselves with the process through the ``register_*`` methods while
    :meth:`create_resources` runs during construction.
    """

    def __init__(self, priority=10):
        """
        :param priority: process priority; 0 has the highest priority
        """
        self._priority = priority  # 0 has the highest priority
        # fixed: _service_resources was initialized twice; a single assignment suffices
        self._service_resources = []
        self._labware_resources = []
        self._substance_resources = []
        self._data_resources = []
        # indicates that the samples have to be started in the same order as they are added
        self.preserve_order = False

        self.labware_nodes = {}  # dict labware_name --> labware_start_node

        self.create_resources()
        self.init_service_resources()

    @abstractmethod
    def create_resources(self):
        """Create and register all resources used by this process."""
        raise NotImplementedError

    @abstractmethod
    def init_service_resources(self):
        """calling all init routines.
        If no additional code is required,
        this can be called, using
        super().init_service_resources() in an overwriting method.
        """
        for resource in self._service_resources:
            resource.init()

    def set_starting_position(
        self, resource: "LabwareResource", device: "ServiceResource", position: int
    ):
        """
        This method gets called when setting the starting position of a labware resource. It exists to be overwritten.
        :param resource: labware whose start position is being set
        :param device: the service resource (device) that holds the labware
        :param position: position index on the device
        :return: None
        """

    def add_process_step_priorities(self):
        """we add waiting costs to all jobs, so these will be prioritised by scheduler
        should we also add waiting costs before the start, so they will be started first?

        NOTE(review): currently a no-op hook; intended behaviour to be confirmed.
        """

    @abstractmethod
    def process(self):
        """The actual process description; must be implemented by subclasses."""
        raise NotImplementedError

    def add_process_step(
        self,
        service: "ServiceResource",
        labware: "List[LabwareResource]",
        is_movement: bool = False,
        **kwargs,
    ):
        """
        This Method will be overwritten when the process is parsed. It is designed for service resources to add
        process steps to the workflow.
        :param service: the service resource executing the step
        :param labware: labware items participating in the step
        :param is_movement: whether this step is a transport/movement step
        :param kwargs: additional step attributes
        :return: None
        """
        # todo: extend description since this is key to parsing and implementation on new service resources

    def register_service_resource(self, resource):
        """Register a service resource with this process."""
        logging.debug(f"reg service. res: {resource.name}")
        self._service_resources.append(resource)

    def register_labware_resource(self, resource):
        """Register a labware resource with this process."""
        logging.debug(f"reg labware res: {resource.name}")
        self._labware_resources.append(resource)

    def register_substance_resource(self, resource):
        """Register a substance resource with this process."""
        logging.debug(f"reg subst res: {resource.name}")
        self._substance_resources.append(resource)

    def register_data_resource(self, resource):
        """Register a data resource with this process."""
        logging.debug(f"reg data res: {resource.name}")
        self._data_resources.append(resource)

    def shutdown_resources(self):
        """Shut down all registered service resources."""
        for resource in self._service_resources:
            resource.shutdown()

    @property
    def service_resources(self):
        return self._service_resources

    @property
    def labware_resources(self):
        return self._labware_resources

    @property
    def substance_resources(self):
        return self._substance_resources

    @property
    def data_resources(self):
        return self._data_resources

    @property
    def priority(self):
        return self._priority
@@ -0,0 +1,500 @@
1
+ """
2
+ Module with all necessary utility to parse a PythonLab process into a networkx graph.
3
+ todo: use pydentic to parse it into defined dataclasses
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ import traceback
9
+ import logging
10
+ from abc import ABC
11
+ import graphviz
12
+ import inspect
13
+ import ast
14
+ from uuid import uuid1
15
+ from pathlib import Path
16
+ import sys
17
+ from importlib.util import spec_from_file_location, module_from_spec
18
+ import types
19
+ import networkx as nx
20
+ from pythonlab.process import PLProcess
21
+ from pythonlab.resource import ServiceResource, DynamicLabwareResource, LabwareResource
22
+ from typing import List, Dict, Any, Type
23
+ from copy import deepcopy
24
+ from contextlib import contextmanager
25
+
26
+
27
@contextmanager
def get_source_code(src: str, node: ast.AST) -> str:
    """Context manager yielding the source segment of *node* within *src*.

    Any exception raised either while extracting the segment or inside the
    ``with`` body is logged together with the offending code, then re-raised.

    :param src: full source code the node was parsed from
    :param node: AST node whose source segment is wanted
    """
    # fixed: initialize before the try so the except handler never hits an
    # UnboundLocalError when ast.get_source_segment itself raises
    code = None
    try:
        code = ast.get_source_segment(src, node)
        yield code
    except Exception as ex:
        logging.error(f"Caught Exception {ex} while parsing \n\n{code}\n")
        raise
35
+
36
+
37
def find_pl_process_class(module: types.ModuleType) -> type[PLProcess]:
    """Return the first PLProcess subclass defined directly in *module*.

    Classes merely imported into the module are skipped; only classes whose
    ``__module__`` matches the module's own name qualify.

    :param module: the module object to search
    :raises RuntimeError: if the module defines no PLProcess subclass
    """
    for candidate in module.__dict__.values():
        if not inspect.isclass(candidate):
            continue
        if not issubclass(candidate, PLProcess) or candidate is PLProcess:
            continue
        # ensure the class is defined in this file, not imported into it
        if candidate.__module__ != module.__name__:
            continue
        return candidate
    raise RuntimeError(f"No subclass of PLProcess found in module {module.__name__}")
52
+
53
+
54
class PLProcessReader:
    """Parses a PLProcess description (file path, source string, or live
    instance) into a :class:`PLProcessSimulator` that carries the resulting
    workflow graph.

    NOTE(review): the reader relies heavily on ``exec``/``eval`` of the
    process description source — only trusted process code must be passed in.
    """

    @staticmethod
    def parse_process_from_file_path(file_path: str | Path):
        """Import the module at *file_path*, instantiate the PLProcess subclass
        defined in it and parse that instance into a workflow simulator."""
        spec = spec_from_file_location("importing_process", file_path)
        # make modules next to the process file importable while it executes
        sys.path.append(Path(file_path).parent.as_posix())
        module = module_from_spec(spec)
        sys.modules[spec.name] = module
        spec.loader.exec_module(module)

        # find the class implementing PLProcess
        process_class = find_pl_process_class(module)
        process = process_class()

        # call the parsing from instance method
        return PLProcessReader.parse_process_from_instance(process)

    @staticmethod
    def parse_process_from_source_code(src: str):
        """Build a temporary module from *src*, instantiate its PLProcess
        subclass and parse it; the module's import statements are replayed
        into the parsing scope."""
        module_name = "tmp_module"
        module = types.ModuleType(module_name)
        # Execute the source code in the module's namespace.
        exec(src, module.__dict__)

        # Now, create an instance of the class implementing PLProcess.
        process_class = find_pl_process_class(module)
        process = process_class()

        # retrieve all imports and add them to scope
        scope = {}
        tree = ast.parse(src)
        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                for alias in node.names:
                    stmt = f"import {alias.name}" + (f" as {alias.asname}" if alias.asname else "")
                    exec(stmt, scope)
            elif isinstance(node, ast.ImportFrom):
                imports = ", ".join(
                    f"{alias.name}" + (f" as {alias.asname}" if alias.asname else "")
                    for alias in node.names
                )
                stmt = f"from {node.module} import {imports}"
                exec(stmt, scope)
        return PLProcessReader.parse_process(process, src, scope)

    @staticmethod
    def parse_process_from_instance(plp: PLProcess):
        """Parse an already-constructed process instance, seeding the parsing
        scope with the globals of the module the class was defined in."""
        src = inspect.getsource(type(plp))
        scope = {}
        module_name = plp.__class__.__module__
        module = sys.modules.get(module_name)
        if module:
            scope.update(module.__dict__)
        return PLProcessReader.parse_process(plp, src, scope)

    @staticmethod
    def parse_process(plp: PLProcess, src=None, scope: dict | None = None):
        """
        The main function. It takes a PythonLabProcess and derives a SMProcess from it. The Information is stored
        in Job and LabwareInfo classes from the structures-module.

        :param plp: process instance to parse
        :param src: source code of the process class (read via ``inspect`` if omitted)
        :param scope: initial variable scope for executing process statements
        :return: the populated :class:`PLProcessSimulator`
        """
        if scope is None:
            scope = {}
        if src is None:
            src = inspect.getsource(type(plp))
        p = PLProcessReader.ast_get_process(type(plp), src)

        class Simulator(PLProcessSimulator, type(plp)):
            """This class is necessary to have the simulator inherit the methods of plp dynamically"""
            def __init__(self):
                PLProcessSimulator.__init__(self, type(plp))
        simulator = Simulator()
        # 'self' lets the process body execute unchanged; '_break_nodes'
        # collects break statements encountered while exploring loops
        scope.update({'self': simulator, '_break_nodes': []})
        devices = [key for key, var in vars(simulator).items() if isinstance(var, ServiceResource)]
        PLProcessReader.execute_scope(p.body, scope, [], src, devices, simulator)
        # contract the dummy_nodes used to link the if-conditions and break-nodes correctly
        PLProcessReader.contract_dummys(simulator.workflow)
        return simulator

    @staticmethod
    def ast_get_process(process_type, src=None):
        """Return the ``ast.FunctionDef`` of the class's ``process`` method.

        NOTE(review): assumes the first ClassDef in *src* is the process class
        and that it defines a method literally named ``process``.
        """
        if src is None:
            src = inspect.getsource(process_type)
        module = ast.parse(src)
        cl = [cls for cls in module.body if isinstance(cls, ast.ClassDef)][0]
        fcts = [elem for elem in cl.body if isinstance(elem, ast.FunctionDef)]
        processes = [elem for elem in fcts if elem.name == 'process']
        return processes[0]

    @staticmethod
    def contract_dummys(g: nx.DiGraph):
        """
        This function contracts the dummy nodes in the workflow graph. To keep track of which labware participated in
        which process step throughout exploring if-clauses, break commands, etc. the reader includes dummy nodes
        into the workflow graph. In this function (after the whole process was read) these dummies are removed by
        contraction, i.e., remove them and connect all sources of incoming edges to all targets of outgoing edges.
        """
        dummys = [n for n, data in g.nodes(data=True) if data['type'] == 'dummy']
        for dummy in dummys:
            # buff1 and buff2 are the same as dummy
            for prior, buff1, data_in in g.in_edges(dummy, data=True):
                for buff2, posterior, data_out in g.out_edges(dummy, data=True):
                    label = f"{data_in['label']} {data_out['label']}"
                    # merged edge carries attributes of both sides; out-edge wins on clashes
                    kwargs = data_in.copy()
                    kwargs.update(data_out)
                    kwargs['label'] = label
                    g.add_edge(prior, posterior, **kwargs)
            g.remove_node(dummy)

    @staticmethod
    def execute_scope(body: List[ast.AST], scope: Dict[str, Any], runtime_vars: List[str], src: str,
                      devices: List[str], plp: PLProcessSimulator):
        """
        Goes through the list of code lines and constructs the workflow graph by manipulating the process state and
        executing certain lines of process description code. Meant to be called recursively
        :param plp: the simulator collecting workflow nodes
        :param devices: List of variables that correspond to ServiceResources
        :param src: source code of the process description
        :param body: list of parsed code fragments
        :param scope: variables assigned so far
        :param runtime_vars: list of variable names in current scope that are evaluated at runtime
        :return: Nothing
        """
        # shallow-copy the scope so assignments in nested bodies do not leak out;
        # runtime_vars is intentionally shared so discovered variables propagate upward
        scope = scope.copy()
        for node in body:
            if isinstance(node, ast.Expr):
                PLProcessReader.handle_expr(node, scope, runtime_vars, src)
            if isinstance(node, ast.Assign):
                PLProcessReader.handle_assign(node, scope, runtime_vars, src, devices, plp)
            if isinstance(node, ast.If):
                try:
                    PLProcessReader.handle_if(node, scope, runtime_vars, src, devices, plp)
                except Exception as ex:
                    # NOTE(review): the second argument is the *return value* of
                    # traceback.print_exc() (always None); the traceback is only
                    # printed to stderr, not captured by the logger — confirm intent
                    logging.error(ex, traceback.print_exc())
            if isinstance(node, ast.For):
                PLProcessReader.handle_for(node, scope, runtime_vars, src, devices, plp)
            if isinstance(node, ast.Break):
                PLProcessReader.handle_break(scope, plp)

    @staticmethod
    def handle_expr(expr: ast.Expr, scope: Dict[str, Any], runtime_vars: List[str], src: str):
        """Execute a bare expression statement (e.g. a service call) in *scope*."""
        with get_source_code(src, expr) as code:
            exec(code, scope)

    @staticmethod
    def handle_assign(asg: ast.Assign, scope: Dict[str, Any], runtime_vars: List[str], src: str, devices: List[str],
                      plp: PLProcessSimulator):
        """Classify an assignment as runtime variable, deferred computation, or
        plain parse-time assignment, and record it accordingly."""
        with get_source_code(src, asg) as code:
            # value produced by a device call -> runtime variable
            is_runtime_var = any(f"attr='{d}'" in ast.dump(asg.value) for d in devices)
            # value derived from existing runtime variables -> deferred computation
            is_computation = any(f"id='{v}'" in ast.dump(asg.value) for v in runtime_vars)
            var_names = [t.id for t in asg.targets if isinstance(t, ast.Name)]
            if is_runtime_var:
                exec(ast.get_source_segment(src, asg.value), scope)
                plp.add_var_nodes(*var_names)
                runtime_vars.extend([v for v in var_names if v not in runtime_vars])
            elif is_computation:
                used_vars = filter(lambda v: f"id='{v}'" in ast.dump(asg.value), runtime_vars)
                # capture only the scope entries the expression actually mentions
                needed = {key: val for key, val in scope.items() if key in code}
                plp.add_computation(name=asg.targets[0].id, var_names=used_vars,
                                    fct_code=ast.get_source_segment(src, asg.value), needed_scope=needed.copy())
                runtime_vars.extend([v for v in var_names if v not in runtime_vars])
            else:
                exec(ast.get_source_segment(src, asg), scope)
                # re-assigning a runtime variable with a parse-time value demotes it
                for name in var_names:
                    if name in runtime_vars:
                        runtime_vars.remove(name)

    @staticmethod
    def handle_if(node: ast.If, scope: Dict[str, Any], runtime_vars: List[str], src: str, devices: List[str],
                  plp: PLProcessSimulator):
        """Explore an if-statement: runtime decisions create an if-node and both
        branches are explored; parse-time decisions are evaluated directly."""
        is_runtime_decision = any(f"id='{v}'" in ast.dump(node.test) for v in runtime_vars)
        if is_runtime_decision:
            used_vars = filter(lambda v: f"id='{v}'" in ast.dump(node.test), runtime_vars)
            with get_source_code(src, node.test) as code:
                needed = {key: val for key, val in scope.items() if key in code}
                if_node = plp.add_if_node(used_vars, code, needed)
                origin_state = plp.get_state()
                # explore the true branch first ...
                plp.prepare_true_execution(if_node)
                PLProcessReader.execute_scope(node.body, scope, runtime_vars, src, devices, plp)
                after_true_state = plp.get_state()
                # ... then the false branch, and merge both resulting states
                plp.prepare_false_execution(if_node)
                PLProcessReader.execute_scope(node.orelse, scope, runtime_vars, src, devices, plp)
                plp.join_state(after_true_state)
                if len(scope['_break_nodes']) > 0:
                    last_break = scope['_break_nodes'][0]
                else:
                    last_break = -1
                plp.finalize_if_construction(if_node, origin_state, last_break=last_break)
        else:
            # if this is no runtime decision, we evaluate it and execute accordingly
            with get_source_code(src, node.test) as code:
                decision = eval(code, scope)
                if decision:
                    PLProcessReader.execute_scope(node.body, scope, runtime_vars, src, devices, plp)
                else:
                    PLProcessReader.execute_scope(node.orelse, scope, runtime_vars, src, devices, plp)

    @staticmethod
    def handle_for(node: ast.For, scope: Dict[str, Any], runtime_vars: List[str], src: str, devices: List[str],
                   plp: PLProcessSimulator):
        """Unroll a for-loop at parse time by exploring its body once per item."""
        iter_var = node.target.id
        with get_source_code(src, node.iter) as iter_over:
            # unique temp name so nested loops cannot collide in the shared scope
            unique_int = uuid1().int
            tmp = f"v{unique_int}"
            exec(f"{tmp} = {iter_over}", scope)
            for buff in scope[tmp]:
                # update the iteration variable in scope
                scope[iter_var] = buff
                PLProcessReader.execute_scope(node.body, scope, runtime_vars, src, devices, plp)
        # after finishing everything in for-loop finalize the break-nodes, that occurred in it
        while scope['_break_nodes']:
            # NOTE(review): pop() removes the *last* element (LIFO), while the
            # original comment asked for first-in->first-out — confirm intended order
            break_node = scope['_break_nodes'].pop()
            plp.finalize_break_node(break_node)

    @staticmethod
    def handle_break(scope: Dict[str, Any], plp: PLProcessSimulator):
        """Record a ``break`` statement as a dummy node for later linking."""
        # create a new break-node
        break_node = plp.add_break_node()
        # add it to the hidden list in scope (append adds in the end of the list)
        scope["_break_nodes"].append(break_node)
276
+
277
+
278
class PLProcessSimulator(PLProcess, ABC):
    """
    A utility class to help going through actions in a PythonLabProcess while linking them correctly.
    This is done by systematically recursively exploring the abstract syntax tree and executing all
    functions of ServiceResources while keeping track of the labware. Each call of a function of a ServiceResource
    adds a new process step node to the workflow graph. The edges are determined by consecutive participation of labware
    in two steps.
    """

    def __init__(self, process_type: Type[PLProcess]):
        """
        :param process_type: the concrete process class whose ``__init__`` registers the resources
        """
        self.workflow = nx.DiGraph()  # this graph will represent the whole experiment
        self.last_job = {}  # dictionary providing a list of current nodes of each sample in the workflow graph
        self.last_action = {}  # dictionary providing the last action added id of each labware in the workflow graph
        self.label_to_node = {}  # maps user-supplied step labels to workflow node ids
        process_type.__init__(self)

    def visualize_workflow_graph(self, show=True):
        """Render the workflow graph to ``workflow.png`` via graphviz and return the Digraph."""
        dot = graphviz.Digraph(comment="Workflow")
        dot.attr(rankdir='LR')
        node_col = dict(labware='grey', operation='red', if_node="yellow", variable='blue', computation='cyan')
        for n, data in self.workflow.nodes(data=True):
            dot.node(str(n), data['name'], color=node_col[data['type']], style='filled')
        for u, v in self.workflow.edges():
            dot.edge(str(u), str(v), '')
        dot.format = "png"
        dot.render("workflow", view=show)
        return dot

    def set_starting_position(self, resource: LabwareResource, device: ServiceResource, position: int):
        """Store origin information of *resource* on its labware start node."""
        # the list of last_job is supposed to have only one entry at this point
        start_position = self.last_job[resource.name][0]
        cur_job = self.workflow.nodes[start_position]
        cur_job['origin_pos'] = position
        cur_job['origin'] = device.name
        cur_job['origin_type'] = type(device)
        cur_job['lidded'] = resource.lidded
        if isinstance(resource, DynamicLabwareResource):
            cur_job['is_reagent'] = True
        # copy all keyword arguments into the workflow node
        for key, val in resource.kwargs.items():
            cur_job[key] = val

    def register_labware_resource(self, resource):
        """Register labware with the process and create its start node in the workflow graph."""
        super().register_labware_resource(resource)
        # add a workflow node marking the start of the resources journey
        new_node = self.add_node(dict(type='labware', name=resource.name))
        # these are used in the construction of the wfg
        self.last_job[resource.name] = [new_node]
        self.last_action[resource.name] = -1
        self.labware_nodes[resource.name] = new_node

    def handle_labels(self, t, **kwargs):
        """
        Very preliminary handling of relations between process steps independent of labware
        :param t: workflow node id of the step just added
        :param kwargs: may contain 'label' and 'relations' entries
        :return:
        """
        if "label" in kwargs:
            self.label_to_node[kwargs['label']] = t
            # fixed: use lazy %-formatting; the old call passed the dict as a
            # positional arg to a format string without placeholders
            logging.debug("label_to_node: %s", self.label_to_node)
        edges = {}
        if "relations" in kwargs:
            for relation, label, args in kwargs['relations']:
                node = self.label_to_node[label]
                # fixed: the first argument to logging.debug must be the format string
                logging.debug("relation=%s label=%s node=%s args=%s", relation, label, node, args)
                if node not in edges:
                    edges[node] = dict(wait_cost=1, label=relation, max_wait=float('inf'))
                if relation == "direct_after":
                    edges[node]['wait_cost'] = 200
                if relation == "min_wait":
                    edges[node]['min_wait'] = args[0]
                if relation == "max_wait":
                    edges[node]['max_wait'] = args[0]
        for node, data in edges.items():
            self.add_edge(node, t, **data)

    def add_process_step(self, resource: ServiceResource, labware: List[LabwareResource], is_movement: bool = False, **kwargs):
        """Add an 'operation' node for a service call and connect it to each labware's previous nodes."""
        if "executor" not in kwargs:
            kwargs['executor'] = []
        # if a certain device shall execute this operation, we add its name to the kwargs
        if resource.name in [r.name for r in self._service_resources]:
            kwargs['executor'].append(resource)
        node_attr = dict(cont_names=[c.name for c in labware], type='operation',
                         name=f"{kwargs['fct']} {', '.join([c.name for c in labware])}",
                         device_type=type(resource))
        node_attr.update(kwargs)
        t = self.add_node(node_attr)
        self.handle_labels(t, **kwargs)
        if "reagents" in kwargs:
            labware = labware + kwargs["reagents"]
        for labware_piece in labware:
            # consume the one-shot wait constraints set on the labware
            max_wait = labware_piece.consume_max_wait()
            min_wait = labware_piece.consume_min_wait()
            wait_cost = labware_piece.consume_wait_cost()
            last = self.last_job[labware_piece.name]

            edge_attr = dict(wait_cost=wait_cost,
                             cont_name=labware_piece.name,
                             label='',
                             max_wait=max_wait if max_wait is not None else float('inf'))
            # if this is a starting step, these are wait_to_start_costs
            if any(self.workflow.nodes[l]['type'] == "labware" for l in last):
                self.workflow.nodes[t]["wait_to_start_costs"] = wait_cost

            if min_wait:
                edge_attr['min_wait'] = min_wait
            for s in last:
                self.add_edge(s, t, **edge_attr)
            self.last_job[labware_piece.name] = [t]
            self.last_action[labware_piece.name] = t

    def add_var_nodes(self, *args):
        """Create a 'variable' node for each name produced by the last added step."""
        last_node = list(self.workflow.nodes)[-1]
        for var_name in args:
            new_node = self.add_node(dict(name=var_name, type='variable', var_name=var_name))
            self.add_edge(last_node, new_node, label='out')

    def add_computation(self, name, var_names, fct_code, needed_scope):
        """Add a 'computation' node whose function is evaluated at runtime from *fct_code*."""
        # this function will be called at runtime to execute the computation
        def fct(**kwargs):
            my_scope = needed_scope.copy()
            my_scope.update(kwargs)
            return eval(fct_code, my_scope)

        new_node = self.add_node(dict(type='computation', function=fct, name=name, var_name=name))
        # link the node to its needed variables
        # fixed: loop variable renamed so it no longer shadows the 'name' parameter
        for var_name in var_names:
            # take the latest of matching variable nodes
            var_nodes = [n for n, data in self.workflow.nodes(data=True)
                         if data['type'] in ['variable', 'computation'] and data['name'] == var_name]
            var_node = var_nodes[-1]
            self.add_edge(var_node, new_node, label='in')

    def add_node(self, attr):
        """Add a node with attributes *attr*; node ids are consecutive integers."""
        n = self.workflow.number_of_nodes()
        self.workflow.add_nodes_from([(n, attr)])
        return n

    def add_edge(self, s, t, **kwargs):
        """Add edge s -> t carrying the given attributes."""
        self.workflow.add_edges_from([(s, t, kwargs)])

    def prepare_true_execution(self, if_node):
        """Reroute all labware through the if-node's true dummy before exploring the true branch."""
        true_dummy = self.workflow.nodes[if_node]["true_dummy"]
        for name, state in self.last_job.items():
            self.last_job[name] = [true_dummy]

    def prepare_false_execution(self, if_node):
        """Reroute all labware through the if-node's false dummy before exploring the false branch."""
        false_dummy = self.workflow.nodes[if_node]["false_dummy"]
        for name, state in self.last_job.items():
            self.last_job[name] = [false_dummy]

    # these utility functions are used for example in parsing if-statements
    def get_state(self):
        """Return a shallow copy of the current per-labware node state."""
        return self.last_job.copy()

    def set_state(self, status):
        """Replace the current per-labware node state with a copy of *status*."""
        self.last_job = status.copy()

    def join_state(self, status):
        """Merge *status* into the current state (per-labware union of nodes)."""
        for n, l in self.last_job.items():
            l.extend(status[n])
            self.last_job[n] = list(set(l))

    def add_if_node(self, var_names, decision_code, needed_scope):
        """Add an 'if_node' with true/false dummy successors; its function evaluates *decision_code* at runtime."""
        # this function will be called at runtime to evaluate the decision
        def fct(**kwargs):
            my_scope = needed_scope.copy()
            my_scope.update(kwargs)
            return eval(decision_code, my_scope)

        true_dummy = self.add_node(dict(type="dummy", name="true dummy"))
        false_dummy = self.add_node(dict(type="dummy", name="false dummy"))
        new_node = self.add_node(dict(type="if_node", name=decision_code, function=fct, true_dummy=true_dummy,
                                      false_dummy=false_dummy))

        self.add_edge(new_node, true_dummy, sub_tree=True, label="True")
        self.add_edge(new_node, false_dummy, sub_tree=False, label="False")

        # link the node to its needed variables
        for var_name in var_names:
            # take the latest of matching variable nodes
            var_nodes = [n for n, data in self.workflow.nodes(data=True)
                         if data['type'] in ['variable', 'computation'] and data['name'] == var_name]
            var_node = var_nodes[-1]
            self.add_edge(var_node, new_node, label='in')
        return new_node

    def finalize_if_construction(self, if_node, orig_state, last_break=-1):
        """Wire labware that was untouched in both branches back to its pre-if state,
        or defer that wiring to a break node inside the if clause."""
        for cont, last in self.last_job.items():
            # check whether some actions have been added to this labware since the if_node creation
            if self.last_action[cont] < if_node:
                # check for break nodes in the scope of this if clause
                if last_break > if_node:
                    self.workflow.nodes[last_break]['if_nodes'].insert(0, (orig_state[cont], if_node, cont))
                else:
                    self.last_job[cont] = orig_state[cont].copy()
            else:
                for old in orig_state[cont]:
                    self.add_edge(old, if_node, label='', cont_name=cont)

    def add_break_node(self):
        """Add a dummy node snapshotting the current state at a ``break`` statement."""
        # the only thing a break node needs is a copy of the current state
        new_node = self.add_node(dict(type="dummy", name='break', cur_state=deepcopy(self.last_job), if_nodes=[]))
        for cont in self.last_job:
            self.last_job[cont] = []
        return new_node

    def finalize_break_node(self, break_node):
        """After the enclosing loop is fully explored, connect the break node into the graph."""
        # check for every labware whether something has changed since creating the break node
        before_break = self.workflow.nodes[break_node]['cur_state']
        for cont, last_action in self.last_action.items():
            if last_action > break_node:
                for old in before_break[cont]:
                    self.add_edge(old, break_node, label='', cont_name=cont)
                self.last_job[cont].append(break_node)
            else:
                self.last_job[cont] = before_break[cont].copy()
        for orig_state, if_node, cont in self.workflow.nodes[break_node]['if_nodes']:
            if self.last_action[cont] > if_node:
                for old in orig_state:
                    self.add_edge(old, if_node, label='', cont_name=cont)
            else:
                self.last_job[cont] = orig_state.copy()
pythonlab/resource.py ADDED
@@ -0,0 +1,248 @@
1
+ """_____________________________________________________________________
2
+
3
+ :PROJECT: pythonLab
4
+
5
+ *resource base classes*
6
+
7
+ :details:
8
+
9
+ :authors: mark doerr (mark@uni-greifswald.de)
10
+
11
+ :date: (creation) 20210410
12
+
13
+ ________________________________________________________________________
14
+
15
+ """
16
+
17
+ __version__ = "0.0.1"
18
+
19
+ import importlib
20
+ import logging
21
+ import pkgutil
22
+ from enum import Enum
23
+ #from labdatareader.data_reader import DataReader
24
+ from typing import Optional
25
+
26
+ from abc import ABC, abstractmethod
27
+
28
+
29
class DataDirection(Enum):
    """Direction of data flow relative to a resource."""

    data_in = 1
    data_out = 2
32
+
33
+
34
class DataType(Enum):
    """Coarse classification of the data a resource handles."""

    single_value = 1      # scalar
    structured_data = 2   # list, dict, or larger
    data_stream = 3
38
+
39
+
40
class Position:
    """A named slot on a resource, identified as ``<resource name>_<index>``."""

    def __init__(self, resource, position: int):
        """
        :param resource: the resource (e.g. device) the position belongs to
        :param position: integer slot index on that resource
        """
        # NOTE: the internal attribute names keep a historical typo ('postion')
        self._postion = position
        self._resource = resource
        self._postion_name = "{}_{}".format(str(resource.name), str(position))

    @property
    def resource(self):
        """The resource this position belongs to."""
        return self._resource

    @property
    def pos(self):
        """Integer slot index on the resource."""
        return self._postion

    @property
    def name(self):
        """Combined identifier, e.g. ``"hotel_5"``."""
        return self._postion_name
57
+
58
+
59
class Resource(ABC):
    """Base class of all pythonLab resources.

    On construction a resource registers itself with its owning process
    (see :meth:`auto_register_resource`).
    """

    def __init__(self,
                 proc=None,
                 name: Optional[str] = None):
        """
        :param proc: the owning process the resource registers itself with
        :param name: human-readable resource name
        """
        self.proc = proc
        self._name = name

        self.auto_register_resource()

    @abstractmethod
    def init(self):
        """Initialize the resource; must be implemented by subclasses."""
        raise NotImplementedError

    def auto_register_resource(self):
        """auto register resource in corresponding process
        """
        if isinstance(self, ServiceResource):
            self.proc.register_service_resource(self)
        elif isinstance(self, LabwareResource):
            # fixed: the former separate DynamicLabwareResource branch was
            # unreachable — DynamicLabwareResource subclasses LabwareResource
            # and is therefore registered as labware here
            self.proc.register_labware_resource(self)
        elif isinstance(self, SubstanceResource):
            self.proc.register_substance_resource(self)
        elif isinstance(self, DataResource):
            self.proc.register_data_resource(self)

    def import_resources(self, path):
        """Import resources from a given path

        NOTE(review): currently a stub — the actual import call is commented out.

        :param path: path to the resource definitions
        TODO: path to resources
        """
        #TODO: Mark: What went wrong here?
        #self._import_all_resources("TDOO: path to resource definitions")

    def _import_all_resources(self, namespace_pkg):
        """
        recursively iterate through namespace
        Specifying the second argument (prefix) to iter_modules makes the
        returned name an absolute name instead of a relative one. This allows
        import_module to work without having to do additional modification to
        the name.
        s. https://packaging.python.org/guides/creating-and-discovering-plugins/

        TODO: recursive import !!
        """
        for finder, name, ispkg in pkgutil.iter_modules(namespace_pkg.__path__, namespace_pkg.__name__ + "."):
            submodule = importlib.import_module(name)
            if ispkg:
                self._import_all_resources(submodule)
        # if a dictionary of discovered plugins is required, see LabDataReader

    @property
    def name(self):
        """Human-readable resource name."""
        return self._name
116
+
117
+
118
class ServiceResource(Resource):
    """A service-oriented resource, reflecting SiLA's service concept.

    More general than a device, since a service can be much more than a device.
    """

    def __init__(self, proc, name: Optional[str] = None):
        """
        :param proc: owning process
        :param name: service name
        """
        super().__init__(proc=proc, name=name)

    def init(self):
        """No service-specific initialisation by default."""
        pass
134
+
135
+
136
class LabwareResource(Resource):
    """Labware participating in a process (plates, lids, stacks, ...).

    Carries one-shot scheduling constraints (min/max wait, wait cost) that are
    set in a process description and 'consumed' by the next step that uses
    this labware.
    """

    def __init__(self,
                 proc,
                 name: str = None,
                 priority=None,
                 lidded: bool = False,
                 **kwargs):
        """
        :param proc: owning process
        :param name: labware name
        :param priority: optional scheduling priority
        :param lidded: whether the labware carries a lid
        :param kwargs: extra attributes copied onto the workflow start node
        """
        super().__init__(proc=proc, name=name)
        self._position = None
        self.start_position = None
        self.priority = priority
        self.lidded = lidded
        # one-shot flags, 'consumed' when the next step with this labware is called
        self._max_wait = None
        self._min_wait = None
        self._wait_cost = 0
        self.kwargs = kwargs

    def init(self):
        logging.debug(f"init {self.name}")

    def max_wait(self, duration):
        """Set the maximum waiting time until the next step."""
        self._max_wait = duration

    def consume_max_wait(self):
        """Return the pending maximum waiting time and reset it."""
        value, self._max_wait = self._max_wait, None
        return value

    def min_wait(self, duration):
        """Set the minimum waiting time until the next step."""
        self._min_wait = duration

    def consume_min_wait(self):
        """Return the pending minimum waiting time and reset it."""
        value, self._min_wait = self._min_wait, None
        return value

    def wait_cost(self, cost_per_second):
        """Set the cost for waiting until the next step."""
        self._wait_cost = cost_per_second

    def consume_wait_cost(self):
        """Return the pending waiting cost and reset it to 0."""
        value, self._wait_cost = self._wait_cost, 0
        return value

    def set_start_position(self, resource, position):
        """Set the labware's start position and notify the owning process."""
        self._position = Position(resource=resource, position=position)
        self.start_position = self._position
        self.proc.set_starting_position(self, resource, position)

    @property
    def pos(self):
        """Current :class:`Position` of the labware (None until set)."""
        return self._position
199
+
200
+
201
class DynamicLabwareResource(LabwareResource):
    """Labware whose order of usage is (dynamically) determined during process execution.

    Example: a reagent trough used when a reagent addition is required,
    depending on the outcome of a decision such as a pH measurement, or an
    induction once a certain absorption is reached.
    """

    def __init__(self, proc, name: str, priority=None, lidded: bool = False, outside_cost=0, **kwargs):
        """
        :param proc: owning process
        :param name: labware name
        :param priority: optional scheduling priority
        :param lidded: whether the labware carries a lid
        :param outside_cost: Some reagents need to be stored under special properties (e.g. cooled). These costs get
            translated to waiting_costs between getting it out and putting it back
        :param kwargs: extra attributes copied onto the workflow start node
        """
        super().__init__(proc, name=name, priority=priority, lidded=lidded, **kwargs)
        self.outside_cost = outside_cost
217
+
218
+
219
class SubstanceResource(Resource):
    """A substance — a more general concept than a sample.

    Can be used to define substances / samples and their corresponding
    properties, like liquid classes, physical state (gas, liquid,
    solid/powder), density, viscosity, or vapour pressure.
    """

    def __init__(self, proc, name: str = None):
        """
        :param proc: owning process
        :param name: substance name
        """
        super().__init__(proc=proc, name=name)

    def init(self):
        """No substance-specific initialisation by default."""
        pass
237
+
238
+
239
class DataResource(Resource):
    """A data input/output attached to a process."""

    def __init__(self, proc, name: str = None):
        """
        :param proc: owning process
        :param name: data resource name
        """
        super().__init__(proc=proc, name=name)
        # direction of data flow (e.g. DataDirection.data_in); unset by default
        self.direction = None

    def init(self):
        """No data-specific initialisation by default."""
        pass
@@ -0,0 +1,232 @@
1
+ Metadata-Version: 2.4
2
+ Name: uuPythonlab
3
+ Version: 0.2.4
4
+ License-Expression: MIT
5
+ Classifier: Development Status :: 2 - Pre-Alpha
6
+ Classifier: Intended Audience :: Developers
7
+ Classifier: Intended Audience :: Science/Research
8
+ Classifier: Intended Audience :: Education
9
+ Classifier: Operating System :: OS Independent
10
+ Classifier: Natural Language :: English
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.8
13
+ Classifier: Programming Language :: Python :: 3.9
14
+ Classifier: Programming Language :: Python :: 3.10
15
+ Classifier: Topic :: Scientific/Engineering
16
+ Classifier: Topic :: Scientific/Engineering :: Information Analysis
17
+ Classifier: Topic :: Scientific/Engineering :: Visualization
18
+ Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
19
+ Classifier: Topic :: Scientific/Engineering :: Chemistry
20
+ Description-Content-Type: text/markdown
21
+ License-File: LICENSE
22
+ License-File: AUTHORS.md
23
+ Requires-Dist: graphviz
24
+ Requires-Dist: networkx
25
+ Dynamic: license-file
26
+
27
+ # pythonLab
28
+
29
+ This is the specification and development repository of
30
+ **pythonLab**, a universal, extendable and safe language for laboratory processes.
31
+
32
+ Reproducibility and reliability is in the core of science.
33
+ Describing lab operations in a precise, easy and standardised form will help to transfer knowledge in every lab around the world.
34
+ A *laboratory process description language* is a long desired goal in laboratory process standardisation and lab automation.
35
+ It will enable automated execution of steps and better machine learning and AI predictions, since these process descriptions can be packed as metadata to the data resulting from this process.
36
+
37
+ Since this process language needs many characteristics of a programming language, like conditions (if ...), loops (for/while), variables, etc. we do not want to re-invent the wheel twice but rather use the **python syntax**, which is very popular in science.
38
+
39
+ ## Key (desired) Features
40
+
41
+ * easy and simple to learn and write (close to simple English)
42
+ * clear, human readable syntax
43
+ * universal - applicable for most laboratory operations
44
+ * transferable from one lab to another
45
+ * easy mapping between abstract resource representation and actual lab resource
46
+ * [*Turing-complete*](https://en.wikipedia.org/wiki/Turing_completeness), including conditions and loops
47
+ * easily extensible - prepared for the constant development of science
48
+ * close to real laboratory work
49
+ * vendor independent
50
+ * safe to execute
51
+ * converter from other lab description languages to pythonLab easy to implement
52
+
53
+ ## Applications of pythonLab
54
+
55
+ * general lab processes, common in any natural sciences lab (very broad application)
56
+ * description of lab automation workflows
57
+ * workflows on the lab devices (e.g. HPLC processes - sometimes also called 'methods', plate reader processes etc.)
58
+ * data evaluation workflows
59
+
60
+ ## Architecture of pythonLab
61
+
62
+ pythonLab processes are denoted in a python like syntax, but they are **not** directly executed by a *python interpreter*. They are rather parsed into a *workflow graph*, which can be used by a *Scheduler* to calculate
63
+ an optimal schedule (=order of execution). This order of execution might be different from the initial notation. An *Orchestrator* executes then the schedule and supervises the device communication, e.g. to SiLA servers/devices.
64
+
65
+ ![pythonLab Architecture](docs/images/pythonLab_architecture_overview.svg)
66
+
67
+ ## [Specification](https://opensourcelab.gitlab.io/pythonLab/specification/0_specification_base.html)
68
+
69
+ Please find a draft of the pythonLab specification in [docs/specification](https://pythonlabor.gitlab.io/pythonLab/specification/specification.html) (very early stage !).
70
+
71
+ Very briefly, the generic lab description language should have many of the features a common programming language has, following the desired *Turing-completeness*, like:
72
+
73
+ - variables (x = value)
74
+ - conditions (if, else, ...)
75
+ - loops (for ... while ....)
76
+ - functions / methods and subroutines
77
+ - modules
78
+ - namespaces and versions for unique addressing of a process step
79
+ - (at a later stage of language development: object orientation)
80
+
81
+ **!! This is a proposal - we would like to discuss it with a wide range of scientists to find the best common ground**
82
+
83
+ ## [Documentation](https://opensourcelab.gitlab.io/pythonLab/)
84
+
85
+ The pythonLab Documentation can be found in [docs](https://pythonlabor.gitlab.io/pythonLab/)
86
+
87
+ ## Language Core extensions
88
+
89
+ extensible Modules for e.g.
90
+ - liquid handling
91
+ - cultivation
92
+ - (bio-chemical) assays
93
+ - molecular biology
94
+ - chemical synthesis
95
+ - data evaluation
96
+
97
+ are in preparation
98
+
99
+
100
+ ## Examples
101
+
102
+ A simple description of liquid transfer step
103
+
104
+ ```python
105
+
106
+ # using settings: volume unit: uL, liquid class: water
107
+ # these are set in a settings module
108
+ # specifying resources
109
+ from pythonlab.resource import LabwareResource, DeviceResource
110
+ from pythonlab.liquid_handling import aspirate, dispense
111
+
112
+ cont1 = LabwareResource()
113
+ cont2 = LabwareResource()
114
+ liquid_handler = DeviceResource()
115
+
116
+ # process steps
117
+ liquid_handler.aspirate(cont1, row=1, col=3, vol=4.0)
118
+ liquid_handler.dispense(cont2, row=2, col=3 , vol=7.2)
119
+ ...
120
+
121
+ ```
122
+
123
+ A bit more complex example
124
+
125
+ ```python
126
+ # default units (SI) are specified in the standard unit module
127
+ # additional unit definitions can be added in the code
128
+ # specifying resources
129
+
130
+ from pythonlab.resource.labware import LabwareResource
131
+ from pythonlab.resource.services import MoverServiceResource, IncubationServiceResource
132
+
133
+ cont1 = LabwareResource()
134
+ mover = MoverServiceResource()
135
+ incubator = IncubationServiceResource()
136
+ start_pos = cont1.set_start_position(pos=1)
137
+ incubation_duration = 6 # hours
138
+
139
+ # initialise the process
140
+ incubator.init()
141
+ # process steps
142
+ mover.move(cont1, start_pos, incubator.nest1)
143
+ incubator.incubate(cont1, incubation_duration, unit="h")
144
+ mover.move(cont1, incubator.nest1, start_pos)
145
+
146
+
147
+ ...
148
+
149
+ ```
150
+
151
+ And finally a higher level example
152
+
153
+ ```python
154
+ # default units (SI) are specified in the standard unit module
155
+ # additional unit definitions can be added in the code
156
+ # specifying resources
157
+
158
+ from pythonlab.resource.labware import LabwareResource
159
+ from pythonlab.resource.services import MoverServiceResource, DispensionServiceResource, IncubationServiceResource
160
+
161
+ from pythonlab.processes.base import incubate, centrifugate
162
+ from pythonlab.bioprocess import inoculate
163
+
164
+ Labware_set = [LabwareResource(name=f"growth_plate_{cont}")
165
+ for cont in range(8)]
166
+
167
+ dispenser = DispensionServiceResource()
168
+ incubator = IncubationServiceResource()
169
+
170
+ inoculate([dispenser, Labware_set], source="starting_culture")
171
+ incubate([incubator, Labware_set], temp=310.0, shaking=(700,2) ) # temp in K
172
+ centrifugate([incubator, Labware_set], duration=600, force=4500)
173
+
174
+ ...
175
+
176
+ ```
177
+ ## Why python ?
178
+
179
+ Python is a programming language that is very common in modern scientific laboratories and covers all the desired characteristics we expect of a user-friendly lab process programming language.
180
+
181
+ The syntax is very simple, and intuitive to learn.
182
+ Syntax validation comes for free: the python interpreter already does it.
183
+
184
+ Standardisation of a minimal set of functionality will be achieved by standardised packages provided by this site (or any publicly available site).
185
+ Defined namespaces and versioning allow unique addressing of a process step.
186
+
187
+
188
+ ## [Implementation](./implementation)
189
+
190
+ As a proof-of-concept we are planning to provide a [pypy-sandbox](https://www.pypy.org) implementation in the future.
191
+ [pypy-sandbox](https://www.pypy.org) offers a safe execution environment to execute insecure code.
192
+ A new version is currently being developed by the pypy community.
193
+ Alternatively WASM will be a possible safe execution environment.
194
+
195
+ ## Related projects
196
+
197
+ Here is an incomplete list of related OpenSource projects - please let us know, if we missed a relevant project.
198
+
199
+ ### [Autoprotocol](http://autoprotocol.org)
200
+
201
+ * Syntax: JSON based
202
+ * (-) not *Turing complete*
203
+ * (-) hard to write and read by humans
204
+
205
+ ### [LabOP](https://bioprotocols.github.io/labop/)
206
+
207
+ * Syntax: RDF / python
208
+ * (-) not *Turing complete* (?)
209
+ * (-) hard to write and read by humans
210
+
211
+ ### [RoboLiq](https://ellis.github.io/roboliq/protocol/index.html)
212
+
213
+ * Syntax: yaml / Javascript
214
+ * (-) not *Turing complete*
215
+ * (-) hard to write and read by humans
216
+ * (-) design not clearly specified
217
+
218
+ ## Repository Maintainer:
219
+
220
+ * mark doerr (mark.doerr@uni-greifswald.de)
221
+
222
+ ## Documentation
223
+
224
+ The Documentation can be found here: [opensourcelab.gitlab.io/pythonLab](opensourcelab.gitlab.io/pythonLab) or [pythonLab.gitlab.io](pythonlab.gitlab.io/)
225
+
226
+
227
+ ## Credits
228
+
229
+ This package was created with Cookiecutter* and the `opensource/templates/cookiecutter-pypackage`* project template.
230
+
231
+ [Cookiecutter](https://github.com/audreyr/cookiecutter )
232
+ [opensource/templates/cookiecutter-pypackage](https://gitlab.com/opensourcelab/software-dev/cookiecutter-pypackage)
@@ -0,0 +1,10 @@
1
+ pythonlab/__init__.py,sha256=VFiH6ZP26gn4WR58bwTy09VKZij0PveXdSnpLleZppk,135
2
+ pythonlab/process.py,sha256=tPyIjvDTIW99PwTEXxvvoX9140xGY8lAe19hotYjPRk,4030
3
+ pythonlab/pythonlab_reader.py,sha256=Nx3C6gkUuSIHu5AveklSXpKIV5I1CsN1DzAgzF5ncwA,23530
4
+ pythonlab/resource.py,sha256=SQl_caY0zrLfPVWMGz-UqFSzG9Dqm0-YZzgg6MEK7-k,7248
5
+ uupythonlab-0.2.4.dist-info/licenses/AUTHORS.md,sha256=0CKaSdSK_ra16Aj2vOKukvEHkwRVn2sIena6m12LTxM,319
6
+ uupythonlab-0.2.4.dist-info/licenses/LICENSE,sha256=roQhQQq7QavJXjo0M5pQwzHQ9kF46SYKfEE8tEFuym8,1069
7
+ uupythonlab-0.2.4.dist-info/METADATA,sha256=xp-TYs23C596kLgSehDdv299YPgs36_7hR1D4AQLb84,8898
8
+ uupythonlab-0.2.4.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
9
+ uupythonlab-0.2.4.dist-info/top_level.txt,sha256=bMSAdzhhv_tcLTrP7VJbE7PsLvEJxh6_HU_B38rXPcA,10
10
+ uupythonlab-0.2.4.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,17 @@
1
+
2
+ # Acknowledgements and Credits
3
+
4
+ The pythonLab project thanks
5
+
6
+
7
+ Contributors
8
+ ------------
9
+
10
+ * Stefan Maak <stefan.maak@uni-greifswald.de>
11
+ * Mickey Kim <mickey.kim@genomicsengland.co.uk> ! Thanks for the fantastic cookiecutter template !
12
+
13
+
14
+ Development Lead
15
+ ----------------
16
+
17
+ * mark doerr <mark.doerr@uni-greifswald.de>
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2022, mark doerr
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ pythonlab