processing-graph 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- processing_graph/BaseProcessor.py +408 -0
- processing_graph/ProcessingGraph_tests.py +755 -0
- processing_graph/ProcessingNode.py +651 -0
- processing_graph/__init__.py +0 -0
- processing_graph-0.1.0.dist-info/METADATA +150 -0
- processing_graph-0.1.0.dist-info/RECORD +8 -0
- processing_graph-0.1.0.dist-info/WHEEL +5 -0
- processing_graph-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,755 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import unittest
|
|
3
|
+
import random
|
|
4
|
+
from processing_graph.BaseProcessor import BaseProcessor
|
|
5
|
+
from processing_graph.ProcessingNode import ProcessingNode, ExecutionNode, val_ref
|
|
6
|
+
import inspect
|
|
7
|
+
from typing import Callable, List, Optional, Any,Dict
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
from functools import wraps
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
|
|
14
|
+
def attach_parameters(**params):
    """
    Decorator factory that attaches a ``parameters`` descriptor class to a
    function.

    The descriptors are exposed as class attributes of a frozen dataclass,
    which is then reachable as ``decorated_function.parameters``.

    Args:
        **params: Parameter names mapped to descriptor objects.  The
            descriptors can be any object, such as strings, enums, or
            custom classes.
    """
    def decorator(func):
        @dataclass(frozen=True)
        class Parameters:
            pass

        # Expose each descriptor as a class-level attribute on Parameters.
        for attr_name, descriptor in params.items():
            setattr(Parameters, attr_name, descriptor)

        # Make the descriptor container reachable from the function itself.
        # @wraps below copies func.__dict__ onto the wrapper, so the
        # attribute survives the wrapping.
        func.parameters = Parameters

        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

        return wrapper

    return decorator
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
from functools import wraps
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def fallback_to_str(func):
    """
    Decorator for json.dumps-style functions that ensures anything not
    JSON-serializable gets passed through str() instead of raising.
    """
    @wraps(func)
    def wrapper(obj: Any, *args, **kwargs):
        # Respect an explicit `default=` supplied by the caller; otherwise
        # fall back to plain string conversion.
        if "default" not in kwargs:
            kwargs["default"] = str
        return func(obj, *args, **kwargs)

    return wrapper


safe_dumps = fallback_to_str(json.dumps)  ##
|
|
57
|
+
|
|
58
|
+
class TestNewProcessingPipeline(unittest.TestCase):
|
|
59
|
+
|
|
60
|
+
    def test_schema_tests(self):
        """End-to-end check of the plain-dict graph schema: a PointBuffer
        node feeds a MovingAverage node, driven by 150 seeded random points,
        and the final outputs are range-checked.

        NOTE(review): depends on BaseProcessor / ProcessingNode from this
        package; the exact resolution of ['__ref', ...] lists is defined
        there — presumably a reference into the per-call feature dict or
        another node's output. TODO confirm.
        """
        # 1. Generate data (seeded, so the asserted ranges are stable)
        random.seed(0)
        data = [(random.random(),) for _ in range(150)]

        class PointBuffer:
            # Keeps a bounded sliding window of the points it has seen.
            def __init__(self, in_dict):
                assert type(in_dict) == dict
                self.settings = in_dict
                self.settings['buffer'] = []


            def generate(self, point,**kwargs):
                # Appends `point`, trims the window, and echoes back the
                # injected test values alongside the data.
                buf = self.settings['buffer']
                buf.append(point)

                # trim to max length
                max_len = self.settings['buffer_size']
                while len(buf) > max_len:
                    buf.pop(0)

                return {
                    'test_val_out': kwargs['test_val'],
                    'test_nested': kwargs['test_nested'],
                    'data': point,
                    'buffer': list(buf)
                }

        class MovingAverage(dict):
            # Stateful node: stores its running state in itself (a dict).
            def hardcode_func(self,param):
                return param

            def calc(self, point):
                # Exponential moving average with a 0.9/0.1 blend.
                point = point
                if 'past_number' not in self:
                    self['past_number'] = point
                self['past_number'] = 0.9 * self['past_number'] + 0.1 * point
                return self['past_number']

            def buffer_average(self, buffer):
                return sum(buffer) / len(buffer)

        # 3. Build the graph definition
        p_def = {}
        # DELEGATE TO THE AI!
        # when you dont know what you want you have reached a state of desirelessness
        # ??? -> Search -> guess .... -> Nothing interesting idea

        # PointBuffer is wired with a bare-dict definition: dependency
        # values that are ['__ref', ...] lists are resolved by the
        # framework; literals (test_val=7) pass through untouched.
        p_def['PointBuffer'] = ProcessingNode.ExecutionNode({
            'name': 'PointBuffer',
            'clas':PointBuffer,
            'settings': {'buffer_size': 10, 'input': 'number'},
            'dependencies': {'generate': {'point':['__ref', 'point_in'],
                                          'test_val':7,
                                          'test_nested':{'k':['__ref', 'point_in']}
                                          }}
        })


        # Short aliases for the wrapper types exported by ProcessingNode.
        dNode = ProcessingNode.ExecutionNode
        iParam = ProcessingNode.ValueDict
        oField = ProcessingNode.OutputField
        # MovingAverage consumes PointBuffer.generate's 'data' / 'buffer'
        # outputs via 4-element refs: [__ref, node, func, field].
        p_def['MovingAverage'] = dNode({
            'name': 'MovingAverage',
            'clas':MovingAverage,
            'settings': ProcessingNode.NodeSettings({}),
            'dependencies': ProcessingNode.ServiceMap(
                {
                    'calc': iParam({'point': ['__ref', 'PointBuffer','generate', 'data']}),
                    'buffer_average': iParam({'buffer': ['__ref', 'PointBuffer','generate', 'buffer']}),
                    'hardcode_func': iParam({'param':10})

                })
        })


        # 4. Instantiate and run
        pn = BaseProcessor(p_def)
        features = []
        for p in data:
            feature = {'point_in': p[0]}
            out = pn.process(feature)
            features.append(out)


        # Only the final feature is inspected; the EMA of uniform [0,1)
        # noise should settle near 0.5, hence the loose range checks.
        sample = features[-1]
        self.assertIn('MovingAverage', sample)
        self.assertIn('calc', sample['MovingAverage'])
        self.assertTrue(sample['MovingAverage']['calc'] > 0.45 and sample['MovingAverage']['calc'] < 0.47)
        self.assertTrue(sample['MovingAverage']['buffer_average'] > 0.40 and sample['MovingAverage']['buffer_average'] < 0.5)
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
    def test_sexy_interface(self):
        """Exercises several alternative (sugar) spellings of the same graph
        definition, keeping only the last one of each, then runs the graph
        and range-checks the outputs.

        NOTE(review): the earlier p_def['PointBuffer'] / p_def['MovingAverage']
        assignments are deliberately overwritten — they sketch equivalent
        construction styles (func_name keys, dict-splat merging, mk_dict,
        val_ref). Only the final assignment of each key takes effect.

        NOTE(review): this test writes ./dump.json into the current working
        directory as a side effect — consider tempfile for CI hygiene.
        """

        # 1. Generate data (seeded for stable assertions)
        random.seed(0)
        data = [(random.random(),) for _ in range(150)]


        class PointBuffer:
            # Bounded sliding window over incoming points.
            p_point = 'point'
            def __init__(self, in_dict):
                assert type(in_dict) == dict
                self.settings = in_dict
                self.settings['buffer'] = []

            def generate(self, point,**kwargs):
                buf = self.settings['buffer']
                buf.append(point)

                # trim to max length
                max_len = self.settings['buffer_size']
                while len(buf) > max_len:
                    buf.pop(0)

                return {
                    'test_val_out': kwargs['test_val'],
                    'test_nested': kwargs['test_nested'],
                    'data': point,
                    'buffer': list(buf)
                }

        class MovingAverage(dict):
            def hardcode_func(self,param,param2,param_nest):
                return param, param2,param_nest

            def calc(self, point):
                # Exponential moving average (0.9 old / 0.1 new).
                point = point
                if 'past_number' not in self:
                    self['past_number'] = point
                self['past_number'] = 0.9 * self['past_number'] + 0.1 * point
                return self['past_number']

            def buffer_average(self, buffer):
                return sum(buffer) / len(buffer)


        # 3. Build the graph definition
        p_def = {}
        # DELEGATE TO THE AI!
        # when you dont know what you want you have reached a state of desirelessness
        # ??? -> Search -> guess .... -> Nothing interesting idea
        ExecutionNode = ProcessingNode.ExecutionNode
        ValueDict = ProcessingNode.ValueDict
        oField = ProcessingNode.OutputField
        ServiceMap = ProcessingNode.ServiceMap
        # Just sketching out the many, many, valid techniques
        # Variant 1: func_name key + raw ['__ref', ...] lists.
        p_def['PointBuffer'] = ProcessingNode.ExecutionNode({
            'name': 'PointBuffer',
            'clas':PointBuffer,
            'settings': {'buffer_size': 10, 'input': 'number'},
            'dependencies': {ServiceMap.func_name(PointBuffer.generate):
                {
                    'point':['__ref', 'point_in'],
                    'test_val':7,
                    'test_nested':{'k':['__ref', 'point_in']}
                }
            }
        })


        # Variant 2: same wiring expressed with dict-splat merging.
        p_def['PointBuffer'] = ProcessingNode.ExecutionNode({
            'name': 'PointBuffer',
            'clas':PointBuffer,
            'settings': {'buffer_size': 10, 'input': 'number'},
            'dependencies': {ServiceMap.func_name(PointBuffer.generate):
                {
                    **{'point':['__ref', 'point_in']},
                    **{'test_val':7},
                    **{'test_nested':{'k':['__ref', 'point_in']}}
                }
            }
        })
        # Variant 3: ServiceMap.mk_dict over a list of single-entry dicts.
        p_def['PointBuffer'] = ProcessingNode.ExecutionNode({
            'name': 'PointBuffer',
            'clas':PointBuffer,
            'settings': {'buffer_size': 10, 'input': 'number'},
            'dependencies': ServiceMap({ServiceMap.func_name(PointBuffer.generate): ServiceMap.mk_dict([
                {'point':ServiceMap.val_ref(pth='point_in')},
                {'test_val':7},
                {'test_nested':{'k':ServiceMap.val_ref(pth='point_in')}}
            ])
            })
        })
        # Variant 4 (the one that sticks): val_ref() helpers in a plain dict.
        p_def['PointBuffer'] = ProcessingNode.ExecutionNode({
            'name': 'PointBuffer',
            'clas':PointBuffer,
            'settings': {'buffer_size': 10, 'input': 'number'},
            'dependencies': {ServiceMap.func_name(PointBuffer.generate):
                {
                    'point':ServiceMap.val_ref(pth='point_in'),
                    'test_val':7,
                    'test_nested':{'k':ServiceMap.val_ref(pth='point_in')}
                }
            }
        })
        # MovingAverage variant 1: fully-qualified ServiceMap/ValueDict calls.
        p_def['MovingAverage'] = ProcessingNode.ExecutionNode({
            'name': 'MovingAverage',
            'clas':MovingAverage,
            'settings': ProcessingNode.NodeSettings({}),
            'dependencies': ServiceMap({
                ServiceMap.func_name(MovingAverage.calc): ValueDict({'point': ServiceMap.val_ref(node_id='PointBuffer',func=PointBuffer.generate, pth='data')}),
                ServiceMap.func_name(MovingAverage.buffer_average): ValueDict({'buffer': ServiceMap.val_ref(node_id='PointBuffer',func=PointBuffer.generate, pth='buffer')}),
                ServiceMap.func_name(MovingAverage.hardcode_func): ValueDict({'param':ServiceMap.val_ref(node_id='PointBuffer',func=PointBuffer.generate, pth='data'),
                    'param2':7,
                    'param_nest':ValueDict({'point_in':ServiceMap.val_ref(pth='point_in')}),


                })
            })
        })
        #
        # MovingAverage variant 2: short aliases F / graph_ref.
        F = ServiceMap.func_name
        graph_ref = ServiceMap.val_ref
        p_def['MovingAverage'] = ProcessingNode.ExecutionNode({
            'name': 'MovingAverage',
            'clas':MovingAverage,
            'settings': ProcessingNode.NodeSettings({}),
            'dependencies': ServiceMap({
                F(MovingAverage.calc): {
                    'point': graph_ref(node_id='PointBuffer',
                                       func=PointBuffer.generate,
                                       pth='data')
                },
                F(MovingAverage.buffer_average): {
                    'buffer': graph_ref(node_id='PointBuffer',
                                        func=PointBuffer.generate,
                                        pth='buffer')
                },
                F(MovingAverage.hardcode_func): {
                    'param':graph_ref(node_id='PointBuffer',
                                      func=PointBuffer.generate,
                                      pth='data'),
                    'param2':7,
                    'param_nest':{
                        'point_in':graph_ref(pth='point_in')
                    },
                }
            })
        })

        # MovingAverage variant 3 (final): Farg-tagged argument names.
        Fid = ServiceMap.func_name
        Vref = ServiceMap.val_ref
        class Farg(str):
            # Marker subclass of str used to tag function-argument keys.
            pass
        # [] - Clean up value interface
        # [] - Clean up value interface

        p_def['MovingAverage'] = ExecutionNode({
            'name': 'MovingAverage',
            'clas':MovingAverage,
            'settings': ProcessingNode.NodeSettings({}),
            'dependencies': ServiceMap(
                {
                    Fid(MovingAverage.calc): ValueDict(
                        {
                            Farg('point'): Vref(node_id='PointBuffer',func=PointBuffer.generate,
                                pth='data') # (1) change each into factories (2) SHOULD TAKE A STRING OR A LIST
                        }),
                    Fid(MovingAverage.buffer_average): ValueDict(
                        {
                            Farg('buffer'): Vref(node_id='PointBuffer',func=PointBuffer.generate,
                                pth='buffer')
                        }),
                    Fid(MovingAverage.hardcode_func): ValueDict(
                        {
                            Farg('param'): Vref(node_id='PointBuffer',func=PointBuffer.generate,pth='data'),
                            Farg('param2'):7,
                            Farg('param_nest'):ValueDict({'point_in':Vref(pth='point_in')}),
                        })
                })
        })
        print("printing test .... ")
        # Dump the (non-JSON-native) graph definition via safe_dumps, which
        # stringifies classes/val_refs instead of raising.
        graph_dump = safe_dumps(p_def,indent=4);
        print(safe_dumps(p_def,indent=4))
        with open("./dump.json", 'w') as f:
            f.write(graph_dump)

        # 4. Instantiate and run
        pn = BaseProcessor(p_def)
        features = []
        for p in data:
            feature = {'point_in': p[0]}
            out = pn.process(feature)
            features.append(out)
            #break

        # Only the last feature is checked; ranges match test_schema_tests.
        sample = features[-1]
        self.assertIn('MovingAverage', sample)
        self.assertIn('calc', sample['MovingAverage'])
        self.assertTrue(sample['MovingAverage']['calc'] > 0.45 and sample['MovingAverage']['calc'] < 0.47)
        self.assertTrue(sample['MovingAverage']['buffer_average'] > 0.40 and sample['MovingAverage']['buffer_average'] < 0.5)
        self.assertTrue(sample['PointBuffer']['generate']['test_val_out'] == 7)
        self.assertTrue(sample['PointBuffer']['generate']['test_nested']['k'] >0 )
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
    def test_sexy_standard_interface(self):
        """The distilled version of test_sexy_interface: one PointBuffer
        definition and one MovingAverage definition, both using the
        F (func_name) / graph_ref (val_ref) aliases, then run + assert.
        """

        # Seeded input so the asserted ranges are reproducible.
        random.seed(0)
        data = [(random.random(),) for _ in range(150)]

        class PointBuffer:
            # Bounded sliding window over incoming points.
            p_point = 'point'
            def __init__(self, in_dict):
                assert type(in_dict) == dict
                self.settings = in_dict
                self.settings['buffer'] = []

            def generate(self, point,**kwargs):
                buf = self.settings['buffer']
                buf.append(point)

                # trim to max length
                max_len = self.settings['buffer_size']
                while len(buf) > max_len:
                    buf.pop(0)

                return {
                    'test_val_out': kwargs['test_val'],
                    'test_nested': kwargs['test_nested'],
                    'data': point,
                    'buffer': list(buf)
                }

        class MovingAverage(dict):
            def hardcode_func(self,param,param2,param_nest):
                return param, param2,param_nest

            def calc(self, point):
                # Exponential moving average (0.9 old / 0.1 new).
                point = point
                if 'past_number' not in self:
                    self['past_number'] = point
                self['past_number'] = 0.9 * self['past_number'] + 0.1 * point
                return self['past_number']

            def buffer_average(self, buffer):
                return sum(buffer) / len(buffer)


        # Aliases into the ProcessingNode module's schema helpers.
        p_def = {}
        ExecutionNode = ProcessingNode.ExecutionNode
        ValueDict = ProcessingNode.ValueDict
        oField = ProcessingNode.OutputField
        ServiceMap = ProcessingNode.ServiceMap
        F = ServiceMap.func_name
        graph_ref = ServiceMap.val_ref


        # PointBuffer: inputs come from the per-call feature dict
        # ('point_in') plus a literal test_val.
        p_def['PointBuffer'] = ProcessingNode.ExecutionNode({
            'name': 'PointBuffer',
            'clas':PointBuffer,
            'settings': {'buffer_size': 10, 'input': 'number'},
            'dependencies': {F(PointBuffer.generate):
                {
                    'point':graph_ref(pth='point_in'),
                    'test_val':7,
                    'test_nested':{'k':graph_ref(pth='point_in')}
                }
            }
        })

        # MovingAverage: consumes PointBuffer.generate's 'data'/'buffer'.
        p_def['MovingAverage'] = ProcessingNode.ExecutionNode({
            'name': 'MovingAverage',
            'clas':MovingAverage,
            'settings': ProcessingNode.NodeSettings({}),
            'dependencies': ServiceMap({
                F(MovingAverage.calc): {
                    'point': graph_ref(node_id='PointBuffer',func=PointBuffer.generate, pth='data')
                },
                F(MovingAverage.buffer_average): {
                    'buffer': graph_ref(node_id='PointBuffer',func=PointBuffer.generate, pth='buffer')
                },
                F(MovingAverage.hardcode_func): {
                    'param':graph_ref(node_id='PointBuffer',func=PointBuffer.generate, pth='data'),
                    'param2':7,
                    'param_nest':{
                        'point_in':graph_ref(pth='point_in')
                    },
                }
            })
        })


        # Instantiate and stream all points through the graph.
        pn = BaseProcessor(p_def)
        features = []
        for p in data:
            feature = {'point_in': p[0]}
            out = pn.process(feature)
            features.append(out)
            #break

        # Same range checks as the other interface tests.
        sample = features[-1]
        self.assertIn('MovingAverage', sample)
        self.assertIn('calc', sample['MovingAverage'])
        self.assertTrue(sample['MovingAverage']['calc'] > 0.45 and sample['MovingAverage']['calc'] < 0.47)
        self.assertTrue(sample['MovingAverage']['buffer_average'] > 0.40 and sample['MovingAverage']['buffer_average'] < 0.5)
        self.assertTrue(sample['PointBuffer']['generate']['test_val_out'] == 7)
        self.assertTrue(sample['PointBuffer']['generate']['test_nested']['k'] >0 )
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
|
|
461
|
+
    def test_slim_interface(self):
        """Builds the same two-node graph through the imperative
        ExecutionNode field-constant + set_input(param_ref, value) API
        instead of a nested dict literal, then runs and asserts.
        """

        # Seeded input so the asserted ranges are reproducible.
        random.seed(0)
        data = [(random.random(),) for _ in range(150)]

        class PointBuffer:
            # Bounded sliding window over incoming points.
            p_point = 'point'
            def __init__(self, in_dict):
                assert type(in_dict) == dict
                self.settings = in_dict
                self.settings['buffer'] = []

            def generate(self, point,**kwargs):
                buf = self.settings['buffer']
                buf.append(point)

                # trim to max length
                max_len = self.settings['buffer_size']
                while len(buf) > max_len:
                    buf.pop(0)

                return {
                    'test_val_out': kwargs['test_val'],
                    'test_nested': kwargs['test_nested'],
                    'data': point,
                    'buffer': list(buf)
                }

        class MovingAverage(dict):
            def hardcode_func(self,param,param2,param_nest):
                return param, param2,param_nest

            def calc(self, point):
                # Exponential moving average (0.9 old / 0.1 new).
                point = point
                if 'past_number' not in self:
                    self['past_number'] = point
                self['past_number'] = 0.9 * self['past_number'] + 0.1 * point
                return self['past_number']

            def buffer_average(self, buffer):
                return sum(buffer) / len(buffer)

        # ——— aliases ———
        ExecutionNode = ProcessingNode.ExecutionNode
        ServiceMap = ProcessingNode.ServiceMap
        # NOTE(review): `vref` is bound to func_name, not val_ref — the
        # alias name is misleading, though it is unused below.
        vref = ServiceMap.func_name
        param_ref = ServiceMap.param_ref
        val_ref = ServiceMap.val_ref

        # ——— build p_def ———
        p_def = {}

        # 1) PointBuffer node
        # Uses the ExecutionNode.f_* field-name constants instead of raw
        # string keys; dependencies start empty and are filled via set_input.
        pb_node = ExecutionNode({
            ExecutionNode.f_name: 'PointBuffer',
            ExecutionNode.f_clas: PointBuffer,
            ExecutionNode.f_settings:{ 'buffer_size': 10 },
            ExecutionNode.f_dependencies:{ },

        })

        # param_ref(func, *path) addresses one (possibly nested) argument
        # slot; set_input binds it to a val_ref or a literal.
        pb_node.set_input(
            param_ref(PointBuffer.generate, 'point'),
            val_ref(pth='point_in')
        )
        pb_node.set_input(
            param_ref(PointBuffer.generate, 'test_nested', 'k'),
            val_ref(pth='point_in')
        )
        pb_node.set_input(
            param_ref(PointBuffer.generate, 'test_val'),
            7
        )

        p_def['PointBuffer'] = pb_node
        print(safe_dumps(pb_node, indent=4))

        # 2) MovingAverage node
        ma_node = ExecutionNode({
            ExecutionNode.f_name: 'MovingAverage',
            ExecutionNode.f_clas: MovingAverage,
            ExecutionNode.f_settings:{},
            ExecutionNode.f_dependencies:{ },

        })

        ma_node.set_input(
            param_ref(MovingAverage.calc, 'point'),
            val_ref(node_id='PointBuffer', func=PointBuffer.generate, pth='data')
        ) #35 LOC
        ma_node.set_input(
            param_ref(MovingAverage.buffer_average, 'buffer'),
            val_ref(node_id='PointBuffer', func=PointBuffer.generate, pth='buffer')
        )
        ma_node.set_input(
            param_ref(MovingAverage.hardcode_func, 'param'),
            val_ref(node_id='PointBuffer', func=PointBuffer.generate, pth='data')
        )
        ma_node.set_input(
            param_ref(MovingAverage.hardcode_func, 'param2'),
            7
        )
        ma_node.set_input(
            param_ref(MovingAverage.hardcode_func, 'param_nest'),
            { 'point_in': val_ref(pth='point_in') }
        )

        p_def['MovingAverage'] = ma_node

        # ——— execute the graph ———
        pn = BaseProcessor(p_def)
        features = []
        for (val,) in data:
            features.append(pn.process({'point_in': val}))

        sample = features[-1]

        # ——— assertions ———
        self.assertIn('MovingAverage', sample)
        self.assertIn('calc', sample['MovingAverage'])
        self.assertTrue(0.45 < sample['MovingAverage']['calc'] < 0.47)
        self.assertTrue(0.40 < sample['MovingAverage']['buffer_average'] < 0.50)

        pb_out = sample['PointBuffer']['generate']
        self.assertEqual(pb_out['test_val_out'], 7)
        self.assertTrue(pb_out['test_nested']['k'] > 0)
|
|
587
|
+
|
|
588
|
+
|
|
589
|
+
    def test_slimmer_interface(self):
        """Builds the graph with the ExecutionNode.Create factory and
        outref helpers, then verifies that export_state/import_state
        round-trips the stateful SumNode: re-running the stream through
        fresh BaseProcessor instances (restoring state each iteration)
        must yield the same final sum as a single continuous run.

        NOTE(review): uses the module-level `ExecutionNode` and `val_ref`
        imported at the top of this file, not local aliases.
        """

        # Seeded input so both runs see the identical stream.
        random.seed(0)
        data = [(random.random(),) for _ in range(150)]

        class PointBuffer:
            # Bounded sliding window over incoming points.
            p_point = 'point'
            def __init__(self, in_dict):
                assert type(in_dict) == dict
                self.settings = in_dict
                self.settings['buffer'] = []

            def generate(self, point:int,**kwargs):
                buf = self.settings['buffer']
                buf.append(point)

                # trim to max length
                max_len = self.settings['buffer_size']
                while len(buf) > max_len:
                    buf.pop(0)

                return {
                    'test_val_out': kwargs['test_val'],
                    'test_nested': kwargs['test_nested'],
                    'data': point,
                    'buffer': list(buf)
                }

        class SumNode(dict):
            # Accumulates a running total in its settings dict.
            def __init__(self, in_dict):
                assert type(in_dict) == dict
                self.settings = in_dict
                self.settings['sum'] = 0

            def sumit(self, point):
                print("SumNode" + str(point))
                point = point
                self.settings['sum'] = self.settings['sum'] + point
                return self.settings['sum']

        # ——— build p_def ———
        p_def = {}
        # 1) PointBuffer node -- Still hate that params are strings
        # ExecutionNode.Create: factory taking (name, class) plus keyword
        # settings/dependencies; dependency keys are the bound functions
        # themselves rather than name strings.
        pb_node = ExecutionNode.Create('PointBuffer',PointBuffer,
            settings={ 'buffer_size': 10 },
            dependencies={
                PointBuffer.generate: {
                    "point": val_ref(pth='point_in'),
                    "test_nested": {"k": val_ref(pth='point_in')},
                    "test_val": 7
                }
            },
        )
        p_def[pb_node.name] = pb_node
        # NOTE(review): `outreff` is computed but never used — the same
        # expression is inlined in the SumNode dependencies below.
        outreff = pb_node.outref( [PointBuffer.generate,"data"])
        sum_node = ExecutionNode.Create('SumNode',SumNode,
            dependencies= {
                SumNode.sumit: { "point": pb_node.outref( [PointBuffer.generate,"data"]) },
            }
        )
        p_def[sum_node.name] = sum_node

        # ——— execute the graph ———
        # Run 1: a single processor instance streams all points; its final
        # SumNode output is the reference value.
        pn = BaseProcessor(p_def)
        features = []
        for (val,) in data:
            features.append(pn.process({'point_in': val}))
        print(features[-1]["SumNode"])
        sum_node_correct = features[-1]["SumNode"]

        # Run 2: rebuild the processor for EVERY point, restoring the
        # exported state each time. An extra "TEST" key is injected to
        # show (presumably) that unknown state keys are tolerated —
        # TODO confirm against import_state's contract.
        pn = BaseProcessor(p_def)
        state = pn.export_state()
        features = []
        for (val,) in data:
            pn = BaseProcessor(p_def)
            state["SumNode"]["TEST"] = 1
            pn.import_state(state)
            print("PRE StATE")
            print(json.dumps(state,indent=3))
            features.append(pn.process({'point_in': val}))
            state = pn.export_state()
            print("POST STATE")
            print(json.dumps(state,indent=3))
        print(features[-1]["SumNode"])
        # State round-tripping must not change the accumulated sum.
        assert sum_node_correct == features[-1]["SumNode"]
        print("SAME SUM!")

        # NEXT TESTS
        # Update node independently - can invoke sum node several times
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
if __name__ == '__main__':
    # Only one test is selected by default; swap in the commented lines to
    # run the full suite or a different single test.
    # unittest.main()
    # unittest.main(defaultTest='TestNewProcessingPipeline.test_slimmer_interface')
    unittest.main(defaultTest='TestNewProcessingPipeline.test_sexy_interface')
|
|
684
|
+
|
|
685
|
+
|
|
686
|
+
|
|
687
|
+
'''
|
|
688
|
+
{
|
|
689
|
+
"PointBuffer": {
|
|
690
|
+
"name": "PointBuffer",
|
|
691
|
+
"clas": "PointBuffer",
|
|
692
|
+
"settings": {
|
|
693
|
+
"buffer_size": 10,
|
|
694
|
+
"input": "number",
|
|
695
|
+
"cache_path": "PointBuffer"
|
|
696
|
+
},
|
|
697
|
+
"dependencies": {
|
|
698
|
+
"generate": {
|
|
699
|
+
"point": [
|
|
700
|
+
"__ref",
|
|
701
|
+
"point_in"
|
|
702
|
+
],
|
|
703
|
+
"test_val": 7,
|
|
704
|
+
"test_nested": {
|
|
705
|
+
"k": [
|
|
706
|
+
"__ref",
|
|
707
|
+
"point_in"
|
|
708
|
+
]
|
|
709
|
+
}
|
|
710
|
+
}
|
|
711
|
+
}
|
|
712
|
+
},
|
|
713
|
+
"MovingAverage": {
|
|
714
|
+
"name": "MovingAverage",
|
|
715
|
+
"clas": "MovingAverage",
|
|
716
|
+
"settings": {
|
|
717
|
+
"cache_path": "MovingAverage"
|
|
718
|
+
},
|
|
719
|
+
"dependencies": {
|
|
720
|
+
"calc": {
|
|
721
|
+
"point": [
|
|
722
|
+
"__ref",
|
|
723
|
+
"PointBuffer",
|
|
724
|
+
"generate",
|
|
725
|
+
"data"
|
|
726
|
+
]
|
|
727
|
+
},
|
|
728
|
+
"buffer_average": {
|
|
729
|
+
"buffer": [
|
|
730
|
+
"__ref",
|
|
731
|
+
"PointBuffer",
|
|
732
|
+
"generate",
|
|
733
|
+
"buffer"
|
|
734
|
+
]
|
|
735
|
+
},
|
|
736
|
+
"hardcode_func": {
|
|
737
|
+
"param": [
|
|
738
|
+
"__ref",
|
|
739
|
+
"PointBuffer",
|
|
740
|
+
"generate",
|
|
741
|
+
"data"
|
|
742
|
+
],
|
|
743
|
+
"param2": 7,
|
|
744
|
+
"param_nest": {
|
|
745
|
+
"point_in": [
|
|
746
|
+
"__ref",
|
|
747
|
+
"point_in"
|
|
748
|
+
]
|
|
749
|
+
}
|
|
750
|
+
}
|
|
751
|
+
}
|
|
752
|
+
}
|
|
753
|
+
}
|
|
754
|
+
|
|
755
|
+
'''
|