PipeGraphPy 2.0.6__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. PipeGraphPy/__init__.py +10 -0
  2. PipeGraphPy/common.py +4 -0
  3. PipeGraphPy/config/__init__.py +276 -0
  4. PipeGraphPy/config/custom.py +6 -0
  5. PipeGraphPy/config/default_settings.py +125 -0
  6. PipeGraphPy/constants.py +421 -0
  7. PipeGraphPy/core/__init__.py +2 -0
  8. PipeGraphPy/core/anchor.cp39-win_amd64.pyd +0 -0
  9. PipeGraphPy/core/edge.cp39-win_amd64.pyd +0 -0
  10. PipeGraphPy/core/graph.cp39-win_amd64.pyd +0 -0
  11. PipeGraphPy/core/graph_base.cp39-win_amd64.pyd +0 -0
  12. PipeGraphPy/core/modcls/__init__.py +3 -0
  13. PipeGraphPy/core/modcls/base.cp39-win_amd64.pyd +0 -0
  14. PipeGraphPy/core/modcls/branchselect.cp39-win_amd64.pyd +0 -0
  15. PipeGraphPy/core/modcls/classifier.cp39-win_amd64.pyd +0 -0
  16. PipeGraphPy/core/modcls/cluster.cp39-win_amd64.pyd +0 -0
  17. PipeGraphPy/core/modcls/datacharts.cp39-win_amd64.pyd +0 -0
  18. PipeGraphPy/core/modcls/deeplearning.cp39-win_amd64.pyd +0 -0
  19. PipeGraphPy/core/modcls/endscript.cp39-win_amd64.pyd +0 -0
  20. PipeGraphPy/core/modcls/ensemble.cp39-win_amd64.pyd +0 -0
  21. PipeGraphPy/core/modcls/evaluate.cp39-win_amd64.pyd +0 -0
  22. PipeGraphPy/core/modcls/exportdata.cp39-win_amd64.pyd +0 -0
  23. PipeGraphPy/core/modcls/handlescript.cp39-win_amd64.pyd +0 -0
  24. PipeGraphPy/core/modcls/importdata.cp39-win_amd64.pyd +0 -0
  25. PipeGraphPy/core/modcls/merge.cp39-win_amd64.pyd +0 -0
  26. PipeGraphPy/core/modcls/mergescript.cp39-win_amd64.pyd +0 -0
  27. PipeGraphPy/core/modcls/metrics.cp39-win_amd64.pyd +0 -0
  28. PipeGraphPy/core/modcls/postprocessor.cp39-win_amd64.pyd +0 -0
  29. PipeGraphPy/core/modcls/preprocessor.cp39-win_amd64.pyd +0 -0
  30. PipeGraphPy/core/modcls/pythonscript.cp39-win_amd64.pyd +0 -0
  31. PipeGraphPy/core/modcls/regressor.cp39-win_amd64.pyd +0 -0
  32. PipeGraphPy/core/modcls/selector.cp39-win_amd64.pyd +0 -0
  33. PipeGraphPy/core/modcls/selectscript.cp39-win_amd64.pyd +0 -0
  34. PipeGraphPy/core/modcls/special.cp39-win_amd64.pyd +0 -0
  35. PipeGraphPy/core/modcls/split.cp39-win_amd64.pyd +0 -0
  36. PipeGraphPy/core/modcls/splitscript.cp39-win_amd64.pyd +0 -0
  37. PipeGraphPy/core/modcls/startscript.cp39-win_amd64.pyd +0 -0
  38. PipeGraphPy/core/modcls/transformer.cp39-win_amd64.pyd +0 -0
  39. PipeGraphPy/core/module.cp39-win_amd64.pyd +0 -0
  40. PipeGraphPy/core/modules/__init__.py +65 -0
  41. PipeGraphPy/core/modules/classifier/__init__.py +2 -0
  42. PipeGraphPy/core/modules/cluster/__init__.py +0 -0
  43. PipeGraphPy/core/modules/custom/__init__.py +0 -0
  44. PipeGraphPy/core/modules/custom/classifier/__init__.py +0 -0
  45. PipeGraphPy/core/modules/datacharts/__init__.py +5 -0
  46. PipeGraphPy/core/modules/datacharts/dataview.py +28 -0
  47. PipeGraphPy/core/modules/deeplearning/__init__.py +0 -0
  48. PipeGraphPy/core/modules/ensemble/__init__.py +0 -0
  49. PipeGraphPy/core/modules/evaluate/__init__.py +0 -0
  50. PipeGraphPy/core/modules/exportdata/__init__.py +0 -0
  51. PipeGraphPy/core/modules/importdata/__init__.py +0 -0
  52. PipeGraphPy/core/modules/merge/__init__.py +0 -0
  53. PipeGraphPy/core/modules/model_selector/__init__.py +3 -0
  54. PipeGraphPy/core/modules/postprocessor/__init__.py +0 -0
  55. PipeGraphPy/core/modules/preprocessor/__init__.py +0 -0
  56. PipeGraphPy/core/modules/pythonscript/__init__.py +0 -0
  57. PipeGraphPy/core/modules/regressor/__init__.py +0 -0
  58. PipeGraphPy/core/modules/selector/__init__.py +0 -0
  59. PipeGraphPy/core/modules/special/__init__.py +0 -0
  60. PipeGraphPy/core/modules/split/__init__.py +0 -0
  61. PipeGraphPy/core/modules/transformer/__init__.py +0 -0
  62. PipeGraphPy/core/node.cp39-win_amd64.pyd +0 -0
  63. PipeGraphPy/core/pipegraph.cp39-win_amd64.pyd +0 -0
  64. PipeGraphPy/db/__init__.py +2 -0
  65. PipeGraphPy/db/models.cp39-win_amd64.pyd +0 -0
  66. PipeGraphPy/db/utils.py +106 -0
  67. PipeGraphPy/decorators.py +42 -0
  68. PipeGraphPy/logger.py +170 -0
  69. PipeGraphPy/plot/__init__.py +0 -0
  70. PipeGraphPy/plot/draw.py +424 -0
  71. PipeGraphPy/storage/__init__.py +10 -0
  72. PipeGraphPy/storage/base.py +2 -0
  73. PipeGraphPy/storage/dict_backend.py +102 -0
  74. PipeGraphPy/storage/file_backend.py +342 -0
  75. PipeGraphPy/storage/redis_backend.py +183 -0
  76. PipeGraphPy/tools.py +388 -0
  77. PipeGraphPy/utils/__init__.py +1 -0
  78. PipeGraphPy/utils/check.py +179 -0
  79. PipeGraphPy/utils/core.py +295 -0
  80. PipeGraphPy/utils/examine.py +259 -0
  81. PipeGraphPy/utils/file_operate.py +101 -0
  82. PipeGraphPy/utils/format.py +303 -0
  83. PipeGraphPy/utils/functional.py +422 -0
  84. PipeGraphPy/utils/handle_graph.py +31 -0
  85. PipeGraphPy/utils/lock.py +1 -0
  86. PipeGraphPy/utils/mq.py +54 -0
  87. PipeGraphPy/utils/osutil.py +29 -0
  88. PipeGraphPy/utils/redis_operate.py +195 -0
  89. PipeGraphPy/utils/str_handle.py +122 -0
  90. PipeGraphPy/utils/version.py +108 -0
  91. PipeGraphPy-2.0.6.dist-info/METADATA +17 -0
  92. PipeGraphPy-2.0.6.dist-info/RECORD +94 -0
  93. PipeGraphPy-2.0.6.dist-info/WHEEL +5 -0
  94. PipeGraphPy-2.0.6.dist-info/top_level.txt +1 -0
@@ -0,0 +1,422 @@
1
+ import copy
2
+ import itertools
3
+ import operator
4
+ from functools import total_ordering, wraps
5
+
6
+
7
class cached_property:
    """
    Descriptor that turns a single-``self``-argument method into a property
    whose value is computed once and then stored on the instance, so later
    accesses bypass the descriptor entirely.

    An existing method can be wrapped directly, e.g.
    ``url = cached_property(get_absolute_url)``.
    The optional ``name`` argument is obsolete as of Python 3.6 and will be
    deprecated in PGP 4.0 (#30127).
    """
    # Filled in by __set_name__(); None means the descriptor is still unbound.
    name = None

    @staticmethod
    def func(instance):
        # Placeholder that __set_name__() replaces with the real function.
        raise TypeError(
            'Cannot use cached_property instance without calling '
            '__set_name__() on it.'
        )

    def __init__(self, func, name=None):
        self.real_func = func
        self.__doc__ = getattr(func, '__doc__')

    def __set_name__(self, owner, name):
        if self.name is None:
            self.name = name
            self.func = self.real_func
        elif name != self.name:
            raise TypeError(
                "Cannot assign the same cached_property to two different names "
                "(%r and %r)." % (self.name, name)
            )

    def __get__(self, instance, cls=None):
        """
        Call the wrapped function and store the result in the instance
        __dict__ under our attribute name; subsequent attribute access finds
        the cached value before ever reaching this descriptor again.
        """
        if instance is None:
            return self
        value = instance.__dict__[self.name] = self.func(instance)
        return value
49
+
50
+
51
class classproperty:
    """
    Descriptor exposing a single-``cls``-argument method as a read-only
    attribute that can be read from the class itself (or its instances).
    """

    def __init__(self, method=None):
        self.fget = method

    def __get__(self, instance, cls=None):
        # Hand the owner class, never the instance, to the getter.
        return self.fget(cls)

    def getter(self, method):
        """Install (or replace) the wrapped getter; return self for chaining."""
        self.fget = method
        return self
66
+
67
+
68
class Promise:
    """
    Base class for the proxy type created inside the closure of lazy().

    Carries no behavior of its own; it exists purely so other code can
    recognize lazily evaluated values via isinstance() checks.
    """
    pass
74
+
75
+
76
def lazy(func, *resultclasses):
    """
    Turn any callable into a lazy evaluated callable. result classes or types
    is required -- at least one is needed so that the automatic forcing of
    the lazy evaluation code is triggered. Results are not memoized; the
    function is evaluated on every access.
    """

    @total_ordering
    class __proxy__(Promise):
        """
        Encapsulate a function call and act as a proxy for methods that are
        called on the result of that call. The function is not evaluated
        until one of the methods on the result is used.
        """
        # Class-level flag: True once __prepare_class__() has copied the
        # result classes' magic methods onto this proxy class.
        __prepared = False

        def __init__(self, args, kw):
            # Store the call arguments; the actual call to func() is deferred.
            self.__args = args
            self.__kw = kw
            if not self.__prepared:
                self.__prepare_class__()
            self.__class__.__prepared = True

        def __reduce__(self):
            # Pickle as (unpickler, original call spec) so the proxy can be
            # rebuilt without pickling this dynamically created class.
            return (
                _lazy_proxy_unpickle,
                (func, self.__args, self.__kw) + resultclasses
            )

        def __repr__(self):
            # repr() forces evaluation of the wrapped call.
            return repr(self.__cast())

        @classmethod
        def __prepare_class__(cls):
            # Copy every method of every result class onto this proxy class
            # as an evaluating wrapper, so the proxy quacks like the result.
            for resultclass in resultclasses:
                for type_ in resultclass.mro():
                    for method_name in type_.__dict__:
                        # All __promise__ return the same wrapper method, they
                        # look up the correct implementation when called.
                        if hasattr(cls, method_name):
                            continue
                        meth = cls.__promise__(method_name)
                        setattr(cls, method_name, meth)
            cls._delegate_bytes = bytes in resultclasses
            cls._delegate_text = str in resultclasses
            assert not (cls._delegate_bytes and cls._delegate_text), (
                "Cannot call lazy() with both bytes and text return types.")
            if cls._delegate_text:
                cls.__str__ = cls.__text_cast
            elif cls._delegate_bytes:
                cls.__bytes__ = cls.__bytes_cast

        @classmethod
        def __promise__(cls, method_name):
            # Builds a wrapper around some magic method
            def __wrapper__(self, *args, **kw):
                # Automatically triggers the evaluation of a lazy value and
                # applies the given magic method of the result type.
                res = func(*self.__args, **self.__kw)
                return getattr(res, method_name)(*args, **kw)
            return __wrapper__

        def __text_cast(self):
            # Evaluate and return the result as text (str delegate path).
            return func(*self.__args, **self.__kw)

        def __bytes_cast(self):
            # Evaluate and coerce the result to bytes (bytes delegate path).
            return bytes(func(*self.__args, **self.__kw))

        def __bytes_cast_encoded(self):
            # Evaluate and encode a text result to bytes.
            return func(*self.__args, **self.__kw).encode()

        def __cast(self):
            # Force evaluation, honoring the declared delegate type.
            if self._delegate_bytes:
                return self.__bytes_cast()
            elif self._delegate_text:
                return self.__text_cast()
            else:
                return func(*self.__args, **self.__kw)

        def __str__(self):
            # object defines __str__(), so __prepare_class__() won't overload
            # a __str__() method from the proxied class.
            return str(self.__cast())

        def __eq__(self, other):
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() == other

        def __lt__(self, other):
            # total_ordering derives the remaining comparisons from this.
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() < other

        def __hash__(self):
            return hash(self.__cast())

        def __mod__(self, rhs):
            # Text proxies format via str() so %-interpolation behaves like
            # it would on the evaluated string.
            if self._delegate_text:
                return str(self) % rhs
            return self.__cast() % rhs

        def __add__(self, other):
            return self.__cast() + other

        def __radd__(self, other):
            return other + self.__cast()

        def __deepcopy__(self, memo):
            # Instances of this class are effectively immutable. It's just a
            # collection of functions. So we don't need to do anything
            # complicated for copying.
            memo[id(self)] = self
            return self

    @wraps(func)
    def __wrapper__(*args, **kw):
        # Creates the proxy object, instead of the actual value.
        return __proxy__(args, kw)

    return __wrapper__
197
+
198
+
199
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
    """Pickle helper: rebuild a lazy proxy for ``func`` and re-apply the
    original call arguments."""
    rebuilt = lazy(func, *resultclasses)
    return rebuilt(*args, **kwargs)
201
+
202
+
203
def lazystr(text):
    """
    Shortcut for the common case of a lazy callable that returns str.
    """
    make_lazy_str = lazy(str, str)
    return make_lazy_str(text)
208
+
209
+
210
def keep_lazy(*resultclasses):
    """
    Decorator factory that lets the decorated function accept lazy arguments.

    If no argument is a Promise, the function is evaluated immediately;
    otherwise a lazy proxy is returned that evaluates the call on demand.
    """
    if not resultclasses:
        raise TypeError("You must pass at least one argument to keep_lazy().")

    def decorator(func):
        lazy_func = lazy(func, *resultclasses)

        @wraps(func)
        def wrapper(*args, **kwargs):
            # Defer only when at least one positional or keyword value is lazy.
            for value in itertools.chain(args, kwargs.values()):
                if isinstance(value, Promise):
                    return lazy_func(*args, **kwargs)
            return func(*args, **kwargs)
        return wrapper
    return decorator
230
+
231
+
232
def keep_lazy_text(func):
    """
    Shortcut for keep_lazy(str): accept lazy arguments, return text.
    """
    decorate = keep_lazy(str)
    return decorate(func)
237
+
238
+
239
# Sentinel marking a LazyObject whose wrapped instance has not been created
# yet; always compared with `is`, never `==`.
empty = object()
240
+
241
+
242
def new_method_proxy(func):
    """Return a method that applies ``func`` to the wrapped object,
    materializing the wrapped object first when it does not exist yet."""
    def _method(self, *args):
        if self._wrapped is empty:
            # First touch: let the wrapper build its target.
            self._setup()
        return func(self._wrapped, *args)
    return _method
248
+
249
+
250
class LazyObject:
    """
    A wrapper for another class that can be used to delay instantiation of
    the wrapped class.

    By subclassing, you have the opportunity to intercept and alter the
    instantiation. If you don't need to do that, use SimpleLazyObject.
    """

    # Avoid infinite recursion when tracing __init__() (#19456).
    _wrapped = None

    def __init__(self):
        # Note: if a subclass overrides __init__(), it will likely need to
        # override __copy__() and __deepcopy__() as well.
        self._wrapped = empty

    # Attribute reads are forwarded to the wrapped object (built on demand).
    __getattr__ = new_method_proxy(getattr)

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # Assign directly to __dict__ to avoid an infinite __setattr__ loop.
            self.__dict__["_wrapped"] = value
        else:
            if self._wrapped is empty:
                self._setup()
            setattr(self._wrapped, name, value)

    def __delattr__(self, name):
        if name == "_wrapped":
            raise TypeError("can't delete _wrapped.")
        if self._wrapped is empty:
            self._setup()
        delattr(self._wrapped, name)

    def _setup(self):
        """
        Must be implemented by subclasses to initialize the wrapped object.
        """
        raise NotImplementedError(
            'subclasses of LazyObject must provide a _setup() method')

    # Because we have messed with __class__ below, we confuse pickle as to what
    # class we are pickling. We're going to have to initialize the wrapped
    # object to successfully pickle it, so we might as well just pickle the
    # wrapped object since they're supposed to act the same way.
    #
    # Unfortunately, if we try to simply act like the wrapped object, the ruse
    # will break down when pickle gets our id(). Thus we end up with pickle
    # thinking, in effect, that we are a distinct object from the wrapped
    # object, but with the same __dict__. This can cause problems (see #25389).
    #
    # So instead, we define our own __reduce__ method and custom unpickler. We
    # pickle the wrapped object as the unpickler's argument, so that pickle
    # will pickle it normally, and then the unpickler simply returns its
    # argument.
    def __reduce__(self):
        if self._wrapped is empty:
            self._setup()
        return (unpickle_lazyobject, (self._wrapped,))

    def __copy__(self):
        if self._wrapped is empty:
            # If uninitialized, copy the wrapper. Use type(self), not
            # self.__class__, because the latter is proxied.
            return type(self)()
        else:
            # If initialized, return a copy of the wrapped object.
            return copy.copy(self._wrapped)

    def __deepcopy__(self, memo):
        if self._wrapped is empty:
            # We have to use type(self), not self.__class__, because the
            # latter is proxied.
            result = type(self)()
            memo[id(self)] = result
            return result
        return copy.deepcopy(self._wrapped, memo)

    __bytes__ = new_method_proxy(bytes)
    __str__ = new_method_proxy(str)
    __bool__ = new_method_proxy(bool)

    # Introspection support
    __dir__ = new_method_proxy(dir)

    # Need to pretend to be the wrapped class, for the sake of objects that
    # care about this (especially in equality tests)
    __class__ = property(new_method_proxy(operator.attrgetter("__class__")))
    __eq__ = new_method_proxy(operator.eq)
    __lt__ = new_method_proxy(operator.lt)
    __gt__ = new_method_proxy(operator.gt)
    __ne__ = new_method_proxy(operator.ne)
    __hash__ = new_method_proxy(hash)

    # List/Tuple/Dictionary methods support
    __getitem__ = new_method_proxy(operator.getitem)
    __setitem__ = new_method_proxy(operator.setitem)
    __delitem__ = new_method_proxy(operator.delitem)
    __iter__ = new_method_proxy(iter)
    __len__ = new_method_proxy(len)
    __contains__ = new_method_proxy(operator.contains)
351
+
352
+
353
def unpickle_lazyobject(wrapped):
    """
    Used to unpickle lazy objects: simply return its argument, which is the
    already-unpickled wrapped object.
    """
    return wrapped
358
+
359
+
360
class SimpleLazyObject(LazyObject):
    """
    Lazy wrapper whose target is produced by an arbitrary callable.

    Designed for compound objects of unknown type. For builtins or objects of
    known type, prefer PipeGraphPy.utils.functional.lazy.
    """

    def __init__(self, func):
        """
        Store the factory callable that builds the wrapped object.

        Copies of a SimpleLazyObject can be made in various circumstances
        within PGP, so the callable must be safe to invoke more than once and
        must return the same value each time.
        """
        # Write through __dict__ so LazyObject.__setattr__ doesn't proxy this.
        self.__dict__['_setupfunc'] = func
        super().__init__()

    def _setup(self):
        # Materialize the wrapped object from the stored factory.
        self._wrapped = self._setupfunc()

    def __repr__(self):
        # Debug-friendly representation that never forces evaluation.
        target = self._setupfunc if self._wrapped is empty else self._wrapped
        return '<%s: %r>' % (type(self).__name__, target)

    def __copy__(self):
        if self._wrapped is not empty:
            # Already initialized: hand back a copy of the wrapped object.
            return copy.copy(self._wrapped)
        # Uninitialized: copy the wrapper itself. Use SimpleLazyObject, not
        # self.__class__, because the latter is proxied.
        return SimpleLazyObject(self._setupfunc)

    def __deepcopy__(self, memo):
        if self._wrapped is not empty:
            return copy.deepcopy(self._wrapped, memo)
        # Use SimpleLazyObject, not self.__class__ (proxied), for the clone.
        clone = SimpleLazyObject(self._setupfunc)
        memo[id(self)] = clone
        return clone
409
+
410
+
411
def partition(predicate, values):
    """
    Split *values* into two lists according to *predicate*, e.g.:

    >>> partition(lambda x: x > 3, range(5))
    ([0, 1, 2, 3], [4])

    Returns a ``(falsy_bucket, truthy_bucket)`` tuple of lists.
    """
    buckets = ([], [])
    for value in values:
        # bool indexing: False selects bucket 0, True selects bucket 1.
        buckets[predicate(value)].append(value)
    return buckets
@@ -0,0 +1,31 @@
1
+ #!/usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ import os
4
+ import signal
5
+ from PipeGraphPy.logger import log
6
+ from PipeGraphPy.common import multi_graph
7
+
8
+
9
def stop_running_with_exception(graph_id, err):
    """Stop the graph registered under ``graph_id``, passing ``err`` along
    to its stop_run() handler."""
    multi_graph[graph_id].stop_run(err)
13
+
14
+
15
class HandleGraph(object):
    """Thin proxy around the graph registered in ``multi_graph`` under the
    given id; any attribute not found on the proxy is forwarded to the
    underlying graph object."""

    def __init__(self, id):
        self.id = id
        self.graph = multi_graph[self.id]

    def __getattr__(self, name):
        # Only called for attributes missing on the proxy itself.
        return getattr(self.graph, name)
22
+
23
+
24
def kill_pid(pid):
    """
    Forcibly kill the process with the given pid.

    Uses SIGKILL where the platform defines it (POSIX). On Windows — the
    platform this wheel targets — ``signal.SIGKILL`` does not exist, so fall
    back to SIGTERM, which ``os.kill`` maps to ``TerminateProcess``.
    """
    # Referencing signal.SIGKILL directly raised AttributeError on Windows,
    # and the old blanket `except Exception` then misreported it as a
    # missing process. Resolve the signal outside the try block instead.
    sig = getattr(signal, "SIGKILL", signal.SIGTERM)
    try:
        a = os.kill(pid, sig)
        log.info("已杀死pid为%s的进程, 返回值是:%s" % (pid, a))
    except ProcessLookupError:
        # Only a genuinely missing process is reported as such.
        log.error("没有如此进程!!!")
    except Exception:
        # Keep the call best-effort (e.g. permission denied), but don't
        # mislabel other failures as "no such process".
        log.error("杀死pid为%s的进程失败" % pid)
@@ -0,0 +1 @@
1
+ # Connect to Redis (module placeholder).
@@ -0,0 +1,54 @@
1
+ # #!/usr/bin/env python
2
+ # # -*- coding: utf-8 -*-
3
+ #
4
+ # import traceback
5
+ # from PipeGraphPy.logger import log
6
+ # from PipeGraphPy.config import settings
7
+ # from PipeGraphPy.db.models import PredictRecordTB, MqTB
8
+ # from rabbitmqpy import Puber
9
+ # from datetime import datetime, timedelta
10
+ #
11
+ #
12
+ # def publish_predict_dict(predict_dict, **kwargs):
13
+ # # global puber
14
+ # puber = None
15
+ # try:
16
+ # if settings.RUN_PERMISSION:
17
+ # try:
18
+ # if settings.DEBUG:
19
+ # puber = Puber(
20
+ # settings.AMQP_URL,
21
+ # 'PipeGraphPypub_predict_e_test',
22
+ # 'direct',
23
+ # routing_key='PipeGraphPypub_predict_k_test'
24
+ # )
25
+ # else:
26
+ # puber = Puber(
27
+ # settings.AMQP_URL,
28
+ # 'PipeGraphPypub_predict_e',
29
+ # 'direct',
30
+ # routing_key='PipeGraphPypub_predict_k'
31
+ # )
32
+ # except:
33
+ # puber = None
34
+ # else:
35
+ # puber = None
36
+ # if puber is not None:
37
+ # puber.send(predict_dict)
38
+ # log_info = '发送MQ:exchange=PipeGraphPypub_predict_e,routing_key=PipeGraphPypub_predict_k,body=%s' % (
39
+ # str(predict_dict)[:100]
40
+ # )
41
+ # MqTB.add(
42
+ # pubdate=int((datetime.utcnow()+timedelta(hours=8)).strftime("%Y%m%d")),
43
+ # graph_id=predict_dict.get("id", 0),
44
+ # exchange="PipeGraphPypub_predict_e",
45
+ # queue="",
46
+ # route_key="PipeGraphPypub_predict_k",
47
+ # clock=predict_dict.get("clock", "12"),
48
+ # kind = 1
49
+ # )
50
+ # log.info(log_info, **kwargs)
51
+ # if kwargs.get('plog_record_id'):
52
+ # PredictRecordTB.set(is_pub=1).where(id=kwargs['plog_record_id'])
53
+ # except Exception:
54
+ # log.error(traceback.format_exc(), **kwargs)
@@ -0,0 +1,29 @@
1
+ import psutil
2
+ import time
3
+
4
def get_disk_mbps(interval=1):
    """Sample total disk throughput (read + write) over ``interval`` seconds
    and return it as a whole number of MB/s."""
    before = psutil.disk_io_counters()
    time.sleep(interval)
    after = psutil.disk_io_counters()
    # Per-direction rates, normalized to MB/s over the sampling window.
    read_rate = (after.read_bytes - before.read_bytes) / interval / 1024 / 1024
    write_rate = (after.write_bytes - before.write_bytes) / interval / 1024 / 1024
    return int(read_rate + write_rate)
14
+
15
+
16
def get_cpu_usage():
    """Return the system-wide CPU utilisation as a percentage."""
    return psutil.cpu_percent()
18
+
19
+
20
def get_memory_usage():
    """Return the percentage of virtual memory currently in use."""
    stats = psutil.virtual_memory()
    return stats.percent
22
+
23
+
24
def get_disk_usage(path="/"):
    """Return the percentage of disk space used on the partition containing
    ``path`` (defaults to the root filesystem)."""
    usage = psutil.disk_usage(path)
    return usage.percent
26
+
27
+
28
def get_load():
    """Return the system load average tuple (1, 5 and 15 minutes)."""
    return psutil.getloadavg()