oocana-python-executor 0.15.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oocana_python_executor-0.15.0.dist-info/METADATA +9 -0
- oocana_python_executor-0.15.0.dist-info/RECORD +18 -0
- oocana_python_executor-0.15.0.dist-info/WHEEL +4 -0
- oocana_python_executor-0.15.0.dist-info/entry_points.txt +5 -0
- python_executor/__init__.py +0 -0
- python_executor/block.py +189 -0
- python_executor/context.py +68 -0
- python_executor/data.py +5 -0
- python_executor/executor.py +247 -0
- python_executor/hook.py +50 -0
- python_executor/logger.py +19 -0
- python_executor/matplot/matplotlib_oomol/__init__.py +1 -0
- python_executor/matplot/matplotlib_oomol/oomol.py +28 -0
- python_executor/matplot/oomol_matplot_helper.py +68 -0
- python_executor/secret.py +146 -0
- python_executor/service.py +238 -0
- python_executor/topic.py +63 -0
- python_executor/utils.py +32 -0
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: oocana-python-executor
|
|
3
|
+
Version: 0.15.0
|
|
4
|
+
Summary: a client subscribe mqtt topic to execute oocana's block
|
|
5
|
+
Author-Email: l1shen <lishen1635@gmail.com>, yleaf <11785335+leavesster@users.noreply.github.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
Requires-Python: >=3.9
|
|
8
|
+
Requires-Dist: oocana
|
|
9
|
+
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
oocana_python_executor-0.15.0.dist-info/METADATA,sha256=yWmQbi6QROwN3iesVi_D4BtL8We8twY6Idk_owqDWY8,289
|
|
2
|
+
oocana_python_executor-0.15.0.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
|
|
3
|
+
oocana_python_executor-0.15.0.dist-info/entry_points.txt,sha256=n5hnDKR-LHdMNSqVy89zHBqyOMcRqva6KJrajUFfBnk,82
|
|
4
|
+
python_executor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
|
+
python_executor/block.py,sha256=DhKQwAkFfNHQvM0Y_uDxA6XejviEp2h3-Ead_uoG7q0,6209
|
|
6
|
+
python_executor/context.py,sha256=v_STYkHDNKHVwPiwhiE23IfjzGxRncVTrrZqsc6q76Y,2549
|
|
7
|
+
python_executor/data.py,sha256=KF-LNS1fZkjRSTSt8jrVTgZV712y6rEPF5Qi2pqutdk,139
|
|
8
|
+
python_executor/executor.py,sha256=IzSKRkMjR9qyh-M15h4OiC5xtzbSDRR1529GQXKhSOY,10023
|
|
9
|
+
python_executor/hook.py,sha256=lqWkK2I4I8H8AjYOv0YV5hk8X1ZdECCyueW4gU7JL48,1523
|
|
10
|
+
python_executor/logger.py,sha256=kzY_0QT3M72JP1WYyk3p9LVo-iEd6DZjwpwoswb6v8U,418
|
|
11
|
+
python_executor/matplot/matplotlib_oomol/__init__.py,sha256=_5Y6euJwP32h4I8WMtlZ-tJzGOqFk9v6CItmWSQyJek,37
|
|
12
|
+
python_executor/matplot/matplotlib_oomol/oomol.py,sha256=WIdaICAYrJ0nw67hdPHxAOfOQjgvHmeOM-WXwCb2Rr8,1057
|
|
13
|
+
python_executor/matplot/oomol_matplot_helper.py,sha256=lqNM7jl4pjzB0vo_ubYfXn1JSuZf3OKDrGXxhdnPDig,2694
|
|
14
|
+
python_executor/secret.py,sha256=-dAnbNKv9XK-zfxoMulIKWDGt4NHAXJ45cCLT9iBiKQ,5984
|
|
15
|
+
python_executor/service.py,sha256=XcYz7B7Mc_x5LpEw9b81gDmcNvUaZh4UIngDeOMUVwA,9209
|
|
16
|
+
python_executor/topic.py,sha256=YAMD5eZwjYXQYJrYLdCBh-xt2G3jRI1eEed-0K9E6XA,2204
|
|
17
|
+
python_executor/utils.py,sha256=3liF5N94KDbj41wbqiD9Pt0SNtLi-uP8jXMyyUheYQ0,892
|
|
18
|
+
oocana_python_executor-0.15.0.dist-info/RECORD,,
|
|
File without changes
|
python_executor/block.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
from oocana import Context, Mainframe
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
from typing import Optional, TypedDict
|
|
4
|
+
import inspect
|
|
5
|
+
import traceback
|
|
6
|
+
import logging
|
|
7
|
+
from .data import store, vars, EXECUTOR_NAME
|
|
8
|
+
from .context import createContext
|
|
9
|
+
from .hook import ExitFunctionException
|
|
10
|
+
import os
|
|
11
|
+
import sys
|
|
12
|
+
import importlib
|
|
13
|
+
import importlib.util
|
|
14
|
+
import threading
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class ExecutorOptionsDict(TypedDict):
    """Options describing where the block's entry function lives."""
    function: Optional[str]
    entry: Optional[str]
    source: Optional[str]

# "entry" and "source" are mutually exclusive: a block provides one of them.
class ExecutorDict(TypedDict):
    options: Optional[ExecutorOptionsDict]

# Temp scriptlet files written for inline "source" blocks; recorded but no
# longer cleaned up.
tmp_files = set()

@dataclass
class ExecutePayload:
    """Message payload describing a single block execution request."""
    session_id: str
    job_id: str
    dir: str
    executor: ExecutorDict
    outputs: Optional[dict] = None

    def __init__(self, *args, **kwargs):
        # Positional form is (session_id, job_id, executor, dir, outputs) —
        # note this differs from the field declaration order above.
        if args:
            self.session_id = args[0]
            self.job_id = args[1]
            self.executor = args[2]
            self.dir = args[3]
            self.outputs = args[4]
        # Keyword form: copy every supplied key onto the instance verbatim.
        for key, value in kwargs.items():
            setattr(self, key, value)

lock = threading.Lock()
|
|
48
|
+
|
|
49
|
+
def load_module(file_path: str, source_dir=None):
    """Load a python file as a module, cached in sys.modules under its absolute path.

    Args:
        file_path: absolute or relative path to the .py file.
        source_dir: base directory used to resolve a relative ``file_path``;
            defaults to the current working directory.

    Returns:
        The loaded (or previously cached) module object.
    """
    if os.path.isabs(file_path):
        file_abs_path = file_path
    else:
        dirname = source_dir if source_dir else os.getcwd()
        file_abs_path = os.path.abspath(os.path.join(dirname, file_path))
    with lock:
        if file_abs_path in sys.modules:
            return sys.modules[file_abs_path]

        # Let the loaded script import sibling files next to it.
        module_dir = os.path.dirname(file_abs_path)
        sys.path.insert(0, module_dir)

        file_spec = importlib.util.spec_from_file_location(file_abs_path, file_abs_path)
        module = importlib.util.module_from_spec(file_spec)  # type: ignore
        sys.modules[file_abs_path] = module

        try:
            file_spec.loader.exec_module(module)  # type: ignore
        except Exception:
            # FIX: don't leave a half-initialized module cached — otherwise a
            # later retry would silently get the broken module back instead of
            # re-executing the (possibly fixed) file.
            sys.modules.pop(file_abs_path, None)
            raise
        return module
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def output_return_object(obj, context: Context):
    """Forward a block function's return value to the context.

    None finishes the block; ``context.keepAlive`` leaves it running; a dict
    is emitted entry-by-entry as outputs before finishing; anything else
    finishes the block with an error message.
    """
    if obj is None:
        context.done()
        return
    if obj is context.keepAlive:
        return
    if not isinstance(obj, dict):
        context.done(f"return object needs to be a dictionary, but get type: {type(obj)}")
        return
    for handle, value in obj.items():
        context.output(handle, value)
    context.done()
|
|
82
|
+
|
|
83
|
+
logger = logging.getLogger(EXECUTOR_NAME)
|
|
84
|
+
|
|
85
|
+
async def run_block(message, mainframe: Mainframe, session_dir: str):
    """Execute one block described by ``message`` and report the result.

    Builds the block Context, loads the target module (either the block's
    entry file or an inline "source" scriptlet written to .scriptlets/),
    resolves the entry function and invokes it (sync or async, with 0/1/2
    parameters), then publishes outputs / errors through the context.

    Args:
        message: raw execute payload from the scheduler.
        mainframe: mqtt connection used for reporting.
        session_dir: per-session temp directory passed into the context.
    """
    logger.info(f"block {message.get('job_id')} start")
    try:
        payload = ExecutePayload(**message)
        context = createContext(mainframe, payload.session_id, payload.job_id, store, payload.outputs, session_dir)
    except Exception:
        traceback_str = traceback.format_exc()
        # The rust side guarantees a well-formed message, so this should not
        # happen; it mainly guards against upstream protocol changes.
        mainframe.send(
            {
                "job_id": message["job_id"],
                "session_id": message["session_id"],
            },
            {
                "type": "BlockFinished",
                "job_id": message["job_id"],
                "error": traceback_str
            })
        return

    vars.set(context)

    load_dir = payload.dir

    options = payload.executor.get("options")

    node_id = context.node_id

    file_path = options["entry"] if options is not None and options.get("entry") is not None else 'index.py'

    source = options.get("source") if options is not None else None
    if source is not None:
        if not os.path.exists(load_dir):
            os.makedirs(load_dir)

        dir_path = os.path.join(load_dir, ".scriptlets")
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        tmp_py = os.path.join(dir_path, f"{node_id}.py")
        # Record the temp file; it is intentionally no longer cleaned up.
        tmp_files.add(tmp_py)

        with open(tmp_py, "w") as f:
            f.write(source)
        file_path = tmp_py

    try:
        # TODO: error handling here should be more detailed and surface
        # syntax-error hints.
        index_module = load_module(file_path, load_dir)  # type: ignore
    except Exception:
        traceback_str = traceback.format_exc()
        context.done(traceback_str)
        return
    # FIX: guard on `options`, not `payload.executor` — the original condition
    # still evaluated `options.get("function")` when options was None, raising
    # AttributeError instead of falling back to 'main'.
    function_name: str = options.get("function") if options is not None and options.get("function") is not None else 'main'  # type: ignore
    fn = index_module.__dict__.get(function_name)

    if fn is None:
        context.done(f"function {function_name} not found in {file_path}")
        return
    if not callable(fn):
        context.done(f"{function_name} is not a function in {file_path}")
        return

    try:
        signature = inspect.signature(fn)
        params_count = len(signature.parameters)
        result = None
        traceback_str = None

        try:
            if inspect.iscoroutinefunction(fn):
                if params_count == 0:
                    result = await fn()
                elif params_count == 1:
                    # A single parameter annotated as Context gets the context;
                    # otherwise it gets the inputs dict.
                    only_context_param = list(signature.parameters.values())[0].annotation is Context
                    result = await fn(context) if only_context_param else await fn(context.inputs)
                else:
                    result = await fn(context.inputs, context)
            else:
                if params_count == 0:
                    result = fn()
                elif params_count == 1:
                    only_context_param = list(signature.parameters.values())[0].annotation is Context
                    result = fn(context) if only_context_param else fn(context.inputs)
                else:
                    result = fn(context.inputs, context)
        except ExitFunctionException as e:
            # Raised by the sys.exit/exit hooks inside block code.
            if e.args[0] is not None:
                context.done("block call exit with message: " + str(e.args[0]))
            else:
                context.done()
        except Exception:
            traceback_str = traceback.format_exc()

        # NOTE(review): after ExitFunctionException we fall through here with
        # result=None, so context.done() runs a second time via
        # output_return_object — presumably done() is idempotent; confirm.
        if traceback_str is not None:
            context.done(traceback_str)
        else:
            output_return_object(result, context)
    except Exception:
        traceback_str = traceback.format_exc()
        context.done(traceback_str)
    finally:
        logger.info(f"block {message.get('job_id')} done")
|
|
189
|
+
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from oocana import Mainframe, Context, StoreKey, BlockInfo, BinValueDict, VarValueDict, InputHandleDef, is_bin_value, is_var_value
|
|
3
|
+
from typing import Dict
|
|
4
|
+
from .secret import replace_secret
|
|
5
|
+
import os.path
|
|
6
|
+
from .logger import ContextHandler
|
|
7
|
+
from .data import EXECUTOR_NAME
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(EXECUTOR_NAME)
|
|
10
|
+
|
|
11
|
+
def createContext(
    mainframe: Mainframe, session_id: str, job_id: str, store, output, session_dir: str
) -> Context:
    """Build the Context for one block run.

    Fetches node props from the mainframe, resolves secret placeholders in the
    inputs, dereferences var-handles from the in-process store, inlines
    oomol/bin file contents, then wires a per-block logger into the context.

    Returns:
        A fully initialized Context ready to hand to the block function.
    """
    node_props = mainframe.notify_block_ready(session_id, job_id)

    inputs_def: Dict[str, Dict] | None = node_props.get("inputs_def")
    inputs = node_props.get("inputs")

    if inputs_def is not None and inputs is not None:

        inputs_def_handles: Dict[str, InputHandleDef] = {}
        for k, v in inputs_def.items():
            inputs_def_handles[k] = InputHandleDef(**v)

        inputs = replace_secret(inputs, inputs_def_handles, node_props.get("inputs_def_patch"))

        for k, v in inputs.items():
            input_def = inputs_def_handles.get(k)
            if input_def is None:
                continue
            if is_var_value(v):
                # Var handles point at objects held in the executor store.
                wrap_var: VarValueDict = v
                try:
                    ref = StoreKey(**wrap_var["value"])
                except:  # noqa: E722
                    logger.warning(f"not valid object ref: {wrap_var}")
                    continue
                if ref in store:
                    inputs[k] = store.get(ref)
                else:
                    logger.error(f"object {ref} not found in store")
            elif is_bin_value(v):
                # Bin handles carry a file path whose bytes are inlined.
                wrap_bin: BinValueDict = v
                path = wrap_bin["value"]
                if isinstance(path, str):
                    # check file path v is exist
                    if not os.path.exists(path):
                        logger.error(f"file {path} for oomol/bin is not found")
                        continue

                    with open(path, "rb") as f:
                        inputs[k] = f.read()
                else:
                    logger.error(f"not valid bin handle: {v}")

    if inputs is None:
        inputs = {}

    blockInfo = BlockInfo(**node_props)

    ctx = Context(inputs, blockInfo, mainframe, store, output, session_dir)
    # Keep block logs separate from the executor's own logger: some libraries
    # call print() inside logging handlers, which would recurse into the
    # print hook.
    block_logger = logging.getLogger(f"block {job_id}")
    ctx_handler = ContextHandler(ctx)
    block_logger.addHandler(ctx_handler)
    # FIX: attach the per-block logger; the original assigned the module-level
    # executor `logger`, leaving block_logger and its ContextHandler unused.
    ctx._logger = block_logger
    return ctx
|
python_executor/executor.py
ADDED
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import os
|
|
5
|
+
import queue
|
|
6
|
+
import sys
|
|
7
|
+
import logging
|
|
8
|
+
from . import hook
|
|
9
|
+
from oocana import Mainframe, ServiceExecutePayload
|
|
10
|
+
from .utils import run_in_new_thread, run_async_code, oocana_dir
|
|
11
|
+
from .block import run_block
|
|
12
|
+
from oocana import EXECUTOR_NAME
|
|
13
|
+
from .matplot.oomol_matplot_helper import import_helper, add_matplot_module
|
|
14
|
+
from typing import Literal
|
|
15
|
+
from .topic import prepare_report_topic, service_config_topic, run_action_topic, ServiceTopicParams, ReportStatusPayload, exit_report_topic, status_report_topic
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(EXECUTOR_NAME)
|
|
18
|
+
service_store: dict[str, Literal["launching", "running"]] = {}
|
|
19
|
+
job_set = set()
|
|
20
|
+
|
|
21
|
+
# 日志目录 ~/.oocana/sessions/{session_id}
|
|
22
|
+
# executor 的日志都会记录在 [python-executor-{suffix}.log | python-executor.log]
|
|
23
|
+
# 全局 logger 会记录在 python-{suffix}.log | python.log
|
|
24
|
+
def config_logger(session_id: str, suffix: str | None, output: Literal["console", "file"]):
    """Configure the executor logger and the root logger for this session.

    With output == "file", executor logs go to
    ~/.oocana/sessions/{session_id}/python-executor[-suffix].log and the root
    logger to python[-suffix].log in the same directory; with "console" both
    go to stdout.

    Args:
        session_id: session whose log directory is used.
        suffix: optional filename suffix to distinguish multiple executors.
        output: "file" or "console" destination.
    """
    format = '%(asctime)s - %(levelname)s - {%(pathname)s:%(lineno)d} - %(message)s'
    fmt = logging.Formatter(format)
    logger.setLevel(logging.DEBUG)
    if output == "file":
        executor_dir = os.path.join(oocana_dir(), "sessions", session_id)
        logger_file = os.path.join(executor_dir, f"python-executor-{suffix}.log") if suffix is not None else os.path.join(executor_dir, "python-executor.log")

        # Create the session directory on first use.
        if not os.path.exists(logger_file):
            os.makedirs(os.path.dirname(logger_file), exist_ok=True)

        print(f"setup logging in file {logger_file}")
        h = logging.FileHandler(logger_file)

        # Root logger gets its own file, separate from the executor's.
        global_logger_file = os.path.join(executor_dir, f"python-{suffix}.log") if suffix is not None else os.path.join(executor_dir, "python.log")
        logging.basicConfig(filename=global_logger_file, level=logging.DEBUG, format=format)
    else:
        logging.basicConfig(level=logging.DEBUG, format=format)
        h = logging.StreamHandler(sys.stdout)

    h.setFormatter(fmt)
    logger.addHandler(h)
    # Keep separate from the root logger: some libraries use print() inside
    # global logging handlers, which would recurse into the print hook.
    logger.propagate = False
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
async def run_executor(address: str, session_id: str, package: str | None, session_dir: str, suffix: str | None = None):
    """Connect to the broker and serve block / service-block requests forever.

    Subscribes to the executor topics, then loops: messages queued by the mqtt
    callbacks are drained and dispatched — regular blocks run on new threads,
    service blocks are routed to (possibly freshly spawned) service processes.

    Args:
        address: mqtt broker address.
        session_id: session this executor serves.
        package: optional package path; when set, only same-package blocks run.
        session_dir: per-session temp directory.
        suffix: optional client-id / log-file suffix.
    """
    if suffix is not None:
        mainframe = Mainframe(address, f"python-executor-{suffix}", logger)
    else:
        mainframe = Mainframe(address, f"python-executor-{session_id}", logger)

    mainframe.connect()

    print(f"connecting to broker {address} success")
    sys.stdout.flush()

    logger.info("executor start") if package is None else logger.info(f"executor start for package {package}")

    add_matplot_module()
    import_helper(logger)

    # add package to sys.path
    if package is not None:
        sys.path.append(package)
    elif os.path.exists("/app/workspace"):
        sys.path.append("/app/workspace")

    def not_current_session(message):
        # True when the message belongs to a different session.
        return message.get("session_id") != session_id

    def not_current_package(message):
        # True when the message targets a different package than ours.
        return message.get("package") != package

    # The current mqtt library deadlocks when publishing inside a subscribe
    # callback (see https://github.com/eclipse/paho.mqtt.python/issues/527 and
    # https://stackoverflow.com/a/36964192/4770006). Work around it: every
    # callback that would publish instead queues a resolved future here, and
    # the main loop below does the publishing.
    fs = queue.Queue()
    loop = asyncio.get_event_loop()

    def execute_block(message):
        # mqtt callback: queue a regular block-run request.
        if not_current_session(message):
            return

        if not_current_package(message):
            return

        # Temporary workaround for https://github.com/oomol/oocana-rust/issues/310:
        # drop duplicate run requests for a job that is already running.
        job_id = message.get("job_id")
        if job_id in job_set:
            logger.warning(f"job {job_id} already running, ignore")
            return
        job_set.add(job_id)

        nonlocal fs
        f = loop.create_future()
        fs.put(f)
        f.set_result(message)

    def execute_service_block(message):
        # mqtt callback: queue a service-block request.
        if not_current_session(message):
            return

        if not_current_package(message):
            return

        nonlocal fs
        f = loop.create_future()
        fs.put(f)
        f.set_result(message)

    def service_exit(message: ReportStatusPayload):
        # A service process reported exit: forget it.
        service_hash = message.get("service_hash")
        if service_hash in service_store:
            del service_store[service_hash]

    def service_status(message: ReportStatusPayload):
        # A service process reported ready: mark it running.
        service_hash = message.get("service_hash")
        if service_hash in service_store:
            service_store[service_hash] = "running"

    def report_message(message):
        type = message.get("type")
        if type == "SessionFinished":
            if not_current_session(message):
                return
            logger.info(f"session {session_id} finished, exit executor")
            mainframe.disconnect()  # TODO: even after disconnect() the broker does not show a clean disconnect; investigate later.
            if os.getenv("IS_FORKED"):  # forked processes cannot exit via sys.exit
                os._exit(0)
            else:
                # Use the saved, un-hooked exit (sys.exit is patched by hook.py).
                hook.original_exit(0)


    mainframe.subscribe(f"executor/{EXECUTOR_NAME}/run_block", execute_block)
    mainframe.subscribe(f"executor/{EXECUTOR_NAME}/run_service_block", execute_service_block)
    mainframe.subscribe('report', report_message)
    mainframe.subscribe(exit_report_topic(), service_exit)
    mainframe.subscribe(status_report_topic(), service_status)

    mainframe.notify_executor_ready(session_id, EXECUTOR_NAME, package)

    async def spawn_service(message: ServiceExecutePayload, service_hash: str):
        # Launch a dedicated service process and feed it its config once ready.
        logger.info(f"create new service {message.get('dir')}")
        service_store[service_hash] = "launching"

        parent_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

        # Global services outlive the session (no --session-id passed).
        is_global_service = message.get("service_executor").get("stop_at") in ["app_end", "never"]

        if is_global_service:
            process = await asyncio.create_subprocess_shell(
                f"python -u -m python_executor.service --address {address} --service-hash {service_hash} --session-dir {session_dir}",
                cwd=parent_dir
            )
        else:
            process = await asyncio.create_subprocess_shell(
                f"python -u -m python_executor.service --address {address} --session-id {session_id} --service-hash {service_hash} --session-dir {session_dir}",
                cwd=parent_dir
            )
        params: ServiceTopicParams = {
            "service_hash": service_hash,
            "session_id": session_id
        }

        def send_service_config(params: ServiceTopicParams, message: ServiceExecutePayload):

            async def run():
                mainframe.publish(service_config_topic(params), message)
                service_store[service_hash] = "running"
            run_in_new_thread(run)

        # FIXME: mqtt cannot publish immediately after subscribe; needs a fix.
        mainframe.subscribe(prepare_report_topic(params), lambda _: send_service_config(params, message))

        await process.wait()
        logger.info(f"service {service_hash} exit")
        del service_store[service_hash]


    def run_service_block(message: ServiceExecutePayload):
        # Forward the block request to an already-running service process.
        logger.info(f"service block {message.get('job_id')} start")
        service_hash = message.get("service_hash")
        params: ServiceTopicParams = {
            "service_hash": service_hash,
            "session_id": session_id
        }
        mainframe.publish(run_action_topic(params), message)

    # Main dispatch loop: drain queued messages outside the mqtt callbacks.
    while True:
        await asyncio.sleep(1)
        if not fs.empty():
            f = fs.get()
            message = await f
            if message.get("service_executor") is not None:
                service_hash = message.get("service_hash")
                status = service_store.get(service_hash)
                if status is None:
                    asyncio.create_task(spawn_service(message, service_hash))
                elif status == "running":
                    run_service_block(message)
                elif status == "launching":
                    logger.info(f"service {service_hash} is launching, set message back to fs to wait next time")
                    fs.put(f)
            else:
                if not_current_session(message):
                    continue
                run_block_in_new_thread(message, mainframe, session_dir=session_dir)
|
|
215
|
+
|
|
216
|
+
def run_block_in_new_thread(message, mainframe: Mainframe, session_dir: str):
    """Run one block on a dedicated thread so the dispatch loop stays free."""
    async def _task():
        await run_block(message, mainframe, session_dir=session_dir)

    run_in_new_thread(_task)
|
|
221
|
+
|
|
222
|
+
def main():
    """CLI entry point: parse arguments, configure logging, run the executor."""
    import argparse

    parser = argparse.ArgumentParser(description="run executor with address, session-id, tmp-dir")
    parser.add_argument("--session-id", help="executor subscribe session id", required=True)
    parser.add_argument("--address", help="mqtt address", default="mqtt://127.0.0.1:47688")
    parser.add_argument("--session-dir", help="a tmp dir for whole session", required=True)
    parser.add_argument("--output", help="output log to console or file", default="file", choices=["console", "file"])
    parser.add_argument("--package", help="package path, if set, executor will only run same package block", default=None)
    parser.add_argument("--suffix", help="suffix for log file", default=None)

    cli = parser.parse_args()

    session_id = str(cli.session_id)
    log_output: Literal["console", "file"] = cli.output

    config_logger(session_id, cli.suffix, log_output)

    run_async_code(run_executor(
        address=cli.address,
        session_id=session_id,
        package=cli.package,
        session_dir=cli.session_dir,
        suffix=cli.suffix,
    ))

if __name__ == '__main__':
    main()
|
python_executor/hook.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
from sys import exit
|
|
2
|
+
from builtins import exit as global_exit
|
|
3
|
+
from typing import TypeAlias, Any
|
|
4
|
+
import sys
|
|
5
|
+
import builtins
|
|
6
|
+
from .data import vars, EXECUTOR_NAME
|
|
7
|
+
import logging
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(EXECUTOR_NAME)
|
|
10
|
+
|
|
11
|
+
class ExitFunctionException(Exception):
    """Raised by the exit hooks below instead of terminating the process."""
    pass

# Keep references to the real implementations before they get patched
# (run_executor uses original_exit to actually terminate).
original_exit = exit
original_global_exit = global_exit
original_print = print

_ExitCode: TypeAlias = str | int | None

def sys_exit(status: _ExitCode = None) -> None:
    # Replacement for sys.exit: turn process exit into a catchable exception
    # so a block calling exit() does not kill the executor.
    raise ExitFunctionException(status)

def sys_global_exit(status: _ExitCode = None) -> None:
    # Replacement for the builtin exit(), same contract as sys_exit.
    raise ExitFunctionException(status)
|
|
25
|
+
|
|
26
|
+
def global_print(*values: object, sep: str | None = " ", end: str | None = "\n", file: Any | None = None, flush: bool = False) -> None:
    """Replacement for builtins.print: mirror the message into the current
    block's context log, then delegate to the real print."""
    ctx = None
    try:
        ctx = vars.get()
    except LookupError:
        # This logger never propagates to root handlers, so even a
        # print-based root handler cannot recurse back into this hook.
        logger.warning("print called outside of block")
    except Exception as e:
        logger.error(f"print error: {e}")

    if ctx is not None:
        try:
            ctx.report_log((sep or " ").join(map(str, values)))
        except Exception as e:
            logger.error(f"transform print message to context log error: {e}")

    original_print(*values, sep=sep, end=end, file=file, flush=flush)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# Install the hooks: from here on, sys.exit / exit raise
# ExitFunctionException and print is mirrored into the block context log.
sys.exit = sys_exit
builtins.exit = sys_global_exit
builtins.print = global_print
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from oocana import Context
|
|
3
|
+
import weakref
|
|
4
|
+
|
|
5
|
+
class ContextHandler(logging.Handler):
    """logging.Handler that forwards records to a block Context via report_log.

    The context is held through a weak reference so the handler never keeps a
    finished block's context alive.
    """

    def __init__(self, context: Context):
        super().__init__()
        self._context = weakref.ref(context)

    @property
    def context(self):
        # The stored weakref itself; call it to obtain the Context (or None
        # once the context has been garbage-collected).
        return self._context

    def emit(self, record):
        target = self.context()
        if target:
            target.report_log(self.format(record))
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .oomol import show, FigureCanvas
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"""matplotlib.use('module://matplotlib_oomol'), remember to add this file to PYTHONPATH"""
|
|
2
|
+
|
|
3
|
+
from matplotlib.backend_bases import Gcf # type: ignore
|
|
4
|
+
from matplotlib.backends.backend_agg import FigureCanvasAgg # type: ignore
|
|
5
|
+
from python_executor.data import vars
|
|
6
|
+
|
|
7
|
+
FigureCanvas = FigureCanvasAgg
|
|
8
|
+
|
|
9
|
+
def show(*args, **kwargs):
    """matplotlib show() replacement: encode every open figure as a base64 PNG
    data URL and send the batch to the block context as an image preview."""
    import sys
    from io import BytesIO
    from base64 import b64encode
    # NOTE(review): `vars` is a module-level ContextVar object and is
    # presumably never None, so the else branch looks unreachable — confirm.
    if vars is not None:
        context = vars.get()
        images = []
        for figmanager in Gcf.get_all_fig_managers():
            buffer = BytesIO()
            figmanager.canvas.figure.savefig(buffer, format='png')
            buffer.seek(0)
            png = buffer.getvalue()
            buffer.close()
            base64Data = b64encode(png).decode('utf-8')
            url = f'data:image/png;base64,{base64Data}'
            images.append(url)
        # Only preview when at least one figure exists.
        if images:
            context.preview({ "type": "image", "data": images })
    else:
        print('matplotlib_oomol: no sys.modules["oomol"]', file=sys.stderr)
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
from python_executor.data import vars
|
|
2
|
+
|
|
3
|
+
def add_matplot_module():
    """Prepend this helper's directory to sys.path so that
    matplotlib.use('module://matplotlib_oomol') can resolve the backend."""
    import os.path
    import sys

    helper_dir = os.path.dirname(os.path.abspath(__file__))
    sys.path.insert(0, helper_dir)
|
|
8
|
+
|
|
9
|
+
def import_helper(logger):
    """Best-effort setup of preview integrations (matplotlib backend/theme and
    plotly renderer). Every step is optional: failures are logged or ignored
    so executors without these libraries keep working."""
    # Replace matplotlib's backend with our preview backend.
    try:
        import matplotlib  # type: ignore
        # matplotlib_oomol.py's directory must already be on PYTHONPATH
        # (see add_matplot_module).
        matplotlib.use('module://matplotlib_oomol')
    except Exception:  # FIX: was bare except — don't swallow KeyboardInterrupt/SystemExit
        logger.error("import matplotlib failed")

    # Apply a matplotlib theme matching the app color scheme.
    try:
        import os
        import matplotlib.pyplot as plt  # type: ignore
        plt.style.use("classic" if os.getenv("OOMOL_COLOR_SCHEME", "dark") == "light" else "dark_background")
        plt.rcParams['font.sans-serif'] = ['Source Han Sans SC']
    except Exception:  # FIX: was bare except
        pass

    # Replace plotly's show() with a renderer that previews through the context.
    try:
        import os
        import plotly.io as pio  # type: ignore
        from plotly.io import renderers  # type: ignore
        from plotly.io.base_renderers import ExternalRenderer  # type: ignore

        pio.templates.default = "plotly" if os.getenv("OOMOL_COLOR_SCHEME", "dark") == "light" else "plotly_dark"

        class OomolRenderer(ExternalRenderer):
            def render(self, fig_dict):
                if vars:
                    context = vars.get()

                    import re
                    from plotly.io import to_html  # type: ignore
                    from plotly.offline import get_plotlyjs_version  # type: ignore

                    cdn_ver = get_plotlyjs_version()
                    cdn_url = f"https://cdn.jsdelivr.net/npm/plotly.js-dist-min@{cdn_ver}/plotly.min.js"

                    html = to_html(
                        fig_dict,
                        include_plotlyjs=cdn_url,
                        include_mathjax="cdn",
                        full_html=True,
                        default_width="100%",
                        default_height="100%",
                        validate=False,
                    )

                    color_scheme = os.getenv("OOMOL_COLOR_SCHEME", "dark")
                    # The generated html has default body margin 8px in chrome, remove it.
                    html = re.sub(r'<html[^>]*?>', r'\g<0><style>html { color-scheme: ' + color_scheme + '; height: 100%; align-content: center } ' +
                                  'body { overflow: hidden; margin: 0 }</style>', html, flags=re.I)
                    context.preview({ "type": "html", "data": html })
                else:
                    logger.warning('plotly: no sys.modules["oomol"]')

        renderers['oomol'] = OomolRenderer()
        renderers.default = 'oomol'
    except Exception:  # FIX: was bare except
        logger.warning("import plotly failed")
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
from typing import Any, Dict
|
|
2
|
+
from oocana import InputHandleDef, FieldSchema, ObjectFieldSchema, ArrayFieldSchema, SecretFieldSchema
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
import json
|
|
6
|
+
import re
|
|
7
|
+
from .data import EXECUTOR_NAME
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(EXECUTOR_NAME)
|
|
10
|
+
SECRET_FILE = os.path.expanduser("~") + "/app-config/oomol-secrets/secrets.json"
|
|
11
|
+
|
|
12
|
+
# "${{OO_SECRET:type,name,key}}",捕获组为 (type,name,key)
|
|
13
|
+
SECRET_REGEX = r"\"\$\{\{OO_SECRET:([\S]+),([\S]+),([\S]+)\}\}\""
|
|
14
|
+
|
|
15
|
+
def replace_secret(
    value: Any,
    root_def: Dict[str, InputHandleDef],
    patch: Dict[str, Any] | None = None,
) -> Any:
    """Resolve secret references inside a block's input *value*.

    Three passes:
    1. textual pass: every "${{OO_SECRET:type,name,key}}" placeholder anywhere
       in the JSON-serialized value is replaced with the resolved secret;
    2. schema pass: handles declared as secret handles in *root_def* are
       resolved (recursing into object/array schemas via replace_sub_secret);
    3. patch pass: entries in *patch* whose schema declares
       contentMediaType == "oomol/secret" are resolved at the given path.

    Non-dict values are returned unchanged. When the secret store cannot be
    read, placeholders are left as-is (errors are logged, not raised).
    """
    if not isinstance(value, dict):
        return value

    try:
        # use a context manager so the file handle is closed deterministically
        # (the original json.load(open(...)) relied on GC)
        with open(SECRET_FILE) as f:
            secretJson = json.load(f)
    except FileNotFoundError:
        logger.warning(f"secret file {SECRET_FILE} not found")
        secretJson = None
    except json.JSONDecodeError:
        logger.error(f"secret file {SECRET_FILE} is not a valid json file")
        secretJson = None

    string_value = json.dumps(value)
    result = re.findall(SECRET_REGEX, string_value, re.MULTILINE)
    if result:
        if secretJson is None:
            logger.error(f"secret file {SECRET_FILE} not found")
        else:
            for r in result:
                # escape quotes so the substituted text keeps the JSON valid
                secret = get_secret(f"{r[0]},{r[1]},{r[2]}", secretJson).replace('"', '\\"')
                string_value = string_value.replace(f"${{{{OO_SECRET:{r[0]},{r[1]},{r[2]}}}}}", secret)
            value = json.loads(string_value)

    for k, v in value.items():
        input_def = root_def.get(k)
        if input_def is None:
            continue
        # None is passed through untouched; it can only occur for nullable handles
        if input_def.is_secret_handle() and v is not None:
            value[k] = get_secret(v, secretJson)
        # var handles are re-processed in the Context layer; vars only appear at
        # the root and re-iterating there is cheap. Merge it here if recursion
        # is ever needed.
        elif isinstance(input_def.json_schema, (ObjectFieldSchema, ArrayFieldSchema)):
            replace_sub_secret(v, input_def.json_schema, secretJson)

    if patch is not None:
        for k, patch_list in patch.items():
            input_value = value.get(k)
            if input_value is None:
                continue
            for patch_item in patch_list:
                # BUGFIX: "schema" and "path" used to be read from the whole
                # patch dict (and the loop variable shadowed the `patch`
                # parameter), so no patch entry was ever applied. Read them
                # from the current entry instead.
                is_secret = patch_item.get("schema", {}).get("contentMediaType") == "oomol/secret"
                if not is_secret:
                    continue

                path = patch_item.get("path")
                if path is None:
                    # replace the whole handle value, hence assign on `value`
                    value[k] = get_secret(input_value, secretJson)
                elif isinstance(path, str) and path in input_value:
                    input_value[path] = get_secret(input_value[path], secretJson)
                elif isinstance(path, int) and path < len(input_value):
                    input_value[path] = get_secret(input_value[path], secretJson)
                elif isinstance(path, list):
                    # walk down to the parent container of the final key
                    tmp = input_value
                    for p in path[:-1]:
                        tmp = tmp[p]
                        if tmp is None:
                            logger.error(f"invalid path: {path}")
                            break
                    if tmp is not None:
                        tmp[path[-1]] = get_secret(tmp[path[-1]], secretJson)
                else:
                    logger.error(f"invalid path: {path}")

    return value
|
|
85
|
+
|
|
86
|
+
def replace_sub_secret(value: Any, field: FieldSchema, secretJson: Dict[str, Any] | None) -> Any:
    """Recursively resolve secret fields in *value* guided by its schema.

    Objects are walked through their declared properties, arrays through their
    item schema; leaves whose schema is a SecretFieldSchema are resolved via
    get_secret. Values without a matching schema are left untouched.
    """
    object_like = isinstance(value, dict) and isinstance(field, ObjectFieldSchema)
    array_like = isinstance(value, list) and isinstance(field, ArrayFieldSchema)

    if object_like:
        if field.properties is None:
            return value

        for key, child in value.items():
            child_schema = field.properties.get(key)
            if child_schema is None:
                continue
            if isinstance(child_schema, SecretFieldSchema):
                value[key] = get_secret(child, secretJson)
            elif isinstance(child_schema, (ObjectFieldSchema, ArrayFieldSchema)):
                value[key] = replace_sub_secret(child, child_schema, secretJson)
    elif array_like:
        if field.items is None:
            return value

        item_schema = field.items
        for idx, child in enumerate(value):
            if isinstance(item_schema, SecretFieldSchema):
                value[idx] = get_secret(child, secretJson)
            elif isinstance(item_schema, (ObjectFieldSchema, ArrayFieldSchema)):
                value[idx] = replace_sub_secret(child, item_schema, secretJson)

    return value
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def get_secret(path: str, secretJson: dict | None) -> str:
|
|
114
|
+
if secretJson is None:
|
|
115
|
+
# throw error
|
|
116
|
+
logger.error(f"secret file {SECRET_FILE} not found")
|
|
117
|
+
raise ValueError("secret file not found or invalid json file")
|
|
118
|
+
|
|
119
|
+
assert isinstance(secretJson, dict)
|
|
120
|
+
|
|
121
|
+
try:
|
|
122
|
+
[secretType, secretName, secretKey] = path.split(",")
|
|
123
|
+
except ValueError:
|
|
124
|
+
logger.error(f"invalid secret path: {path}")
|
|
125
|
+
return path
|
|
126
|
+
|
|
127
|
+
s = secretJson.get(secretName)
|
|
128
|
+
|
|
129
|
+
if s is None:
|
|
130
|
+
logger.error(f"secret {secretName} not found in {SECRET_FILE}")
|
|
131
|
+
return path
|
|
132
|
+
|
|
133
|
+
if s.get("secretType") != secretType:
|
|
134
|
+
logger.warning(f"secret type mismatch: {s.get('secretType')} != {secretType}")
|
|
135
|
+
|
|
136
|
+
secrets: list[Any] = s.get("secrets")
|
|
137
|
+
if secrets:
|
|
138
|
+
for secret in secrets:
|
|
139
|
+
if secret.get("secretKey") == secretKey:
|
|
140
|
+
return secret.get("value")
|
|
141
|
+
else:
|
|
142
|
+
logger.error(f"secret {secretName} has no value")
|
|
143
|
+
return path
|
|
144
|
+
|
|
145
|
+
logger.error(f"secret {secretKey} not found in {secretName}")
|
|
146
|
+
return path
|
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
from typing import Callable, Any
|
|
2
|
+
from oocana import ServiceExecutePayload, Mainframe, StopAtOption, ServiceContextAbstractClass, ServiceMessage, BlockHandler
|
|
3
|
+
from .block import output_return_object, load_module
|
|
4
|
+
from .context import createContext
|
|
5
|
+
from .utils import run_async_code_and_loop, loop_in_new_thread, run_in_new_thread, oocana_dir
|
|
6
|
+
from .topic import service_config_topic, ServiceTopicParams, ReportStatusPayload, prepare_report_topic, shutdown_action_topic, run_action_topic, service_message_topic, exit_report_topic, status_report_topic,global_shutdown_topic
|
|
7
|
+
from threading import Timer
|
|
8
|
+
import inspect
|
|
9
|
+
import asyncio
|
|
10
|
+
import logging
|
|
11
|
+
import os
|
|
12
|
+
import threading
|
|
13
|
+
|
|
14
|
+
# Seconds a "block_end" service lingers after its last block finishes when the
# service config does not specify keep_alive.
DEFAULT_BLOCK_ALIVE_TIME = 10
|
|
15
|
+
|
|
16
|
+
# Two log file layouts, selected by whether a session id is present:
# 1. session service: ~/.oocana/sessions/{session_id}/python-{service_hash}.log
# 2. cross-session (global) service: ~/.oocana/services/python-{service_hash}.log
|
|
19
|
+
def config_logger(service_hash: str, session_id: str | None):
    """Route all logging for this process into a per-service log file.

    Session-scoped services log under ~/.oocana/sessions/{session_id}/,
    global services under ~/.oocana/services/.
    """
    import os.path

    log_name = "python-" + service_hash + ".log"
    if session_id is None:
        logger_file = os.path.join(oocana_dir(), "services", log_name)
    else:
        logger_file = os.path.join(oocana_dir(), "sessions", session_id, log_name)

    # make sure the parent directory exists before basicConfig opens the file
    if not os.path.exists(logger_file):
        os.makedirs(os.path.dirname(logger_file), exist_ok=True)

    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - {%(filename)s:%(lineno)d} - %(message)s', filename=logger_file)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class ServiceRuntime(ServiceContextAbstractClass):
    """Runtime for one Python service process.

    Subscribes to the service's MQTT action topics, publishes a status
    heartbeat, executes blocks on demand and exits according to the
    configured ``stop_at`` strategy.
    """

    _config: ServiceExecutePayload
    _mainframe: Mainframe
    _service_hash: str
    _session_id: str | None = None
    _timer: Timer | None = None            # pending "block_end" shutdown timer
    _stop_at: StopAtOption
    _keep_alive: int | None = None         # seconds to linger after the last block
    _waiting_ready_notify = False
    _session_dir: str
    _topic_params: ServiceTopicParams
    _report_timer: Timer | None = None     # heartbeat timer

    def __init__(self, config: ServiceExecutePayload, mainframe: Mainframe, service_hash: str, session_dir: str, session_id: str | None = None):
        # BUGFIX: these four used to be mutable class attributes and were
        # therefore shared by every ServiceRuntime instance in the process;
        # they are now per-instance state.
        self._store = {}
        self._registered = threading.Event()
        self._runningBlocks = set()
        self._jobs = set()

        self._block_handler = dict()
        self._config = config
        self._mainframe = mainframe
        self._service_hash = service_hash
        self._session_id = session_id
        self._session_dir = session_dir
        self._topic_params = {
            "service_hash": service_hash,
            "session_id": session_id
        }

        service_executor = config.get("service_executor")
        self._stop_at = service_executor.get("stop_at") if service_executor is not None and service_executor.get("stop_at") is not None else "session_end"
        self._keep_alive = service_executor.get("keep_alive") if service_executor is not None else None

        mainframe.subscribe(run_action_topic(self._topic_params), self.run_action_callback)
        mainframe.subscribe(shutdown_action_topic(self._topic_params), self.shutdown_callback)
        mainframe.subscribe(global_shutdown_topic, self.shutdown_callback)

        self._setup_timer()
        self.report_status()

    def report_status(self):
        """Publish a heartbeat and reschedule itself every 5 seconds."""
        payload: ReportStatusPayload = {
            "service_hash": self._service_hash,
            "session_id": self._session_id,
            "executor": "python"
        }
        self._mainframe.publish(status_report_topic(), payload)
        self._report_timer = Timer(5, self.report_status)
        self._report_timer.start()

    # mainframe callbacks must not publish from the callback thread directly,
    # so every callback hops onto a fresh thread.
    def shutdown_callback(self, payload: Any):

        async def shutdown():
            self.exit()

        run_in_new_thread(shutdown)

    def run_action_callback(self, payload: ServiceExecutePayload):

        async def run():
            await self.run_block(payload)

        run_in_new_thread(run)

    def _setup_timer(self):
        """Install the shutdown trigger that matches the stop_at strategy."""
        if self._stop_at is None:
            return
        elif self._stop_at == "session_end":
            self._mainframe.subscribe("report", lambda payload: self.exit() if payload.get("type") == "SessionFinished" and payload.get("session_id") == self._config.get("session_id") else None)
        elif self._stop_at == "app_end":
            # app shutdown does not need special handling for now
            pass
        elif self._stop_at == "block_end":
            # handled after each block in run_block
            pass

    def __setitem__(self, key: str, value: Any):
        # only "block_handler" is settable through item assignment; other keys
        # are silently ignored (existing behavior, kept for compatibility)
        if key == "block_handler":
            self.block_handler = value

    @property
    def waiting_ready_notify(self) -> bool:
        return self._waiting_ready_notify

    @waiting_ready_notify.setter
    def waiting_ready_notify(self, value: bool):
        self._waiting_ready_notify = value

    @property
    def block_handler(self) -> BlockHandler:
        return self._block_handler

    @block_handler.setter
    def block_handler(self, value: BlockHandler):
        self._block_handler = value
        # unless the service opts into an explicit ready signal, registering
        # the handler is what unblocks run_block
        if not self.waiting_ready_notify:
            self._registered.set()

    def notify_ready(self):
        """Explicit readiness signal for services using waiting_ready_notify."""
        self._registered.set()

    def add_message_callback(self, callback: Callable[[ServiceMessage], Any]):
        """Forward service messages to *callback*, only for jobs owned by this runtime."""
        def filter(payload):
            if payload.get("job_id") in self._jobs:
                callback(payload)
        self._mainframe.subscribe(service_message_topic, filter)

    async def run(self):
        """Load the service module, start its entry function and run the initial block."""
        service_config = self._config.get("service_executor")
        m = load_module(service_config.get("entry"), self._config.get("dir"))
        fn = m.__dict__.get(service_config.get("function"))
        if not callable(fn):
            raise Exception(f"function {service_config.get('function')} not found in {service_config.get('entry')}")

        # the entry function runs on its own thread so it may block freely
        if inspect.iscoroutinefunction(fn):
            async def async_run():
                await fn(self)
            run_in_new_thread(async_run)
        else:
            def run():
                fn(self)
            threading.Thread(target=run).start()

        await self.run_block(self._config)

    def exit(self):
        """Report exit, disconnect from the broker and terminate the process."""
        payload: ReportStatusPayload = {
            "service_hash": self._service_hash,
            "session_id": self._session_id,
            "executor": "python"
        }

        self._mainframe.publish(exit_report_topic(), payload)
        self._mainframe.disconnect()
        # child process need call os._exit not sys.exit
        os._exit(0)

    async def run_block(self, payload: ServiceExecutePayload):
        """Execute one block described by *payload* through the registered handler."""
        # block until the service module has registered its handlers
        self._registered.wait()
        block_name = payload["block_name"]
        job_id = payload["job_id"]

        # a newly arriving block cancels any pending "block_end" shutdown
        if self._timer is not None:
            self._timer.cancel()
            self._timer = None

        self._runningBlocks.add(job_id)
        self._jobs.add(job_id)

        context = createContext(self._mainframe, payload["session_id"], payload["job_id"], self._store, payload["outputs"], self._session_dir)

        if isinstance(self.block_handler, dict):
            handler = self.block_handler.get(block_name)
            if handler is None:
                raise Exception(f"block {block_name} not found")
            result = handler(context.inputs, context)
        elif callable(self.block_handler):
            handler = self.block_handler
            result = handler(block_name, context.inputs, context)
        else:
            raise Exception("blockHandler must be a dict or a callable function")
        output_return_object(result, context)

        self._runningBlocks.remove(job_id)

        if self._stop_at == "block_end" and len(self._runningBlocks) == 0:
            self._timer = Timer(self._keep_alive or DEFAULT_BLOCK_ALIVE_TIME, self.exit)
            self._timer.start()
|
|
201
|
+
|
|
202
|
+
def setup_service(payload: Any, mainframe: Mainframe, service_id: str, session_dir: str):
    """Create a ServiceRuntime for *payload* and drive it on its own event-loop thread."""
    runtime = ServiceRuntime(payload, mainframe, service_id, session_dir)

    async def _drive():
        await runtime.run()

    loop_in_new_thread(_drive)
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
async def run_service(address: str, service_hash: str, session_id: str | None, session_dir: str):
    """Connect to the broker, wait for the service config, then announce readiness."""
    mainframe = Mainframe(address, service_hash)
    mainframe.connect()

    topic_params: ServiceTopicParams = {
        "session_id": session_id,
        "service_hash": service_hash
    }

    def on_config(payload):
        setup_service(payload, mainframe, service_hash, session_dir)

    mainframe.subscribe(service_config_topic(topic_params), on_config)
    # give the subscription a moment to take effect before announcing readiness
    await asyncio.sleep(1)
    mainframe.publish(prepare_report_topic(topic_params), {})
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
if __name__ == "__main__":
    import argparse

    # CLI entry point: one process per service, addressed by its hash.
    parser = argparse.ArgumentParser(description="run service")
    parser.add_argument("--address", help="mqtt address", required=True)
    parser.add_argument("--service-hash", help="service hash", required=True)
    parser.add_argument("--session-id", help="session id")
    parser.add_argument("--session-dir", required=True)
    cli = parser.parse_args()

    config_logger(cli.service_hash, cli.session_id)
    run_async_code_and_loop(run_service(cli.address, cli.service_hash, cli.session_id, cli.session_dir))
|
python_executor/topic.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
from typing import Optional, TypedDict
|
|
2
|
+
|
|
3
|
+
# Global report/action topics shared by every service process (no
# session/service prefix).
status_topic = "service/report/status"
exit_topic = "service/report/exit"
global_shutdown_topic = "service/global/action/shutdown"
|
|
6
|
+
|
|
7
|
+
class ReportStatusPayload(TypedDict):
    """Payload published on the status/exit report topics."""
    session_id: Optional[str]  # None for cross-session (global) services
    service_hash: str
    executor: str
|
|
11
|
+
|
|
12
|
+
# prefix — global services have a fixed prefix; session services embed the
# session id.
global_service_topic_prefix = "service/python"
service_topic_prefix = "/{session_id}/service/python"  # "{session_id}" filled by prefix()

# action suffix — topics oocana publishes to, this process subscribes
service_config_suffix = "action/config"
service_run_suffix = "action/run"
service_ping_suffix = "action/ping"
service_shutdown_suffix = "action/shutdown"

# report suffix — topics this process publishes to
service_prepare_suffix = "report/prepare"
service_pong_suffix = "report/pong"

# topic = {prefix}/{service_hash}/{suffix}
|
|
27
|
+
|
|
28
|
+
def prefix(session_id: Optional[str] = None) -> str:
    """Return the topic prefix: session-scoped when *session_id* is truthy, global otherwise."""
    if not session_id:
        return global_service_topic_prefix
    return service_topic_prefix.replace("{session_id}", session_id)
|
|
33
|
+
|
|
34
|
+
class ServiceTopicParams(TypedDict):
    """Parameters needed to build a service-scoped topic."""
    session_id: Optional[str]  # None selects the global prefix
    service_hash: str
|
|
37
|
+
|
|
38
|
+
def run_action_topic(params: ServiceTopicParams) -> str:
    """Topic on which oocana asks this service to run a block."""
    base = prefix(params.get('session_id'))
    return f"{base}/{params['service_hash']}/{service_run_suffix}"
|
|
40
|
+
|
|
41
|
+
def ping_action_topic(params: ServiceTopicParams) -> str:
    """Topic on which oocana pings this service."""
    base = prefix(params.get('session_id'))
    return f"{base}/{params['service_hash']}/{service_ping_suffix}"
|
|
43
|
+
|
|
44
|
+
def shutdown_action_topic(params: ServiceTopicParams) -> str:
    """Topic on which oocana asks this specific service to shut down."""
    base = prefix(params.get('session_id'))
    return f"{base}/{params['service_hash']}/{service_shutdown_suffix}"
|
|
46
|
+
|
|
47
|
+
# TODO: change the oomol-side implementation so this is a fixed topic that does
# not need a session_id.
service_message_topic = "/service/message"
|
|
49
|
+
|
|
50
|
+
def prepare_report_topic(params: ServiceTopicParams) -> str:
    """Topic on which this service announces it is ready to receive its config."""
    base = prefix(params.get('session_id'))
    return f"{base}/{params['service_hash']}/{service_prepare_suffix}"
|
|
52
|
+
|
|
53
|
+
def pong_report_topic(params: ServiceTopicParams) -> str:
    """Topic on which this service answers a ping."""
    base = prefix(params.get('session_id'))
    return f"{base}/{params['service_hash']}/{service_pong_suffix}"
|
|
55
|
+
|
|
56
|
+
def exit_report_topic() -> str:
    # exit reports go to a single global topic, independent of session/service
    return exit_topic
|
|
58
|
+
|
|
59
|
+
def service_config_topic(params: ServiceTopicParams) -> str:
    """Topic on which oocana delivers this service's configuration."""
    base = prefix(params.get('session_id'))
    return f"{base}/{params['service_hash']}/{service_config_suffix}"
|
|
61
|
+
|
|
62
|
+
def status_report_topic() -> str:
    # status heartbeats go to a single global topic, independent of session/service
    return status_topic
|
python_executor/utils.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
import asyncio
|
|
3
|
+
from typing import Callable, Awaitable, Any
|
|
4
|
+
|
|
5
|
+
def run_async_code(async_func):
    """Run the awaitable *async_func* to completion on a fresh event loop, then close it."""
    event_loop = asyncio.new_event_loop()
    asyncio.set_event_loop(event_loop)
    try:
        event_loop.run_until_complete(async_func)
    finally:
        # always release the loop's resources, even if the coroutine raised
        event_loop.close()
|
|
13
|
+
|
|
14
|
+
def run_async_code_and_loop(async_func):
    # Run *async_func* to completion on a fresh event loop, then keep the loop
    # running forever so callbacks scheduled later can execute.
    # NOTE: this blocks the calling thread until loop.stop() is invoked from a
    # callback or the process exits.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    try:
        loop.run_until_complete(async_func)
        loop.run_forever()
    finally:
        loop.close()
|
|
23
|
+
|
|
24
|
+
def run_in_new_thread(async_func: Callable[[], Awaitable[Any]]):
    """Create the coroutine now and run it to completion on a brand-new thread."""
    coro = async_func()
    worker = threading.Thread(target=run_async_code, args=(coro,))
    worker.start()
|
|
26
|
+
|
|
27
|
+
def loop_in_new_thread(async_func: Callable[[], Awaitable[Any]]):
    """Run the coroutine on a new thread whose event loop then keeps running forever."""
    coro = async_func()
    worker = threading.Thread(target=run_async_code_and_loop, args=(coro,))
    worker.start()
|
|
29
|
+
|
|
30
|
+
def oocana_dir() -> str:
    """Path of the per-user oocana data directory (~/.oocana)."""
    import os.path
    return os.path.join(os.path.expanduser("~"), ".oocana")
|