canvas 0.2.11__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of canvas might be problematic. Click here for more details.
- {canvas-0.2.11.dist-info → canvas-0.3.1.dist-info}/METADATA +3 -3
- canvas-0.3.1.dist-info/RECORD +216 -0
- canvas_cli/apps/emit/__init__.py +3 -0
- canvas_cli/apps/emit/emit.py +67 -0
- canvas_cli/apps/emit/event_fixtures/ALLERGY_INTOLERANCE_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/ALLERGY_INTOLERANCE_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/APPOINTMENT_CANCELED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/APPOINTMENT_CHECKED_IN.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/APPOINTMENT_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/APPOINTMENT_NO_SHOWED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/APPOINTMENT_RESCHEDULED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/APPOINTMENT_RESTORED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/APPOINTMENT_UPDATED.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/ASSESS_COMMAND__CONDITION_SELECTED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/ASSESS_COMMAND__POST_COMMIT.ndjson +3 -0
- canvas_cli/apps/emit/event_fixtures/ASSESS_COMMAND__POST_ORIGINATE.ndjson +4 -0
- canvas_cli/apps/emit/event_fixtures/ASSESS_COMMAND__POST_UPDATE.ndjson +5 -0
- canvas_cli/apps/emit/event_fixtures/ASSESS_COMMAND__PRE_COMMIT.ndjson +3 -0
- canvas_cli/apps/emit/event_fixtures/ASSESS_COMMAND__PRE_ORIGINATE.ndjson +4 -0
- canvas_cli/apps/emit/event_fixtures/ASSESS_COMMAND__PRE_UPDATE.ndjson +5 -0
- canvas_cli/apps/emit/event_fixtures/BILLING_LINE_ITEM_CREATED.ndjson +3 -0
- canvas_cli/apps/emit/event_fixtures/BILLING_LINE_ITEM_UPDATED.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/CONDITION_ASSESSED.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/CONDITION_CREATED.ndjson +4 -0
- canvas_cli/apps/emit/event_fixtures/CONDITION_UPDATED.ndjson +5 -0
- canvas_cli/apps/emit/event_fixtures/CRON.ndjson +3 -0
- canvas_cli/apps/emit/event_fixtures/ENCOUNTER_CREATED.ndjson +3 -0
- canvas_cli/apps/emit/event_fixtures/ENCOUNTER_UPDATED.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/IMMUNIZATION_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/IMMUNIZATION_STATEMENT_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/IMMUNIZATION_STATEMENT_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/IMMUNIZATION_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/INTERVIEW_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/INTERVIEW_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/LAB_ORDER_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/LAB_ORDER_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_LIST_ITEM_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_LIST_ITEM_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_STATEMENT_COMMAND__POST_COMMIT.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_STATEMENT_COMMAND__POST_ORIGINATE.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_STATEMENT_COMMAND__POST_UPDATE.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_STATEMENT_COMMAND__PRE_COMMIT.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_STATEMENT_COMMAND__PRE_ORIGINATE.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_STATEMENT_COMMAND__PRE_UPDATE.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/MEDICATION_STATEMENT__MEDICATION__POST_SEARCH.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/PATIENT_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/PATIENT_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/PLAN_COMMAND__POST_ORIGINATE.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/PLAN_COMMAND__PRE_ORIGINATE.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/QUESTIONNAIRE_COMMAND__POST_COMMIT.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/QUESTIONNAIRE_COMMAND__POST_ORIGINATE.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/QUESTIONNAIRE_COMMAND__POST_UPDATE.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/QUESTIONNAIRE_COMMAND__PRE_COMMIT.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/QUESTIONNAIRE_COMMAND__PRE_ORIGINATE.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/QUESTIONNAIRE_COMMAND__PRE_UPDATE.ndjson +2 -0
- canvas_cli/apps/emit/event_fixtures/QUESTIONNAIRE__QUESTIONNAIRE__POST_SEARCH.ndjson +4 -0
- canvas_cli/apps/emit/event_fixtures/TASK_COMMENT_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/TASK_CREATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/TASK_UPDATED.ndjson +1 -0
- canvas_cli/apps/emit/event_fixtures/VITAL_SIGN_CREATED.ndjson +14 -0
- canvas_cli/apps/emit/event_fixtures/VITAL_SIGN_UPDATED.ndjson +364 -0
- canvas_cli/apps/plugin/plugin.py +56 -23
- canvas_cli/apps/run_plugins/__init__.py +3 -0
- canvas_cli/apps/run_plugins/run_plugins.py +16 -0
- canvas_cli/main.py +8 -38
- canvas_cli/templates/plugins/default/{{ cookiecutter.__project_slug }}/README.md +0 -1
- canvas_cli/utils/validators/validators.py +1 -1
- canvas_generated/messages/effects_pb2.py +5 -5
- canvas_generated/messages/effects_pb2.pyi +4 -2
- canvas_generated/messages/events_pb2.py +3 -3
- canvas_generated/messages/events_pb2.pyi +70 -0
- canvas_generated/messages/plugins_pb2.py +1 -1
- canvas_generated/services/plugin_runner_pb2.py +1 -1
- canvas_sdk/base.py +2 -2
- canvas_sdk/commands/__init__.py +3 -1
- canvas_sdk/commands/base.py +2 -2
- canvas_sdk/commands/commands/allergy.py +2 -0
- canvas_sdk/commands/commands/close_goal.py +3 -0
- canvas_sdk/commands/commands/prescribe.py +1 -3
- canvas_sdk/commands/commands/refill.py +1 -0
- canvas_sdk/commands/commands/task.py +1 -1
- canvas_sdk/commands/commands/vitals.py +15 -12
- canvas_sdk/commands/tests/schema/tests.py +1 -1
- canvas_sdk/commands/tests/test_utils.py +4 -2
- canvas_sdk/data/base.py +5 -1
- canvas_sdk/data/client.py +1 -1
- canvas_sdk/data/patient.py +2 -0
- canvas_sdk/data/staff.py +2 -0
- canvas_sdk/data/task.py +7 -0
- canvas_sdk/effects/protocol_card/protocol_card.py +2 -0
- canvas_sdk/effects/protocol_card/tests.py +5 -2
- canvas_sdk/protocols/__init__.py +1 -0
- canvas_sdk/protocols/clinical_quality_measure.py +1 -0
- canvas_sdk/utils/http.py +19 -18
- canvas_sdk/utils/stats.py +2 -1
- canvas_sdk/v1/data/allergy_intolerance.py +2 -3
- canvas_sdk/v1/data/base.py +55 -9
- canvas_sdk/v1/data/command.py +27 -0
- canvas_sdk/v1/data/device.py +44 -0
- canvas_sdk/v1/data/lab.py +8 -0
- canvas_sdk/v1/data/observation.py +117 -0
- canvas_sdk/v1/data/patient.py +3 -0
- canvas_sdk/v1/data/questionnaire.py +204 -0
- canvas_sdk/v1/data/user.py +2 -0
- canvas_sdk/value_set/v2022/condition.py +2 -2
- canvas_sdk/value_set/v2022/encounter.py +1 -1
- canvas_sdk/value_set/value_set.py +20 -5
- logger/__init__.py +1 -0
- logger/logger.py +17 -6
- plugin_runner/__init__.py +0 -0
- plugin_runner/authentication.py +48 -0
- plugin_runner/plugin_runner.py +338 -0
- plugin_runner/plugin_synchronizer.py +87 -0
- plugin_runner/sandbox.py +273 -0
- pubsub/__init__.py +0 -0
- pubsub/pubsub.py +38 -0
- canvas-0.2.11.dist-info/RECORD +0 -144
- {canvas-0.2.11.dist-info → canvas-0.3.1.dist-info}/WHEEL +0 -0
- {canvas-0.2.11.dist-info → canvas-0.3.1.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import importlib.util
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import pathlib
|
|
6
|
+
import signal
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
import traceback
|
|
10
|
+
from collections import defaultdict
|
|
11
|
+
from types import FrameType
|
|
12
|
+
from typing import Any, Optional
|
|
13
|
+
|
|
14
|
+
import grpc
|
|
15
|
+
import statsd
|
|
16
|
+
|
|
17
|
+
from canvas_generated.messages.plugins_pb2 import (
|
|
18
|
+
ReloadPluginsRequest,
|
|
19
|
+
ReloadPluginsResponse,
|
|
20
|
+
)
|
|
21
|
+
from canvas_generated.services.plugin_runner_pb2_grpc import (
|
|
22
|
+
PluginRunnerServicer,
|
|
23
|
+
add_PluginRunnerServicer_to_server,
|
|
24
|
+
)
|
|
25
|
+
from canvas_sdk.effects import Effect
|
|
26
|
+
from canvas_sdk.events import Event, EventResponse, EventType
|
|
27
|
+
from canvas_sdk.protocols import ClinicalQualityMeasure
|
|
28
|
+
from canvas_sdk.utils.stats import get_duration_ms, tags_to_line_protocol
|
|
29
|
+
from logger import log
|
|
30
|
+
from plugin_runner.authentication import token_for_plugin
|
|
31
|
+
from plugin_runner.plugin_synchronizer import publish_message
|
|
32
|
+
from plugin_runner.sandbox import Sandbox
|
|
33
|
+
|
|
34
|
+
# Deployment environment selector; anything other than "production" is treated
# as local development.
ENV = os.getenv("ENV", "development")

IS_PRODUCTION = ENV == "production"

# Per-plugin metadata file read by load_or_reload_plugin().
MANIFEST_FILE_NAME = "CANVAS_MANIFEST.json"

# Optional per-plugin secrets file, merged into each protocol's secrets dict.
SECRETS_FILE_NAME = "SECRETS.json"

# specify a local plugin directory for development
PLUGIN_DIRECTORY = "/plugin-runner/custom-plugins" if IS_PRODUCTION else "./custom-plugins"

# when we import plugins we'll use the module name directly so we need to add the plugin
# directory to the path
sys.path.append(PLUGIN_DIRECTORY)

# a global dictionary of loaded plugins
# keyed "<plugin>:<module>:<class>" (see load_or_reload_plugin); values carry
# "active"/"class"/"sandbox"/"protocol"/"secrets" subkeys
# TODO: create typings here for the subkeys
LOADED_PLUGINS: dict = {}

# a global dictionary of events to protocol class names
# (event name -> list of LOADED_PLUGINS keys; rebuilt by refresh_event_type_map)
EVENT_PROTOCOL_MAP: dict = {}
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class PluginRunner(PluginRunnerServicer):
    """This process runs provided plugins that register interest in incoming events.

    gRPC servicer: HandleEvent dispatches each incoming event to every loaded
    plugin registered for that event type and streams back the collected
    effects; ReloadPlugins re-syncs the plugin set from disk and broadcasts a
    restart message to sibling containers.
    """

    def __init__(self) -> None:
        # statsd client used for per-plugin and per-event timing metrics
        self.statsd_client = statsd.StatsClient()
        super().__init__()

    # class-level annotation only; never assigned in __init__ —
    # NOTE(review): appears unused within this class, confirm it is needed
    sandbox: Sandbox

    async def HandleEvent(self, request: Event, context: Any) -> EventResponse:
        """This is invoked when an event comes in.

        NOTE(review): the ``yield`` below makes this an async generator
        (server-streaming handler) even though the annotation says it returns
        a single ``EventResponse`` — confirm the annotation is intentional.
        """
        event_start_time = time.time()
        event_name = EventType.Name(request.type)
        relevant_plugins = EVENT_PROTOCOL_MAP.get(event_name, [])

        effect_list = []

        for plugin_name in relevant_plugins:
            plugin = LOADED_PLUGINS[plugin_name]
            protocol_class = plugin["class"]
            # plugin_name is "<plugin>:<module>:<class>"; keep just the plugin part
            base_plugin_name = plugin_name.split(":")[0]

            # NOTE(review): when a secrets dict exists, .get returns the stored
            # dict itself, so writing the JWT below also mutates the
            # LOADED_PLUGINS entry — confirm this persistence is intended.
            secrets = plugin.get("secrets", {})
            secrets["graphql_jwt"] = token_for_plugin(plugin_name=plugin_name, audience="home")

            try:
                protocol = protocol_class(request, secrets)
                # only CQM protocols get a classname attached to their effects
                classname = (
                    protocol.__class__.__name__
                    if isinstance(protocol, ClinicalQualityMeasure)
                    else None
                )

                compute_start_time = time.time()
                # run the (potentially slow, sandboxed) compute off the event loop
                _effects = await asyncio.get_running_loop().run_in_executor(None, protocol.compute)
                effects = [
                    Effect(
                        type=effect.type,
                        payload=effect.payload,
                        plugin_name=base_plugin_name,
                        classname=classname,
                    )
                    for effect in _effects
                ]
                compute_duration = get_duration_ms(compute_start_time)

                log.info(f"{plugin_name}.compute() completed ({compute_duration} ms)")
                statsd_tags = tags_to_line_protocol({"plugin": plugin_name})
                self.statsd_client.timing(
                    f"plugins.protocol_duration_ms,{statsd_tags}",
                    delta=compute_duration,
                )
            except Exception as e:
                # log the full traceback line-by-line, then skip this plugin
                for error_line_with_newlines in traceback.format_exception(e):
                    for error_line in error_line_with_newlines.split("\n"):
                        log.error(error_line)
                continue

            effect_list += effects

        event_duration = get_duration_ms(event_start_time)

        # Don't log anything if a protocol didn't actually run.
        if relevant_plugins:
            log.info(f"Responded to Event {event_name} ({event_duration} ms)")
            statsd_tags = tags_to_line_protocol({"event": event_name})
            self.statsd_client.timing(
                f"plugins.event_duration_ms,{statsd_tags}",
                delta=event_duration,
            )

        yield EventResponse(success=True, effects=effect_list)

    async def ReloadPlugins(
        self, request: ReloadPluginsRequest, context: Any
    ) -> ReloadPluginsResponse:
        """This is invoked when we need to reload plugins.

        Reloads from disk, then notifies sibling containers via pubsub.
        Yields success=False only on ImportError; other exceptions propagate
        to the gRPC layer.
        """
        try:
            load_plugins()
            publish_message({"action": "restart"})
        except ImportError:
            yield ReloadPluginsResponse(success=False)
        else:
            yield ReloadPluginsResponse(success=True)
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def handle_hup_cb(_signum: int, _frame: Optional[FrameType]) -> None:
    """SIGHUP handler installed by run_server().

    Re-scans the plugin directory and reloads every plugin in place; the
    signal arguments are unused.
    """
    log.info("Received SIGHUP, reloading plugins...")
    load_plugins()
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def sandbox_from_module_name(module_name: str) -> Any:
    """Compile and execute a plugin module inside the restricted sandbox.

    Resolves ``module_name`` on sys.path, reads its source, and returns the
    resulting sandbox globals. Raises if the module cannot be located.
    """
    module_spec = importlib.util.find_spec(module_name)

    if module_spec is None or not module_spec.origin:
        raise Exception(f'Could not load plugin "{module_name}"')

    plugin_source = pathlib.Path(module_spec.origin).read_text()

    return Sandbox(plugin_source).execute()
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def load_or_reload_plugin(path: pathlib.Path) -> None:
    """Given a path, load or reload a plugin.

    Reads the plugin's manifest (and optional secrets file), then loads each
    protocol listed in the manifest into LOADED_PLUGINS via the sandbox.
    Errors are logged and the offending plugin/protocol is skipped; this
    function is intended never to raise.

    Fixes vs. previous version:
    - the manifest read now happens inside the try, so a missing/unreadable
      manifest is logged instead of raising uncaught;
    - the secrets file is opened with a context manager so the handle is
      always closed;
    - the manifest JSON string and parsed dict no longer share one name.
    """
    log.info(f"Loading {path}")

    manifest_file = path / MANIFEST_FILE_NAME

    # the name is the folder name underneath the plugins directory
    name = path.name

    try:
        manifest = json.loads(manifest_file.read_text())
    except Exception as e:
        log.error(f'Unable to load plugin "{name}": {e}')
        return

    secrets_file = path / SECRETS_FILE_NAME

    secrets_json = {}
    if secrets_file.exists():
        try:
            # context manager guarantees the file handle is closed
            with secrets_file.open() as f:
                secrets_json = json.load(f)
        except Exception as e:
            log.error(f'Unable to load secrets for plugin "{name}": {str(e)}')

    # TODO add existing schema validation from Michela here
    try:
        protocols = manifest["components"]["protocols"]
    except Exception as e:
        log.error(f'Unable to load plugin "{name}": {str(e)}')
        return

    for protocol in protocols:
        # TODO add class colon validation to existing schema validation
        # TODO when we encounter an exception here, disable the plugin in response
        try:
            protocol_module, protocol_class = protocol["class"].split(":")
            name_and_class = f"{name}:{protocol_module}:{protocol_class}"
        except ValueError:
            log.error(f"Unable to parse class for plugin '{name}': '{protocol['class']}'")
            continue

        is_reload = name_and_class in LOADED_PLUGINS

        try:
            if is_reload:
                log.info(f"Reloading plugin '{name_and_class}'")
            else:
                log.info(f"Loading plugin '{name_and_class}'")

            # execute the module source in the restricted sandbox; returns
            # the sandbox globals containing the protocol class
            result = sandbox_from_module_name(protocol_module)

            if is_reload:
                LOADED_PLUGINS[name_and_class].update(
                    {
                        "active": True,
                        "class": result[protocol_class],
                        "sandbox": result,
                        "secrets": secrets_json,
                    }
                )
            else:
                LOADED_PLUGINS[name_and_class] = {
                    "active": True,
                    "class": result[protocol_class],
                    "sandbox": result,
                    "protocol": protocol,
                    "secrets": secrets_json,
                }
        except Exception as err:
            log.error(f"Error importing module '{name_and_class}': {err}")
            for error_line in traceback.format_exception(err):
                log.error(error_line)
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def refresh_event_type_map() -> None:
    """Rebuild the event-name -> plugin-key subscription map from scratch.

    RESPONDS_TO on a plugin class may be a single event name or a list of
    them; anything else is logged and ignored.
    """
    global EVENT_PROTOCOL_MAP
    EVENT_PROTOCOL_MAP = defaultdict(list)

    for plugin_key, plugin in LOADED_PLUGINS.items():
        handler_class = plugin["class"]
        if not hasattr(handler_class, "RESPONDS_TO"):
            continue

        responds_to = handler_class.RESPONDS_TO

        if isinstance(responds_to, str):
            EVENT_PROTOCOL_MAP[responds_to].append(plugin_key)
            continue

        if isinstance(responds_to, list):
            for event_name in responds_to:
                EVENT_PROTOCOL_MAP[event_name].append(plugin_key)
            continue

        log.warning(f"Unknown RESPONDS_TO type: {type(responds_to)}")
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def load_plugins(specified_plugin_paths: list[str] | None = None) -> None:
    """Load the plugins.

    Either loads the explicitly listed plugin paths, or scans
    PLUGIN_DIRECTORY for manifest-bearing directories. Plugins that no
    longer exist on disk are evicted, and the event map is rebuilt.
    """
    # Mark everything inactive first; whatever is still inactive at the end
    # no longer exists on disk and will be dropped from LOADED_PLUGINS.
    for plugin in LOADED_PLUGINS.values():
        plugin["active"] = False

    if specified_plugin_paths is None:
        entries = [
            pathlib.Path(os.path.join(PLUGIN_DIRECTORY, entry))
            for entry in os.listdir(PLUGIN_DIRECTORY)
        ]

        # keep only directories that actually contain a plugin manifest
        plugin_paths = [
            entry
            for entry in entries
            if entry.is_dir() and (entry / MANIFEST_FILE_NAME).exists()
        ]
    else:
        plugin_paths = [pathlib.Path(raw) for raw in specified_plugin_paths]

        for plugin_path in plugin_paths:
            # plugins are imported by bare module name, so each plugin's
            # parent directory must be importable
            sys.path.append(f"./{plugin_path.parent}")

    for plugin_path in plugin_paths:
        load_or_reload_plugin(plugin_path)

    # evict plugins that were uninstalled/disabled since the last load
    stale = [key for key, plugin in LOADED_PLUGINS.copy().items() if not plugin["active"]]
    for key in stale:
        del LOADED_PLUGINS[key]

    refresh_event_type_map()
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
# Shutdown coroutines awaited by run_server()'s finally block
# (populated by serve() with the graceful-shutdown coroutine).
_cleanup_coroutines = []
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
async def serve(specified_plugin_paths: list[str] | None = None) -> None:
    """Run the server.

    Binds the async gRPC server, loads plugins, and blocks until
    termination; registers a graceful-shutdown coroutine for run_server()
    to await.
    """
    port = "50051"

    server = grpc.aio.server()
    # NOTE(review): loopback-only bind — remote processes cannot connect;
    # confirm this is intended for all deployments.
    server.add_insecure_port("127.0.0.1:" + port)

    add_PluginRunnerServicer_to_server(PluginRunner(), server)

    log.info(f"Starting server, listening on port {port}")

    # load plugins before accepting traffic so the first event sees them
    load_plugins(specified_plugin_paths)

    await server.start()

    async def server_graceful_shutdown() -> None:
        # give in-flight RPCs up to 5 seconds to complete
        log.info("Starting graceful shutdown...")
        await server.stop(5)

    _cleanup_coroutines.append(server_graceful_shutdown())

    await server.wait_for_termination()
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
def run_server(specified_plugin_paths: list[str] | None = None) -> None:
    """Run the server.

    Creates a fresh event loop, installs the SIGHUP reload handler, and
    drives serve() to completion, awaiting any registered cleanup
    coroutines on the way out.

    Fix: the previous ``loop.run_until_complete(*_cleanup_coroutines)``
    raised TypeError whenever the list was empty (serve() failed before
    registering cleanup) or held more than one coroutine; gather with an
    emptiness guard handles both cases.
    """
    loop = asyncio.new_event_loop()

    asyncio.set_event_loop(loop)

    # reload plugins in place when the synchronizer sends SIGHUP
    signal.signal(signal.SIGHUP, handle_hup_cb)

    try:
        loop.run_until_complete(serve(specified_plugin_paths))
    except KeyboardInterrupt:
        pass
    finally:
        if _cleanup_coroutines:
            loop.run_until_complete(asyncio.gather(*_cleanup_coroutines))
        loop.close()
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
# Script entry point: start the plugin-runner gRPC server.
if __name__ == "__main__":
    run_server()
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import pickle
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from subprocess import STDOUT, CalledProcessError, check_output
|
|
7
|
+
|
|
8
|
+
import redis
|
|
9
|
+
|
|
10
|
+
# App name used to build the default Redis endpoint URL below.
APP_NAME = os.getenv("APP_NAME")

# Customer identifier namespaces the pubsub channel per instance.
CUSTOMER_IDENTIFIER = os.getenv("CUSTOMER_IDENTIFIER")
PLUGINS_PUBSUB_CHANNEL = os.getenv("PLUGINS_PUBSUB_CHANNEL", default="plugins")

CHANNEL_NAME = f"{CUSTOMER_IDENTIFIER}:{PLUGINS_PUBSUB_CHANNEL}"

REDIS_ENDPOINT = os.getenv("REDIS_ENDPOINT", f"redis://{APP_NAME}-redis:6379")

# Per-container id used to ignore our own published messages; falls back to a
# shared sentinel when the id file is absent (e.g. local development).
try:
    CLIENT_ID = Path("/app/container-unique-id.txt").read_text()
except FileNotFoundError:
    CLIENT_ID = "non-unique"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def get_client() -> tuple[redis.Redis, redis.client.PubSub]:
    """Return a Redis client and pubsub object.

    The client is built from REDIS_ENDPOINT; the pubsub handle is derived
    from that same client.
    """
    redis_client = redis.Redis.from_url(REDIS_ENDPOINT)
    return redis_client, redis_client.pubsub()
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def publish_message(message: dict) -> None:
    """Publish a message to the pubsub channel.

    The message is tagged with this container's CLIENT_ID (so listeners can
    skip their own messages) and pickled onto CHANNEL_NAME.

    Fix: the client is now closed in a ``finally`` block, so the connection
    is released even when publish() raises.
    """
    client, _ = get_client()

    try:
        message_with_id = {**message, "client_id": CLIENT_ID}
        client.publish(CHANNEL_NAME, pickle.dumps(message_with_id))
    finally:
        client.close()
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def main() -> None:
    """Listen for messages on the pubsub channel and restart the plugin-runner.

    Runs indefinitely: each "restart" action published by another container
    triggers a plugin re-install followed by a SIGHUP to the plugin-runner.

    Fix: malformed messages and messages originating from this container
    previously hit ``return``, which terminated the listener entirely on the
    first such message; both cases now ``continue`` to the next message.
    """
    print("plugin-synchronizer: starting")

    _, pubsub = get_client()

    pubsub.psubscribe(CHANNEL_NAME)

    for message in pubsub.listen():
        if not message:
            continue

        message_type = message.get("type", "")

        if message_type != "pmessage":
            continue

        # SECURITY NOTE: pickle.loads on pubsub payloads executes arbitrary
        # code if the channel is ever writable by an untrusted party; this
        # assumes the Redis channel is internal-only.
        data = pickle.loads(message.get("data", pickle.dumps({})))

        if "action" not in data or "client_id" not in data:
            continue

        # Don't respond to our own messages
        if data["client_id"] == CLIENT_ID:
            continue

        if data["action"] == "restart":
            # Run the plugin installer process
            try:
                print("plugin-synchronizer: installing plugins")
                check_output(["./manage.py", "install_plugins_v2"], cwd="/app", stderr=STDOUT)
            except CalledProcessError as e:
                print("plugin-synchronizer: `./manage.py install_plugins_v2` failed:", e)

            try:
                print("plugin-synchronizer: sending SIGHUP to plugin-runner")
                check_output(
                    ["circusctl", "signal", "plugin-runner", "1"], cwd="/app", stderr=STDOUT
                )
            except CalledProcessError as e:
                print("plugin-synchronizer: `circusctl signal plugin-runner 1` failed:", e)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
# Script entry point: run the pubsub listener loop forever.
if __name__ == "__main__":
    main()
|
plugin_runner/sandbox.py
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
import ast
|
|
2
|
+
import builtins
|
|
3
|
+
from _ast import AnnAssign
|
|
4
|
+
from functools import cached_property
|
|
5
|
+
from typing import Any, cast
|
|
6
|
+
|
|
7
|
+
from RestrictedPython import (
|
|
8
|
+
CompileResult,
|
|
9
|
+
PrintCollector,
|
|
10
|
+
RestrictingNodeTransformer,
|
|
11
|
+
compile_restricted_exec,
|
|
12
|
+
safe_builtins,
|
|
13
|
+
utility_builtins,
|
|
14
|
+
)
|
|
15
|
+
from RestrictedPython.Eval import default_guarded_getitem
|
|
16
|
+
from RestrictedPython.Guards import (
|
|
17
|
+
guarded_iter_unpack_sequence,
|
|
18
|
+
guarded_unpack_sequence,
|
|
19
|
+
)
|
|
20
|
+
from RestrictedPython.transformer import (
|
|
21
|
+
ALLOWED_FUNC_NAMES,
|
|
22
|
+
FORBIDDEN_FUNC_NAMES,
|
|
23
|
+
copy_locations,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
##
|
|
27
|
+
# ALLOWED_MODULES
|
|
28
|
+
#
|
|
29
|
+
# The modules in this list are the only ones that can be imported in a sandboxed
|
|
30
|
+
# runtime.
|
|
31
|
+
#
|
|
32
|
+
ALLOWED_MODULES = frozenset(
|
|
33
|
+
[
|
|
34
|
+
"_strptime",
|
|
35
|
+
"arrow",
|
|
36
|
+
"base64",
|
|
37
|
+
"cached_property",
|
|
38
|
+
"canvas_sdk.commands",
|
|
39
|
+
"canvas_sdk.data",
|
|
40
|
+
"canvas_sdk.effects",
|
|
41
|
+
"canvas_sdk.events",
|
|
42
|
+
"canvas_sdk.handlers",
|
|
43
|
+
"canvas_sdk.protocols",
|
|
44
|
+
"canvas_sdk.utils",
|
|
45
|
+
"canvas_sdk.v1",
|
|
46
|
+
"canvas_sdk.value_set",
|
|
47
|
+
"canvas_sdk.views",
|
|
48
|
+
"contextlib",
|
|
49
|
+
"datetime",
|
|
50
|
+
"dateutil",
|
|
51
|
+
"enum",
|
|
52
|
+
"functools",
|
|
53
|
+
"hashlib",
|
|
54
|
+
"hmac",
|
|
55
|
+
"http",
|
|
56
|
+
"json",
|
|
57
|
+
"logger",
|
|
58
|
+
"math",
|
|
59
|
+
"operator",
|
|
60
|
+
"pickletools",
|
|
61
|
+
"random",
|
|
62
|
+
"re",
|
|
63
|
+
"requests",
|
|
64
|
+
"string",
|
|
65
|
+
"time",
|
|
66
|
+
"traceback",
|
|
67
|
+
"typing",
|
|
68
|
+
"urllib",
|
|
69
|
+
"uuid",
|
|
70
|
+
]
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _is_known_module(name: str) -> bool:
|
|
75
|
+
return any(name.startswith(m) for m in ALLOWED_MODULES)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _safe_import(name: str, *args: Any, **kwargs: Any) -> Any:
    """Guarded stand-in for ``__import__`` inside the sandbox scope.

    Delegates to the real ``__import__`` only for allow-listed modules;
    everything else raises ImportError.
    """
    if _is_known_module(name):
        return __import__(name, *args, **kwargs)
    raise ImportError(f"{name!r} is not an allowed import.")
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _unrestricted(_ob: Any, *args: Any, **kwargs: Any) -> Any:
|
|
85
|
+
"""Return the given object, unmodified."""
|
|
86
|
+
return _ob
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _apply(_ob: Any, *args: Any, **kwargs: Any) -> Any:
|
|
90
|
+
"""Call the bound method with args, support calling super().__init__()."""
|
|
91
|
+
return _ob(*args, **kwargs)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
class Sandbox:
    """A restricted sandbox for safely executing arbitrary Python code.

    Wraps RestrictedPython: source is compiled via ``compile_restricted_exec``
    under a customized policy (``Transformer``), then executed against a
    curated global scope with guarded builtins, attribute access, and a
    module-allow-listed ``__import__``.
    """

    # raw plugin source to be compiled
    source_code: str
    # value installed as the executed module's __name__
    namespace: str

    class Transformer(RestrictingNodeTransformer):
        """A node transformer for customizing the sandbox compiler."""

        def visit_AnnAssign(self, node: AnnAssign) -> AnnAssign:
            """Allow type annotations."""
            return node

        def check_import_names(self, node: ast.ImportFrom) -> ast.AST:
            """Check the names being imported.

            This is a protection against rebinding dunder names like
            _getitem_, _write_ via imports.

            => 'from _a import x' is ok, because '_a' is not added to the scope.
            """
            # NOTE(review): node.module is None for relative imports
            # ("from . import x"); a star relative import would pass None to
            # _is_known_module — confirm that path cannot occur.
            for name in node.names:
                if "*" in name.name and not _is_known_module(node.module):
                    self.error(node, '"*" imports are not allowed.')
                self.check_name(node, name.name)
                if name.asname:
                    self.check_name(node, name.asname)

            return self.node_contents_visit(node)

        def check_name(
            self, node: ast.ImportFrom, name: str | None, allow_magic_methods: bool = False
        ) -> None:
            """Check names if they are allowed.

            If ``allow_magic_methods is True`` names in `ALLOWED_FUNC_NAMES`
            are additionally allowed although their names start with `_`.

            Override to turn errors into warnings for leading underscores.
            """
            if name is None:
                return

            if (
                name.startswith("_")
                and name != "_"
                and not (
                    allow_magic_methods and name in ALLOWED_FUNC_NAMES and node.col_offset != 0
                )
            ):
                # downgraded to a warning (upstream RestrictedPython errors here)
                self.warn(
                    node,
                    '"{name}" is an invalid variable name because it '
                    'starts with "_"'.format(name=name),
                )
            elif name.endswith("__roles__"):
                self.error(
                    node,
                    '"%s" is an invalid variable name because ' 'it ends with "__roles__".' % name,
                )
            elif name in FORBIDDEN_FUNC_NAMES:
                self.error(node, f'"{name}" is a reserved name.')

        def visit_Attribute(self, node: ast.Attribute) -> ast.AST:
            """Checks and mutates attribute access/assignment.

            'a.b' becomes '_getattr_(a, "b")'
            'a.b = c' becomes '_write_(a).b = c'
            'del a.b' becomes 'del _write_(a).b'

            The _write_ function should return a security proxy.

            Override to turn errors into warnings for leading underscores.
            """
            if node.attr.startswith("_") and node.attr != "_":
                self.warn(
                    node,
                    '"{name}" is an invalid attribute name because it starts '
                    'with "_".'.format(name=node.attr),
                )

            if node.attr.endswith("__roles__"):
                self.error(
                    node,
                    '"{name}" is an invalid attribute name because it ends '
                    'with "__roles__".'.format(name=node.attr),
                )

            if isinstance(node.ctx, ast.Load):
                node = self.node_contents_visit(node)
                # NOTE(review): ast.Str was removed in Python 3.12 — this
                # needs ast.Constant on newer runtimes; confirm the target
                # interpreter version.
                new_node = ast.Call(
                    func=ast.Name("_getattr_", ast.Load()),
                    args=[node.value, ast.Str(node.attr)],
                    keywords=[],
                )

                copy_locations(new_node, node)
                return new_node

            elif isinstance(node.ctx, (ast.Store, ast.Del)):
                node = self.node_contents_visit(node)
                new_value = ast.Call(
                    func=ast.Name("_write_", ast.Load()), args=[node.value], keywords=[]
                )

                copy_locations(new_value, node.value)
                node.value = new_value
                return node

            else:  # pragma: no cover
                # Impossible Case only ctx Load, Store and Del are defined in ast.
                raise NotImplementedError(f"Unknown ctx type: {type(node.ctx)}")

    def __init__(self, source_code: str, namespace: str | None = None) -> None:
        # compilation is deferred to the cached compile_result property
        if source_code is None:
            raise TypeError("source_code may not be None")
        self.namespace = namespace or "protocols"
        self.source_code = source_code

    @cached_property
    def scope(self) -> dict[str, Any]:
        """Return the scope used for evaluation."""
        return {
            "__builtins__": {
                **safe_builtins.copy(),
                **utility_builtins.copy(),
                # enforce the module allow-list on every import
                "__import__": _safe_import,
                "classmethod": builtins.classmethod,
                "staticmethod": builtins.staticmethod,
                "any": builtins.any,
                "all": builtins.all,
                "enumerate": builtins.enumerate,
                "property": builtins.property,
                "super": builtins.super,
                "dict": builtins.dict,
                "filter": builtins.filter,
                "max": builtins.max,
                "min": builtins.min,
                "list": builtins.list,
                "next": builtins.next,
                "iter": builtins.iter,
                "type": builtins.type,
            },
            "__metaclass__": type,
            "__name__": self.namespace,
            # write/iter/inplace guards are intentionally pass-through
            "_write_": _unrestricted,
            "_getiter_": _unrestricted,
            "_getitem_": default_guarded_getitem,
            # NOTE(review): plain builtins.getattr here is unguarded —
            # RestrictedPython's safer_getattr would block underscore access
            # at runtime; confirm this relaxation is intended.
            "_getattr_": getattr,
            "_print_": PrintCollector,
            "_apply_": _apply,
            "_inplacevar_": _unrestricted,
            "_iter_unpack_sequence_": guarded_iter_unpack_sequence,
            "_unpack_sequence_": guarded_unpack_sequence,
            "hasattr": hasattr,
        }

    @cached_property
    def compile_result(self) -> CompileResult:
        """Compile the source code into bytecode."""
        return compile_restricted_exec(self.source_code, policy=self.Transformer)

    @property
    def errors(self) -> tuple[str, ...]:
        """Return errors encountered when compiling the source code."""
        return cast(tuple[str, ...], self.compile_result.errors)

    @property
    def warnings(self) -> tuple[str, ...]:
        """Return warnings encountered when compiling the source code."""
        return cast(tuple[str, ...], self.compile_result.warnings)

    def execute(self) -> dict:
        """Execute the given code in a restricted sandbox."""
        if self.errors:
            raise RuntimeError(f"Code is invalid: {self.errors}")

        # exec of the *restricted* bytecode against the guarded scope;
        # the mutated scope (containing the plugin's definitions) is returned
        exec(self.compile_result.code, self.scope)

        return self.scope
|
pubsub/__init__.py
ADDED
|
File without changes
|