canvas 0.19.1__py3-none-any.whl → 0.21.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this version of canvas has been flagged as potentially problematic.
- {canvas-0.19.1.dist-info → canvas-0.21.0.dist-info}/METADATA +1 -1
- {canvas-0.19.1.dist-info → canvas-0.21.0.dist-info}/RECORD +50 -44
- canvas_generated/messages/effects_pb2.py +2 -2
- canvas_generated/messages/effects_pb2.pyi +16 -0
- canvas_generated/messages/events_pb2.py +2 -2
- canvas_generated/messages/events_pb2.pyi +32 -0
- canvas_sdk/commands/__init__.py +2 -0
- canvas_sdk/commands/base.py +47 -3
- canvas_sdk/commands/commands/allergy.py +0 -12
- canvas_sdk/commands/commands/assess.py +0 -10
- canvas_sdk/commands/commands/close_goal.py +0 -11
- canvas_sdk/commands/commands/diagnose.py +0 -14
- canvas_sdk/commands/commands/family_history.py +0 -5
- canvas_sdk/commands/commands/follow_up.py +69 -0
- canvas_sdk/commands/commands/goal.py +0 -14
- canvas_sdk/commands/commands/history_present_illness.py +0 -5
- canvas_sdk/commands/commands/instruct.py +0 -5
- canvas_sdk/commands/commands/lab_order.py +70 -12
- canvas_sdk/commands/commands/medical_history.py +0 -15
- canvas_sdk/commands/commands/medication_statement.py +0 -5
- canvas_sdk/commands/commands/past_surgical_history.py +0 -11
- canvas_sdk/commands/commands/perform.py +0 -5
- canvas_sdk/commands/commands/plan.py +0 -5
- canvas_sdk/commands/commands/prescribe.py +7 -14
- canvas_sdk/commands/commands/questionnaire.py +0 -5
- canvas_sdk/commands/commands/reason_for_visit.py +0 -9
- canvas_sdk/commands/commands/remove_allergy.py +0 -8
- canvas_sdk/commands/commands/stop_medication.py +0 -5
- canvas_sdk/commands/commands/task.py +0 -12
- canvas_sdk/commands/commands/update_diagnosis.py +0 -10
- canvas_sdk/commands/commands/update_goal.py +0 -13
- canvas_sdk/commands/commands/vitals.py +0 -26
- canvas_sdk/commands/tests/test_base_command.py +81 -0
- canvas_sdk/effects/billing_line_item/__init__.py +11 -0
- canvas_sdk/effects/billing_line_item/add_billing_line_item.py +38 -0
- canvas_sdk/effects/billing_line_item/remove_billing_line_item.py +22 -0
- canvas_sdk/effects/billing_line_item/update_billing_line_item.py +44 -0
- canvas_sdk/effects/launch_modal.py +1 -0
- canvas_sdk/utils/stats.py +35 -0
- canvas_sdk/v1/data/lab.py +38 -0
- canvas_sdk/v1/data/note.py +3 -0
- logger/logger.py +8 -1
- plugin_runner/{plugin_installer.py → installation.py} +23 -11
- plugin_runner/plugin_runner.py +70 -40
- plugin_runner/tests/test_plugin_installer.py +3 -3
- plugin_runner/tests/test_plugin_runner.py +1 -1
- protobufs/canvas_generated/messages/effects.proto +10 -0
- protobufs/canvas_generated/messages/events.proto +16 -12
- {canvas-0.19.1.dist-info → canvas-0.21.0.dist-info}/WHEEL +0 -0
- {canvas-0.19.1.dist-info → canvas-0.21.0.dist-info}/entry_points.txt +0 -0

canvas_sdk/effects/billing_line_item/__init__.py
ADDED

@@ -0,0 +1,11 @@
+from canvas_sdk.effects.billing_line_item.add_billing_line_item import (
+    AddBillingLineItem,
+)
+from canvas_sdk.effects.billing_line_item.remove_billing_line_item import (
+    RemoveBillingLineItem,
+)
+from canvas_sdk.effects.billing_line_item.update_billing_line_item import (
+    UpdateBillingLineItem,
+)
+
+__all__ = ("AddBillingLineItem", "UpdateBillingLineItem", "RemoveBillingLineItem")

canvas_sdk/effects/billing_line_item/add_billing_line_item.py
ADDED

@@ -0,0 +1,38 @@
+from typing import Any
+
+from canvas_sdk.commands.constants import Coding
+from canvas_sdk.effects.base import EffectType, _BaseEffect
+
+
+class AddBillingLineItem(_BaseEffect):
+    """
+    An Effect that will result in a billing line item in a note footer.
+    """
+
+    class Meta:
+        effect_type = EffectType.ADD_BILLING_LINE_ITEM
+        apply_required_fields = ("note_id", "cpt")
+
+    note_id: str | None = None
+    cpt: str | None = None
+    units: int | None = 1
+    assessment_ids: list[str] = []
+    modifiers: list[Coding] = []
+
+    @property
+    def values(self) -> dict[str, Any]:
+        """The BillingLineItem's values."""
+        return {
+            "cpt": self.cpt,
+            "units": self.units,
+            "assessment_ids": self.assessment_ids,
+            "modifiers": self.modifiers,
+        }
+
+    @property
+    def effect_payload(self) -> dict[str, Any]:
+        """The payload of the effect."""
+        return {
+            "note_id": self.note_id,
+            "data": self.values,
+        }

canvas_sdk/effects/billing_line_item/remove_billing_line_item.py
ADDED

@@ -0,0 +1,22 @@
+from typing import Any
+
+from canvas_sdk.effects.base import EffectType, _BaseEffect
+
+
+class RemoveBillingLineItem(_BaseEffect):
+    """
+    An Effect that will remove a billing line item in a note footer.
+    """
+
+    class Meta:
+        effect_type = EffectType.REMOVE_BILLING_LINE_ITEM
+        apply_required_fields = ("billing_line_item_id",)
+
+    billing_line_item_id: str | None = None
+
+    @property
+    def effect_payload(self) -> dict[str, Any]:
+        """The payload of the effect."""
+        return {
+            "billing_line_item_id": self.billing_line_item_id,
+        }

canvas_sdk/effects/billing_line_item/update_billing_line_item.py
ADDED

@@ -0,0 +1,44 @@
+from typing import Any
+
+from canvas_sdk.commands.constants import Coding
+from canvas_sdk.effects.base import EffectType, _BaseEffect
+
+
+class UpdateBillingLineItem(_BaseEffect):
+    """
+    An Effect that will update a billing line item in a note footer.
+    """
+
+    class Meta:
+        effect_type = EffectType.UPDATE_BILLING_LINE_ITEM
+        apply_required_fields = ("billing_line_item_id",)
+
+    billing_line_item_id: str | None = None
+    cpt: str | None = None
+    units: int | None = None
+    assessment_ids: list[str] | None = None
+    modifiers: list[Coding] | None = None
+
+    @property
+    def values(self) -> dict[str, Any]:
+        """The BillingLineItem's values."""
+        values: dict[str, str | int | list] = {}
+        # only include the values where a value has been set so as to not unintentionally override existing values
+        if self.cpt is not None:
+            values["cpt"] = self.cpt
+        if self.units is not None:
+            values["units"] = self.units
+        if self.assessment_ids is not None:
+            values["assessment_ids"] = self.assessment_ids
+        if self.modifiers is not None:
+            values["modifiers"] = self.modifiers
+
+        return values
+
+    @property
+    def effect_payload(self) -> dict[str, Any]:
+        """The payload of the effect."""
+        return {
+            "billing_line_item_id": self.billing_line_item_id,
+            "data": self.values,
+        }
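
Taken together, the three new effects cover the add/update/remove lifecycle of a billing line item: AddBillingLineItem requires note_id and cpt, while UpdateBillingLineItem and RemoveBillingLineItem key off billing_line_item_id, and the update variant only serializes fields that were explicitly set. A minimal usage sketch follows, assuming the shared _BaseEffect base exposes the apply() serialization used by other SDK effects; the ids are placeholders.

from canvas_sdk.effects.billing_line_item import (
    AddBillingLineItem,
    RemoveBillingLineItem,
    UpdateBillingLineItem,
)

# Add a new CPT line to a note's footer (note_id and cpt are the required fields).
add = AddBillingLineItem(note_id="<note-id>", cpt="99213", units=1)

# Update only the units on an existing line item; unset fields are omitted from
# the payload so existing values are not overwritten.
update = UpdateBillingLineItem(billing_line_item_id="<line-item-id>", units=2)

# Remove an existing line item.
remove = RemoveBillingLineItem(billing_line_item_id="<line-item-id>")

# Assumed: _BaseEffect.apply() serializes effect_payload into an Effect to return
# from a plugin handler.
effects = [add.apply(), update.apply(), remove.apply()]
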

canvas_sdk/utils/stats.py
CHANGED

@@ -1,6 +1,9 @@
+from datetime import timedelta
 from time import time
 from typing import Any
 
+from statsd.defaults.env import statsd as default_statsd_client
+
 
 def get_duration_ms(start_time: float) -> int:
     """Get the duration in milliseconds since the given start time."""
@@ -26,3 +29,35 @@ def tags_to_line_protocol(tags: dict[str, Any]) -> str:
         f"{tag_name}={str(tag_value).translate(LINE_PROTOCOL_TRANSLATION)}"
         for tag_name, tag_value in tags.items()
     )
+
+
+class StatsDClientProxy:
+    """Proxy for a StatsD client."""
+
+    def __init__(self) -> None:
+        self.client = default_statsd_client
+
+    def gauge(self, metric_name: str, value: float, tags: dict[str, str]) -> None:
+        """Sends a gauge metric to StatsD with properly formatted tags.
+
+        Args:
+            metric_name (str): The name of the metric.
+            value (float): The value to report.
+            tags (dict[str, str]): Dictionary of tags to attach to the metric.
+        """
+        statsd_tags = tags_to_line_protocol(tags)
+        self.client.gauge(f"{metric_name},{statsd_tags}", value)
+
+    def timing(self, metric_name: str, delta: float | timedelta, tags: dict[str, str]) -> None:
+        """Sends a timing metric to StatsD with properly formatted tags.
+
+        Args:
+            metric_name (str): The name of the metric.
+            delta (float | timedelta): The value to report.
+            tags (dict[str, str]): Dictionary of tags to attach to the metric.
+        """
+        statsd_tags = tags_to_line_protocol(tags)
+        self.client.timing(f"{metric_name},{statsd_tags}", delta)
+
+
+statsd_client = StatsDClientProxy()
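
The new StatsDClientProxy wraps the environment-configured statsd client and folds tags into the metric name using the module's existing tags_to_line_protocol() helper. A rough sketch of the resulting wire format, assuming the statsd package's environment-based defaults (STATSD_HOST and friends) are configured; metric names and values here are illustrative.

from canvas_sdk.utils.stats import statsd_client

# tags_to_line_protocol({"plugin": "my_plugin"}) renders to "plugin=my_plugin",
# so this emits a timer named "plugins.protocol_duration_ms,plugin=my_plugin".
statsd_client.timing("plugins.protocol_duration_ms", 42, tags={"plugin": "my_plugin"})

# Gauges follow the same pattern: "plugins.active_handlers,env=production".
statsd_client.gauge("plugins.active_handlers", 3, tags={"env": "production"})
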

canvas_sdk/v1/data/lab.py
CHANGED

@@ -311,3 +311,41 @@ class LabTest(models.Model):
 
     def __str__(self) -> str:
         return f"{self.ontology_test_name}: f{self.ontology_test_code}"
+
+
+class LabPartner(models.Model):
+    """A class representing a lab partner."""
+
+    class Meta:
+        managed = False
+        db_table = "canvas_sdk_data_lab_partner_001"
+
+    objects: models.Manager["LabPartner"]
+
+    id = models.UUIDField()
+    dbid = models.BigIntegerField(primary_key=True)
+    name = models.CharField(max_length=256)
+    active = models.BooleanField()
+    electronic_ordering_enabled = models.BooleanField()
+    keywords = models.TextField()
+    default_lab_account_number = models.CharField(max_length=256)
+
+
+class LabPartnerTest(models.Model):
+    """A class representing a lab partner's test."""
+
+    class Meta:
+        managed = False
+        db_table = "canvas_sdk_data_lab_partner_test_001"
+
+    objects: models.Manager["LabPartnerTest"]
+
+    id = models.UUIDField()
+    dbid = models.BigIntegerField(primary_key=True)
+    lab_partner = models.ForeignKey(
+        "LabPartner", on_delete=models.DO_NOTHING, related_name="available_tests"
+    )
+    order_code = models.CharField(max_length=256, blank=True)
+    order_name = models.TextField()
+    keywords = models.TextField(blank=True)
+    cpt_code = models.CharField(max_length=256, blank=True, null=True)
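
Both new models are unmanaged read models over existing database tables, and LabPartnerTest.lab_partner exposes a reverse relation named available_tests. An illustrative read-only query (the partner name is a placeholder):

from canvas_sdk.v1.data.lab import LabPartner

partner = LabPartner.objects.filter(active=True, name="Example Lab").first()
if partner is not None:
    # available_tests is the related_name on LabPartnerTest.lab_partner
    for test in partner.available_tests.all():
        print(test.order_code, test.order_name, test.cpt_code)
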

canvas_sdk/v1/data/note.py
CHANGED

@@ -90,10 +90,13 @@ class NoteTypes(models.TextChoices):
 class NoteType(models.Model):
     """NoteType."""
 
+    objects: models.Manager["NoteType"]
+
     class Meta:
         managed = False
         db_table = "canvas_sdk_data_api_notetype_001"
 
+    id = models.UUIDField()
     dbid = models.BigIntegerField(primary_key=True)
     created = models.DateTimeField()
     modified = models.DateTimeField()

logger/logger.py
CHANGED

@@ -24,18 +24,25 @@ class PluginLogger:
     def __init__(self) -> None:
         self.logger = logging.getLogger("plugin_runner_logger")
         self.logger.setLevel(logging.INFO)
+
         log_prefix = os.getenv("HOSTNAME", "")
+
         if log_prefix != "":
             log_prefix = f"[{log_prefix}] "
-
+
+        formatter = logging.Formatter(
+            f"plugin_runner {log_prefix}%(levelname)s %(asctime)s %(message)s"
+        )
 
         streaming_handler = logging.StreamHandler()
         streaming_handler.setFormatter(formatter)
+
         self.logger.addHandler(streaming_handler)
 
         if os.getenv("REDIS_ENDPOINT"):
             pubsub_handler = PubSubLogHandler()
             pubsub_handler.setFormatter(formatter)
+
             self.logger.addHandler(pubsub_handler)
 
     def debug(self, message: Any) -> None:
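
The formatter now prefixes every record with plugin_runner and, when HOSTNAME is set, the bracketed host name. A standalone sketch of the resulting line, outside the SDK class, with an illustrative timestamp:

import logging

log_prefix = "[my-host] "  # what PluginLogger derives from HOSTNAME when it is set
formatter = logging.Formatter(
    f"plugin_runner {log_prefix}%(levelname)s %(asctime)s %(message)s"
)

handler = logging.StreamHandler()
handler.setFormatter(formatter)

logger = logging.getLogger("example")
logger.addHandler(handler)
logger.warning("plugin reloaded")
# -> plugin_runner [my-host] WARNING 2025-01-01 12:00:00,000 plugin reloaded
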

plugin_runner/{plugin_installer.py → installation.py}
RENAMED

@@ -14,6 +14,7 @@ import requests
 from psycopg import Connection
 from psycopg.rows import dict_row
 
+from logger import log
 from plugin_runner.aws_headers import aws_sig_v4_headers
 from plugin_runner.exceptions import InvalidPluginFormat, PluginInstallationError
 from settings import (
@@ -72,8 +73,8 @@ def enabled_plugins() -> dict[str, PluginAttributes]:
 
     with conn.cursor(row_factory=dict_row) as cursor:
         cursor.execute(
-            "
-            "
+            "SELECT name, package, version, key, value FROM plugin_io_plugin p "
+            "LEFT JOIN plugin_io_pluginsecret s ON p.id = s.plugin_id WHERE is_enabled"
         )
         rows = cursor.fetchall()
         plugins = _extract_rows_to_dict(rows)
@@ -83,6 +84,7 @@ def enabled_plugins() -> dict[str, PluginAttributes]:
 
 def _extract_rows_to_dict(rows: list) -> dict[str, PluginAttributes]:
     plugins = {}
+
     for row in rows:
         if row["name"] not in plugins:
             plugins[row["name"]] = PluginAttributes(
@@ -92,6 +94,7 @@ def _extract_rows_to_dict(rows: list) -> dict[str, PluginAttributes]:
             )
         else:
             plugins[row["name"]]["secrets"][row["key"]] = row["value"]
+
     return plugins
 
 
@@ -123,16 +126,18 @@ def download_plugin(plugin_package: str) -> Generator[Path, None, None]:
         prefix_dir = Path(temp_dir) / UPLOAD_TO_PREFIX
         prefix_dir.mkdir()  # create an intermediate directory reflecting the prefix
         download_path = Path(temp_dir) / plugin_package
+
         with open(download_path, "wb") as download_file:
             response = requests.request(method=method, url=f"https://{host}{path}", headers=headers)
             download_file.write(response.content)
+
         yield download_path
 
 
 def install_plugin(plugin_name: str, attributes: PluginAttributes) -> None:
     """Install the given Plugin's package into the runtime."""
     try:
-
+        log.info(f'Installing plugin "{plugin_name}"')
 
         plugin_installation_path = Path(PLUGIN_DIRECTORY) / plugin_name
 
@@ -145,12 +150,14 @@ def install_plugin(plugin_name: str, attributes: PluginAttributes) -> None:
 
         install_plugin_secrets(plugin_name=plugin_name, secrets=attributes["secrets"])
     except Exception as ex:
-
+        log.error(f'Failed to install plugin "{plugin_name}", version {attributes["version"]}')
        raise PluginInstallationError() from ex
 
 
 def extract_plugin(plugin_file_path: Path, plugin_installation_path: Path) -> None:
     """Extract plugin in `file` to the given `path`."""
+    log.info(f'Extracting plugin at "{plugin_file_path}"')
+
     archive: tarfile.TarFile | None = None
 
     try:
@@ -160,10 +167,10 @@ def extract_plugin(plugin_file_path: Path, plugin_installation_path: Path) -> No
             archive = tarfile.TarFile.open(fileobj=file)
             archive.extractall(plugin_installation_path, filter="data")
         except tarfile.ReadError as ex:
-
+            log.error(f"Unreadable tar archive: '{plugin_file_path}'")
             raise InvalidPluginFormat from ex
     else:
-
+        log.error(f"Unsupported file format: '{plugin_file_path}'")
         raise InvalidPluginFormat
     finally:
         if archive:
@@ -172,7 +179,7 @@ def extract_plugin(plugin_file_path: Path, plugin_installation_path: Path) -> No
 
 def install_plugin_secrets(plugin_name: str, secrets: dict[str, str]) -> None:
     """Write the plugin's secrets to disk in the package's directory."""
-
+    log.info(f"Writing plugin secrets for '{plugin_name}'")
 
     secrets_path = Path(PLUGIN_DIRECTORY) / plugin_name / SECRETS_FILE_NAME
 
@@ -188,7 +195,7 @@ def disable_plugin(plugin_name: str) -> None:
     """Disable the given plugin."""
     conn = open_database_connection()
     conn.cursor().execute(
-        "
+        "UPDATE plugin_io_plugin SET is_enabled = false WHERE name = %s", (plugin_name,)
     )
     conn.commit()
     conn.close()
@@ -198,6 +205,8 @@ def disable_plugin(plugin_name: str) -> None:
 
 def uninstall_plugin(plugin_name: str) -> None:
     """Remove the plugin from the filesystem."""
+    log.info(f'Uninstalling plugin "{plugin_name}"')
+
     plugin_path = Path(PLUGIN_DIRECTORY) / plugin_name
 
     if plugin_path.exists():
@@ -206,6 +215,8 @@ def uninstall_plugin(plugin_name: str) -> None:
 
 def install_plugins() -> None:
     """Install all enabled plugins."""
+    log.info("Installing plugins")
+
     if Path(PLUGIN_DIRECTORY).exists():
         shutil.rmtree(PLUGIN_DIRECTORY)
 
@@ -213,12 +224,13 @@ def install_plugins() -> None:
 
     for plugin_name, attributes in enabled_plugins().items():
         try:
-
+            log.info(f'Installing plugin "{plugin_name}", version {attributes["version"]}')
             install_plugin(plugin_name, attributes)
         except PluginInstallationError:
             disable_plugin(plugin_name)
-
-            f
+            log.error(
+                f'Installation failed for plugin "{plugin_name}", version {attributes["version"]};'
+                " the plugin has been disabled"
             )
             continue
 
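
The rewritten query selects one row per plugin/secret pair, and _extract_rows_to_dict() folds duplicates into a single entry whose secrets dict accumulates the key/value columns. A sketch of that folding with made-up rows; the exact PluginAttributes fields beyond "version" and "secrets" are not shown in this hunk and are assumed.

from plugin_runner.installation import _extract_rows_to_dict

# Two rows for the same plugin, one per secret, as returned by the SELECT above.
rows = [
    {"name": "my_plugin", "package": "plugins/my_plugin.tar.gz", "version": "1.0.0",
     "key": "API_KEY", "value": "abc123"},
    {"name": "my_plugin", "package": "plugins/my_plugin.tar.gz", "version": "1.0.0",
     "key": "API_URL", "value": "https://example.test"},
]

plugins = _extract_rows_to_dict(rows)
# One entry for "my_plugin"; the second row only contributes to its secrets dict,
# e.g. plugins["my_plugin"]["secrets"]["API_URL"] == "https://example.test"
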

plugin_runner/plugin_runner.py
CHANGED

@@ -13,13 +13,9 @@ from typing import Any, TypedDict
 
 import grpc
 import redis.asyncio as redis
-import statsd
 
 from canvas_generated.messages.effects_pb2 import EffectType
-from canvas_generated.messages.plugins_pb2 import
-    ReloadPluginsRequest,
-    ReloadPluginsResponse,
-)
+from canvas_generated.messages.plugins_pb2 import ReloadPluginsRequest, ReloadPluginsResponse
 from canvas_generated.services.plugin_runner_pb2_grpc import (
     PluginRunnerServicer,
     add_PluginRunnerServicer_to_server,
@@ -27,10 +23,10 @@ from canvas_generated.services.plugin_runner_pb2_grpc import (
 from canvas_sdk.effects import Effect
 from canvas_sdk.events import Event, EventRequest, EventResponse, EventType
 from canvas_sdk.protocols import ClinicalQualityMeasure
-from canvas_sdk.utils.stats import get_duration_ms,
+from canvas_sdk.utils.stats import get_duration_ms, statsd_client
 from logger import log
 from plugin_runner.authentication import token_for_plugin
-from plugin_runner.
+from plugin_runner.installation import install_plugins
 from plugin_runner.sandbox import Sandbox
 from settings import (
     CHANNEL_NAME,
@@ -110,10 +106,6 @@ class PluginManifest(TypedDict):
 class PluginRunner(PluginRunnerServicer):
     """This process runs provided plugins that register interest in incoming events."""
 
-    def __init__(self) -> None:
-        self.statsd_client = statsd.StatsClient()
-        super().__init__()
-
     sandbox: Sandbox
 
     async def HandleEvent(
@@ -168,10 +160,10 @@ class PluginRunner(PluginRunnerServicer):
                 compute_duration = get_duration_ms(compute_start_time)
 
                 log.info(f"{plugin_name}.compute() completed ({compute_duration} ms)")
-
-
-                    f"plugins.protocol_duration_ms,{statsd_tags}",
+                statsd_client.timing(
+                    "plugins.protocol_duration_ms",
                     delta=compute_duration,
+                    tags={"plugin": plugin_name},
                 )
             except Exception as e:
                 for error_line_with_newlines in traceback.format_exception(e):
@@ -186,10 +178,8 @@ class PluginRunner(PluginRunnerServicer):
         # Don't log anything if a plugin handler didn't actually run.
         if relevant_plugins:
             log.info(f"Responded to Event {event_name} ({event_duration} ms)")
-
-
-                f"plugins.event_duration_ms,{statsd_tags}",
-                delta=event_duration,
+            statsd_client.timing(
+                "plugins.event_duration_ms", delta=event_duration, tags={"event": event_name}
             )
 
         yield EventResponse(success=True, effects=effect_list)
@@ -207,19 +197,21 @@ class PluginRunner(PluginRunnerServicer):
         yield ReloadPluginsResponse(success=True)
 
 
-async def synchronize_plugins(max_iterations: None | int = None) -> None:
-    """
+async def synchronize_plugins(run_once: bool = False) -> None:
+    """
+    Listen for messages on the pubsub channel that will indicate it is
+    necessary to reinstall and reload plugins.
+    """
+    log.info(f'synchronize_plugins: listening for messages on pubsub channel "{CHANNEL_NAME}"')
+
     client, pubsub = get_client()
     await pubsub.psubscribe(CHANNEL_NAME)
-
-
-    while (
-        max_iterations is None or iterations < max_iterations
-    ):  # max_iterations == -1 means infinite iterations
-        iterations += 1
+
+    while True:
         message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=None)
+
         if message is not None:
-            log.info(
+            log.info(f'synchronize_plugins: received message from pubsub channel "{CHANNEL_NAME}"')
 
             message_type = message.get("type", "")
 
@@ -232,19 +224,44 @@ async def synchronize_plugins(max_iterations: None | int = None) -> None:
             continue
 
         if data["action"] == "reload":
+            log.info("synchronize_plugins: installing and reloading plugins for action=reload")
+
             try:
-                log.info(
-                    "plugin-synchronizer: installing and reloading plugins after receiving command"
-                )
                 install_plugins()
+            except Exception as e:
+                # TODO capture_exception when Sentry is installed
+                log.error(f"synchronize_plugins: install_plugins failed: {e}")
+
+            try:
                 load_plugins()
             except Exception as e:
-
+                # TODO capture_exception when Sentry is installed
+                log.error(f"synchronize_plugins: load_plugins failed: {e}")
+        if run_once:
+            break
+
+
+async def synchronize_plugins_and_report_errors() -> None:
+    """
+    Run synchronize_plugins() in perpetuity and report any encountered errors.
+    """
+    log.info("Starting synchronize_plugins loop...")
+
+    while True:
+        try:
+            await synchronize_plugins()
+        except Exception as e:
+            log.error(f"synchronize_plugins error: {e}")
+
+            # don't crush redis if we're retrying in a tight loop
+            await asyncio.sleep(0.5)
 
 
 def validate_effects(effects: list[Effect]) -> list[Effect]:
     """Validates the effects based on predefined rules.
-
+
+    Keeps only the first AUTOCOMPLETE_SEARCH_RESULTS effect and preserve all
+    non-search-related effects.
     """
     seen_autocomplete = False
     validated_effects = []
@@ -254,7 +271,9 @@ def validate_effects(effects: list[Effect]) -> list[Effect]:
             if seen_autocomplete:
                 log.warning("Discarding additional AUTOCOMPLETE_SEARCH_RESULTS effect.")
                 continue
+
            seen_autocomplete = True
+
        validated_effects.append(effect)
 
     return validated_effects
@@ -311,7 +330,7 @@ def sandbox_from_module(base_path: pathlib.Path, module_name: str) -> Any:
 
 async def publish_message(message: dict) -> None:
     """Publish a message to the pubsub channel."""
-    log.info(
+    log.info(f'Publishing message to pubsub channel "{CHANNEL_NAME}"')
     client, _ = get_client()
 
     await client.publish(CHANNEL_NAME, pickle.dumps(message))
@@ -327,7 +346,7 @@ def get_client() -> tuple[redis.Redis, redis.client.PubSub]:
 
 def load_or_reload_plugin(path: pathlib.Path) -> None:
     """Given a path, load or reload a plugin."""
-    log.info(f
+    log.info(f'Loading plugin at "{path}"')
 
     manifest_file = path / MANIFEST_FILE_NAME
     manifest_json_str = manifest_file.read_text()
@@ -366,7 +385,7 @@ def load_or_reload_plugin(path: pathlib.Path) -> None:
             handler_module, handler_class = handler["class"].split(":")
             name_and_class = f"{name}:{handler_module}:{handler_class}"
         except ValueError:
-            log.error(f
+            log.error(f'Unable to parse class for plugin "{name}": "{handler["class"]}"')
             continue
 
         try:
@@ -391,7 +410,8 @@ def load_or_reload_plugin(path: pathlib.Path) -> None:
                 "secrets": secrets_json,
             }
         except Exception as err:
-            log.error(f
+            log.error(f'Error importing module "{name_and_class}": {err}')
+
             for error_line in traceback.format_exception(err):
                 log.error(error_line)
 
@@ -438,9 +458,6 @@ def load_plugins(specified_plugin_paths: list[str] | None = None) -> None:
     # get all directories under the plugin directory
     plugin_paths = [path for path in plugin_paths if path.is_dir()]
 
-    # filter to only the directories containing a manifest file
-    plugin_paths = [path for path in plugin_paths if (path / MANIFEST_FILE_NAME).exists()]
-
     # load or reload each plugin
     for plugin_path in plugin_paths:
         load_or_reload_plugin(plugin_path)
@@ -452,6 +469,15 @@ def load_plugins(specified_plugin_paths: list[str] | None = None) -> None:
 
     refresh_event_type_map()
 
+    log_nr_event_handlers()
+
+
+def log_nr_event_handlers() -> None:
+    """Log the number of event handlers for each event."""
+    for key in EventType.keys():  # noqa: SIM118
+        value = len(EVENT_HANDLER_MAP[key]) if key in EVENT_HANDLER_MAP else 0
+        statsd_client.timing("plugins.event_nr_handlers", value, tags={"event": key})
+
 
 _cleanup_coroutines = []
 
@@ -481,6 +507,7 @@ async def serve(specified_plugin_paths: list[str] | None = None) -> None:
     await server.wait_for_termination()
 
 
+# NOTE: specified_plugin_paths powers the `canvas run-plugins` command
 def run_server(specified_plugin_paths: list[str] | None = None) -> None:
     """Run the server."""
     loop = asyncio.new_event_loop()
@@ -489,7 +516,10 @@ def run_server(specified_plugin_paths: list[str] | None = None) -> None:
 
     try:
         loop.run_until_complete(
-            asyncio.gather(
+            asyncio.gather(
+                serve(specified_plugin_paths),
+                synchronize_plugins_and_report_errors(),
+            )
         )
     except KeyboardInterrupt:
         pass
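
For reference, the reload path is driven by pickled pubsub messages: publish_message() pickles a dict onto CHANNEL_NAME, and synchronize_plugins() reinstalls and reloads plugins when the unpickled payload carries {"action": "reload"}. A minimal sketch of triggering that path from an async context, assuming the runner's Redis settings are configured:

import asyncio

from plugin_runner.plugin_runner import publish_message

async def trigger_reload() -> None:
    # synchronize_plugins() looks for data["action"] == "reload" in the unpickled payload
    await publish_message({"action": "reload"})

asyncio.run(trigger_reload())
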

plugin_runner/tests/test_plugin_installer.py
CHANGED

@@ -6,7 +6,7 @@ from unittest.mock import MagicMock, patch
 
 from pytest_mock import MockerFixture
 
-from plugin_runner.
+from plugin_runner.installation import (
     PluginAttributes,
     _extract_rows_to_dict,
     download_plugin,
@@ -82,7 +82,7 @@ def test_plugin_installation_from_tarball(mocker: MockerFixture) -> None:
     tarball_1 = _create_tarball("plugin1")
     tarball_2 = _create_tarball("plugin2")
 
-    mocker.patch("plugin_runner.
+    mocker.patch("plugin_runner.installation.enabled_plugins", return_value=mock_plugins)
 
     def mock_download_plugin(package: str) -> MagicMock:
         mock_context = mocker.Mock()
@@ -94,7 +94,7 @@ def test_plugin_installation_from_tarball(mocker: MockerFixture) -> None:
         return mock_context
 
     mocker.patch(
-        "plugin_runner.
+        "plugin_runner.installation.download_plugin",
         side_effect=mock_download_plugin,
     )
 

plugin_runner/tests/test_plugin_runner.py
CHANGED

@@ -271,7 +271,7 @@ async def test_synchronize_plugins_calls_install_and_load_plugins() -> None:
         "data": pickle.dumps({"action": "reload"}),
     }
 
-    task = asyncio.create_task(synchronize_plugins(
+    task = asyncio.create_task(synchronize_plugins(run_once=True))
     await asyncio.sleep(0.1)  # Give some time for the coroutine to run
     task.cancel()
 