corva-worker-python 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- corva_worker_python-2.0.0.dist-info/METADATA +30 -0
- corva_worker_python-2.0.0.dist-info/RECORD +63 -0
- corva_worker_python-2.0.0.dist-info/WHEEL +5 -0
- corva_worker_python-2.0.0.dist-info/top_level.txt +1 -0
- worker/__init__.py +5 -0
- worker/app/__init__.py +291 -0
- worker/app/modules/__init__.py +265 -0
- worker/app/modules/activity_module.py +141 -0
- worker/app/modules/connection_module.py +21 -0
- worker/app/modules/depth_activity_module.py +21 -0
- worker/app/modules/scheduler.py +44 -0
- worker/app/modules/time_activity_module.py +21 -0
- worker/app/modules/trigger.py +43 -0
- worker/constants.py +51 -0
- worker/data/__init__.py +0 -0
- worker/data/activity/__init__.py +132 -0
- worker/data/activity/activity_grouping.py +242 -0
- worker/data/alert.py +89 -0
- worker/data/api.py +155 -0
- worker/data/enums.py +141 -0
- worker/data/json_encoder.py +18 -0
- worker/data/math.py +104 -0
- worker/data/operations.py +477 -0
- worker/data/serialization.py +110 -0
- worker/data/task_handler.py +82 -0
- worker/data/two_way_dict.py +17 -0
- worker/data/unit_conversions.py +5 -0
- worker/data/wits.py +323 -0
- worker/event/__init__.py +53 -0
- worker/event/event_handler.py +90 -0
- worker/event/scheduled.py +64 -0
- worker/event/stream.py +48 -0
- worker/exceptions.py +26 -0
- worker/mixins/__init__.py +0 -0
- worker/mixins/logging.py +119 -0
- worker/mixins/rollbar.py +87 -0
- worker/partial_rerun_merge/__init__.py +0 -0
- worker/partial_rerun_merge/merge.py +500 -0
- worker/partial_rerun_merge/models.py +91 -0
- worker/partial_rerun_merge/progress.py +241 -0
- worker/state/__init__.py +96 -0
- worker/state/mixins.py +111 -0
- worker/state/state.py +46 -0
- worker/test/__init__.py +3 -0
- worker/test/lambda_function_test_run.py +196 -0
- worker/test/local_testing/__init__.py +0 -0
- worker/test/local_testing/to_local_transfer.py +360 -0
- worker/test/utils.py +51 -0
- worker/wellbore/__init__.py +0 -0
- worker/wellbore/factory.py +496 -0
- worker/wellbore/measured_depth_finder.py +12 -0
- worker/wellbore/model/__init__.py +0 -0
- worker/wellbore/model/ann.py +103 -0
- worker/wellbore/model/annulus.py +113 -0
- worker/wellbore/model/drillstring.py +196 -0
- worker/wellbore/model/drillstring_components.py +439 -0
- worker/wellbore/model/element.py +102 -0
- worker/wellbore/model/enums.py +92 -0
- worker/wellbore/model/hole.py +297 -0
- worker/wellbore/model/hole_section.py +51 -0
- worker/wellbore/model/riser.py +22 -0
- worker/wellbore/sections_mixin.py +64 -0
- worker/wellbore/wellbore.py +289 -0
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
import itertools
|
|
2
|
+
from collections import OrderedDict
|
|
3
|
+
from typing import List, Union
|
|
4
|
+
|
|
5
|
+
from worker.data import operations
|
|
6
|
+
from worker.data.activity import Activity
|
|
7
|
+
from worker.data.math import split_zip_edges
|
|
8
|
+
from worker.data.serialization import serialization
|
|
9
|
+
from worker.data.wits import WITS
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@serialization
class ActivityGroup:
    """
    A contiguous, inclusive time interval [start, end] (epoch seconds) tagged
    with a single Activity. Instances are serializable via the @serialization
    decorator using SERIALIZED_VARIABLES.
    """

    SERIALIZED_VARIABLES = {"activity": Activity, "start": int, "end": int}

    def __init__(self, activity: Activity, start: int, end: int, **kwargs):
        """
        :param activity: the activity of this group
        :param start: start timestamp (inclusive)
        :param end: end timestamp (inclusive); must be >= start
        :raises ValueError: if end < start
        """
        self.activity = activity
        if end < start:
            # Previously an invalid range silently left `start`/`end` unset,
            # producing a half-built instance that raised AttributeError much
            # later (e.g. in `duration` or `merge`). Fail fast instead.
            raise ValueError(f"end ({end}) must be >= start ({start})")
        self.start = start
        self.end = end

    @property
    def duration(self) -> int:
        """
        :return: duration in seconds (inclusive of both endpoints)
        """
        return self.end - self.start + 1

    @classmethod
    def merge(
        cls,
        grp1: "ActivityGroup",
        grp2: "ActivityGroup",
        max_duration_gap: int = None,
        resultant_activity: Activity = None,
    ) -> Union["ActivityGroup", None]:
        """
        Merging two groups of activities into one.
        :param grp1:
        :param grp2:
        :param max_duration_gap: allowed gap between groups; if None, any gap is accepted
        :param resultant_activity: if provided it will be used as the output activity, otherwise the activity
        of both groups are checked and only if they are the same will the merge continue
        :return: the merged group, or None if the gap is too large or the activities differ
        """
        min_start = min(grp1.start, grp2.start)
        max_end = max(grp1.end, grp2.end)

        if max_duration_gap is not None:
            max_start = max(grp1.start, grp2.start)
            min_end = min(grp1.end, grp2.end)

            # if the gap between two activities is greater than the threshold, return None
            if max_start - min_end > max_duration_gap:
                return None

        if not resultant_activity:
            if grp1.activity != grp2.activity:
                return None
            resultant_activity = grp1.activity

        return ActivityGroup(resultant_activity, min_start, max_end)

    @staticmethod
    def has_overlap(group1: "ActivityGroup", group2: "ActivityGroup") -> bool:
        """
        Checking if two activity groups have overlap regardless of their precedence.
        :param group1:
        :param group2:
        :return: True if the intervals intersect; False if either group is falsy
        """
        if not all([group1, group2]):
            return False

        max_start = max(group1.start, group2.start)
        min_end = min(group1.end, group2.end)

        return max_start <= min_end

    def __eq__(self, other):
        """Groups are equal when activity, start and end all match."""
        if not isinstance(other, ActivityGroup):
            return False

        return operations.equal(self, other, list(self.SERIALIZED_VARIABLES.keys()))

    def __repr__(self):
        return f"{self.activity.value:<25}: {self.start}-{self.end} -> duration={self.duration:>4}"
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
"""
In 1-second data frequency sometimes some activities will be added that can be
part of the bounding activities. For instance, you might be Reaming Down and
for a second the bit depth is the same as the previous timestamp and the activity
is Rotary Off Bottom. This will then be grouped into Reaming Down group.
"""
# Maps a bounding activity -> the "neutral" activities it may absorb when a
# single neutral sample is sandwiched between two identical bounding samples.
# An OrderedDict is used deliberately: perform_neutral_grouping iterates these
# entries in order, and earlier substitutions can affect later ones.
NEUTRAL_ACTIVITIES = OrderedDict(
    [
        (Activity.RUN_IN_HOLE, [Activity.STATIC_OFF_BOTTOM]),
        (Activity.PULL_OUT_OF_HOLE, [Activity.STATIC_OFF_BOTTOM]),
        (Activity.WASHING_DOWN, [Activity.CIRCULATING]),
        (Activity.WASHING_UP, [Activity.CIRCULATING]),
        (Activity.DRY_REAMING_DOWN, [Activity.DRY_ROTARY_OFF_BOTTOM]),
        (Activity.DRY_REAMING_UP, [Activity.DRY_ROTARY_OFF_BOTTOM]),
        (Activity.REAMING_DOWN, [Activity.ROTARY_OFF_BOTTOM]),
        (Activity.REAMING_UP, [Activity.ROTARY_OFF_BOTTOM]),
        # NOTE(review): the last four entries are the inverse direction
        # (off-bottom states absorbing movement states) — presumably
        # intentional symmetry; confirm with the activity-model owners.
        (Activity.STATIC_OFF_BOTTOM, [Activity.RUN_IN_HOLE, Activity.PULL_OUT_OF_HOLE]),
        (Activity.CIRCULATING, [Activity.WASHING_DOWN, Activity.WASHING_UP]),
        (Activity.DRY_ROTARY_OFF_BOTTOM, [Activity.DRY_REAMING_DOWN, Activity.DRY_REAMING_UP]),
        (Activity.ROTARY_OFF_BOTTOM, [Activity.REAMING_DOWN, Activity.REAMING_UP]),
    ]
)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class ActivityGrouping:
    """
    Groups a stream of WITS records into contiguous ActivityGroup intervals,
    optionally absorbing neutral activities, merging related activities, and
    filling gaps between groups.
    """

    def __init__(
        self,
        min_group_duration: int = 3,
        apply_neutral_grouping: bool = True,
        apply_gap_filling: bool = True,
        merging_dict: dict = None,
        time_step=None,
    ):
        """
        :param min_group_duration: minimum duration of each group
        :param apply_neutral_grouping: absorb single neutral samples into identical bounding activities
        :param apply_gap_filling: run the (currently pass-through) gap-filling step on the result
        :param merging_dict: A dictionary representing the activities to be merged. For instance in the following
        example the Pull Out of Hole, Washing Up and Reaming Up are merged into one group and the activity of the
        whole group is PULL_OUT_OF_HOLE.
        {
            Activity.PULL_OUT_OF_HOLE : [Activity.PULL_OUT_OF_HOLE, Activity.WASHING_UP, Activity.REAMING_UP],
            ...
        }
        :param time_step: expected data cadence in seconds; computed from the data if not given
        """
        self.min_group_duration = min_group_duration
        self.apply_neutral_grouping = apply_neutral_grouping
        self.apply_gap_filling = apply_gap_filling
        self.merging_dict = merging_dict
        self.time_step = time_step

    def group(self, wits_records: List[WITS]) -> List[ActivityGroup]:
        """
        Categorize a stream of wits data into groups of similar activities.
        :param wits_records: wits records
        :return: ActivityGroup list sorted by start time
        """
        timestamps = [wits.timestamp for wits in wits_records]
        activities = [wits.state for wits in wits_records]

        if self.apply_neutral_grouping:
            activities = perform_neutral_grouping(activities)

        if not self.time_step:
            # Cache the computed cadence so repeated calls reuse it.
            self.time_step = operations.compute_time_step(timestamps, percent=95)

        groups = []
        for activity in unique_everseen(activities):
            # Finding timestamps of matching activities (zip avoids manual index arithmetic)
            valid_timestamps = [ts for ts, act in zip(timestamps, activities) if act == activity]
            # Grouping and finding the edges of the activities
            edges = split_zip_edges(
                valid_timestamps, separation_length=self.time_step, min_segment_length=self.min_group_duration
            )
            groups.extend(ActivityGroup(activity=activity, start=start, end=end) for (start, end) in edges)

        groups.sort(key=lambda grp: grp.start)

        if self.merging_dict:
            # 1. mapping the activities to their merging activities
            for grp in groups:
                grp.activity = self.merge_activity_map(grp.activity)

            # 2. merging adjacent activities with similar merging activities
            max_duration_gap = max(self.time_step, 2)  # loop-invariant; hoisted out of the loop
            index = 1
            while index < len(groups):
                grp1, grp2 = groups[index - 1], groups[index]
                merged_group = ActivityGroup.merge(grp1, grp2, max_duration_gap=max_duration_gap)
                if not merged_group:
                    index += 1
                    continue
                # update one and remove the next; index stays put so the merged
                # group can be merged again with the new neighbor
                groups[index - 1] = merged_group
                groups.pop(index)

        if self.apply_gap_filling:
            groups = perform_gap_filling(groups)

        return groups

    def merge_activity_map(self, activity):
        """
        Map the constituent activity to the group activity.
        :param activity:
        :return: the merging-group key that contains this activity, or the activity itself
        """
        if not self.merging_dict:
            return activity

        for merging_group, constituent_activities in self.merging_dict.items():
            if activity in constituent_activities:
                return merging_group

        return activity
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
def perform_neutral_grouping(activities: List[Activity]):
    """
    Absorb single-sample "neutral" activities into identical bounding activities.

    For each (bounding, neutrals) pair in NEUTRAL_ACTIVITIES, any element that is
    sandwiched between two samples of the bounding activity and belongs to the
    neutral list is replaced by the bounding activity.

    Mutates `activities` in place and returns it.
    """
    for bound_activity, neutral_activities in NEUTRAL_ACTIVITIES.items():
        for idx in range(1, len(activities) - 1):
            sandwiched = activities[idx - 1] == bound_activity == activities[idx + 1]
            if sandwiched and activities[idx] in neutral_activities:
                activities[idx] = bound_activity
    return activities
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def perform_gap_filling(groups: List[ActivityGroup]):
    """
    Fill the gaps between adjacent activity groups.

    Currently a pass-through placeholder: the input list is returned unchanged.
    """
    # TODO something to consider
    return groups
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def unique_everseen(iterable, key=None):
    """
    from: https://docs.python.org/3.3/library/itertools.html
    List unique elements, preserving order. Remember all elements ever seen.
    :param iterable:
    :param key: optional function producing the comparison marker for each element
    :return: generator of first-seen elements
    Example:
        unique_everseen('AAAABBBCCDAABBB') --> A B C D
        unique_everseen('ABBCcAD', str.lower) --> A B C D
    """
    seen = set()
    remember = seen.add
    for element in iterable:
        # the marker is the element itself unless a key function is supplied
        marker = element if key is None else key(element)
        if marker in seen:
            continue
        remember(marker)
        yield element
|
worker/data/alert.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import simplejson as json
|
|
2
|
+
|
|
3
|
+
from worker import API
|
|
4
|
+
from worker.data.json_encoder import JsonEncoder
|
|
5
|
+
from worker.mixins.logging import LoggingMixin
|
|
6
|
+
from worker.mixins.rollbar import RollbarMixin
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Alert(LoggingMixin, RollbarMixin):
    """
    Alert class can be used to trigger alerts from a data app.
    An event based alert has to be created on the Corva UI with a unique identifier.

    How to use:
    1. Create an instance of Alert with the same unique identifier and asset_id.
    2. Call trigger_alert and pass the required parameters to trigger the alert.
    3. To trigger an alert on Corva, the well must be active and visible to the user. (Current rules)
    """

    def __init__(self, asset_id: int, identifier: str, last_trigger_timestamp=None, *args, **kwargs):
        """
        :param asset_id: asset the alert belongs to
        :param identifier: unique identifier of the alert definition created on the Corva UI
        :param last_trigger_timestamp: timestamp of the most recent successful trigger, if any
        """
        self.asset_id = asset_id
        self.identifier = identifier
        self.last_trigger_timestamp = last_trigger_timestamp

        # forward remaining args to the LoggingMixin/RollbarMixin chain
        super().__init__(*args, **kwargs)

    def trigger_alert(self, timestamp: int, timestamp_read: int, context: dict):
        """
        Function used to trigger the alert for the unique identifier.

        :param timestamp: timestamp at which the event for the alert is detected
        :param timestamp_read: timestamp at which the wits record was read by the source app
        :param context: A dict of data to be sent to the alerts engine
        :return: the alerts-engine response payload on success; None on failure
        """

        context = context or {}
        # identifier/asset_id/timestamps overwrite any caller-provided keys of the same name
        context.update(
            {
                "identifier": self.identifier,
                "asset_id": self.asset_id,
                "timestamp": timestamp,
                "timestamp_read": timestamp_read,
            }
        )

        api = API()
        try:
            # ignore_nan=True serializes NaN as null (simplejson feature)
            trigger = api.post(
                "/v1/alerts/definitions/trigger/", data=json.dumps(context, cls=JsonEncoder, ignore_nan=True)
            ).data
            self.debug(self.asset_id, f"Triggered alert with context -> {context} and received response {trigger}")
            # only record the trigger time when the API call succeeded
            self.last_trigger_timestamp = timestamp

            return trigger
        except Exception as ex:
            # best-effort: log and report the failure, never propagate to the caller
            message = f"Error while triggering alert for context {context} with exception {ex}"
            self.debug(self.asset_id, f"Failed to trigger alert with context -> {context}")
            self.track_error(message)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class Alerter:
    """
    Class-level convenience wrapper around a single shared Alert instance.

    Set the asset id once with set_asset_id(); subsequent trigger_alert()
    calls reuse the shared Alert, overriding its asset_id and identifier.
    """

    alerter = None  # lazily-created shared Alert instance
    asset_id = None  # asset id applied to every triggered alert

    @classmethod
    def set_asset_id(cls, asset_id):
        """Set the asset id used by all subsequent trigger_alert calls."""
        cls.asset_id = asset_id

    @classmethod
    def get_alerter(cls):
        """Return the shared Alert instance, creating a placeholder one on first use."""
        if cls.alerter is None:
            cls.alerter = Alert(0, "")

        return cls.alerter

    @classmethod
    def trigger_alert(
        cls, identifier: str, message: str, timestamp: int = None, timestamp_read: int = None, context: dict = None
    ):
        """
        Trigger the alert identified by `identifier` with `message` merged into the context.
        No-op when no asset id has been set (note: asset_id 0 is also treated as unset).

        :param identifier: unique identifier of the alert definition
        :param message: message merged into the alert context under the "message" key
        :param timestamp: timestamp at which the event was detected
        :param timestamp_read: timestamp at which the record was read by the source app
        :param context: extra data forwarded to the alerts engine
        """
        if not cls.asset_id:
            return

        context = context or {}
        context.update({"message": message})

        # fetch the shared instance once instead of calling get_alerter() three times
        alerter = cls.get_alerter()
        alerter.asset_id = cls.asset_id
        alerter.identifier = identifier
        alerter.trigger_alert(timestamp, timestamp_read, context)
|
worker/data/api.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
import requests
|
|
4
|
+
from requests.adapters import HTTPAdapter
|
|
5
|
+
from urllib3 import Retry
|
|
6
|
+
|
|
7
|
+
from worker import exceptions
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class API(object):
    """
    Thin client for the Corva REST API.

    Configuration comes from constructor kwargs or, as a fallback, from the
    environment variables listed in `options`. All verb helpers route through
    `call`, which adds auth headers and an HTTP retry strategy.
    """

    HTTP_METHODS = ("get", "post", "patch", "put", "delete")
    # attribute name -> environment variable used as its default
    options = {"api_url": "API_ROOT_URL", "api_key": "API_KEY", "app_name": "APP_NAME"}

    def __init__(self, *args, **kwargs):
        self.configure(self.options, kwargs)

    def configure(self, options, values):
        """
        Set each attribute in `options` from `values` or the matching env var.
        :raises exceptions.Misconfigured: when neither source provides a value
        """
        for attribute, environment_key in options.items():
            value = values.pop(attribute, os.getenv(environment_key, None))
            if not value:
                error = "No {0} parameter or {1} environment variable defined".format(attribute, environment_key)
                raise exceptions.Misconfigured(error)

            setattr(self, attribute, value)

    def get(self, *args, **kwargs):
        return self.call("get", *args, **kwargs)

    def post(self, *args, **kwargs):
        return self.call("post", *args, **kwargs)

    def patch(self, *args, **kwargs):
        return self.call("patch", *args, **kwargs)

    def put(self, *args, **kwargs):
        return self.call("put", *args, **kwargs)

    def delete(self, *args, **kwargs):
        return self.call("delete", *args, **kwargs)

    def call(self, method, path, **kwargs):
        """
        Perform an HTTP request against the Corva API.

        :param method: one of HTTP_METHODS (case-insensitive)
        :param path: absolute URL or path relative to api_url
        :param kwargs: `content_type`, `data`, `retry_count` are consumed here;
            every remaining kwarg is sent as a query parameter
        :return: a Result wrapping the response
        :raises exceptions.APIError: invalid method, 401, or other non-OK status
        :raises exceptions.Forbidden: on 403
        :raises exceptions.NotFound: on 404
        """
        content_type = kwargs.pop("content_type", "application/json")
        headers = {
            "Authorization": "API {0}".format(self.api_key),
            "Content-Type": content_type,
            "X-Corva-App": self.app_name,
        }

        method = method.lower()
        if method not in self.HTTP_METHODS:
            raise exceptions.APIError("Invalid HTTP method {0}".format(method))

        # a fresh session + retry adapter is built per call; retries cover
        # transient server/throttling statuses without raising on exhaustion
        retry_count = kwargs.pop("retry_count", 5)
        retry_strategy = Retry(
            total=retry_count,
            status_forcelist=[408, 429, 500, 502, 503, 504],
            allowed_methods=["GET", "POST", "PATCH", "PUT", "DELETE"],
            backoff_factor=0.3,
            raise_on_status=False,
        )
        adapter = HTTPAdapter(max_retries=retry_strategy)
        http = requests.Session()
        http.mount("http://", adapter)
        http.mount("https://", adapter)
        http_method = getattr(http, method)

        data = kwargs.pop("data", None)
        if not path.startswith(self.api_url):
            path = "{0}{1}".format(self.api_url, path)

        # note: the leftover kwargs double as query params here
        response = http_method(url=path, data=data, params=kwargs, headers=headers)

        # closing the http connection after a request
        http.close()

        # asset_id is only read for the 403 error message below
        asset_id = kwargs.get("asset_id", "Unknown") or "Unknown"

        if response.status_code == 401:
            raise exceptions.APIError("Unable to reach Corva API")

        if response.status_code == 403:
            raise exceptions.Forbidden("No access to asset {0}".format(asset_id))

        if response.status_code == 404:
            raise exceptions.NotFound("Not Found")

        if not response.ok:
            raise exceptions.APIError(f"{response.status_code} - {response.reason}")

        result = Result(response, **kwargs)
        return result

    def get_by_id(self, path, **kwargs):
        """
        Get a document with the given kwargs ('collection', 'id')
        :param path: API path
        :param kwargs: 'collection' and 'id' kwargs are required
        :return: a result object
        """
        # NOTE(review): a missing 'collection'/'id' silently produces "None" in
        # the URL rather than failing early — confirm callers always pass both.
        collection = kwargs.pop("collection", None)
        component_id = kwargs.pop("id", None)
        path = "{0}{1}{2}/{3}".format(self.api_url, path, collection, component_id)

        result = self.get(path)
        return result
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class Result(object):
    """
    Wrapper around an HTTP response: eagerly parses the JSON payload into
    `data` and exposes small helpers (status, count, iteration).
    """

    def __init__(self, response, **kwargs):
        """
        :param response: a requests-style response object
        :param kwargs: leftover request parameters, kept for reference
        :raises exceptions.APIError: when the body is not valid JSON and is not
            an empty 200 response
        """
        self.response = response
        self.params = kwargs
        self.data = None

        try:
            self.data = response.json()
        except Exception:
            # An empty 200 body is a legitimate "no content" response;
            # anything else that fails to parse is malformed.
            empty_ok = response.status_code == 200 and response.content == b""
            if not empty_ok:
                raise exceptions.APIError("Invalid API response")

    def __repr__(self):
        return repr(self.data)

    def __iter__(self):
        return iter(self.data)

    @property
    def status(self):
        """HTTP status code of the underlying response."""
        return self.response.status_code

    @property
    def count(self):
        """Number of records: len() for a list, 1 for a non-empty dict, 0 otherwise."""
        if isinstance(self.data, list):
            return len(self.data)
        if isinstance(self.data, dict) and self.data:
            return 1
        return 0
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def convert_python_sdk_api_to_worker_api(python_sdk_api) -> API:
    """
    In cases that the python sdk api is used,
    this function converts it to worker api.

    Args:
        python_sdk_api (Api): Corva Python SDK Api

    Returns:
        API: Worker API object
    """
    # note: the SDK's `app_key` maps onto the worker API's `app_name`
    worker_kwargs = {
        "api_url": python_sdk_api.api_url,
        "api_key": python_sdk_api.api_key,
        "app_name": python_sdk_api.app_key,
    }
    return API(**worker_kwargs)
|
worker/data/enums.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
from enum import Enum, auto
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class LambdaStates(Enum):
    """
    An enumeration of possible states for a Lambda function.

    Attributes:
        TIMED_OUT (str): The Lambda function timed out.
        SUCCEEDED (str): The Lambda function completed successfully.
        FAILED (str): The Lambda function failed to complete.
    """

    # values are human-readable status messages
    TIMED_OUT = "Lambda timed out."
    SUCCEEDED = "Lambda process succeeded."
    FAILED = "Lambda process failed."
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class EventType(Enum):
    """
    Enum class representing the different types of events that can be handled.
    The values of this enum are used as the keys in the event dictionary.
    For instance in the worker apps, the 'event-type' in the constants file
    should be either 'wits_stream' or 'scheduler'. For partial rerun merge
    events, the 'event_type' node is set to 'partial-well-rerun-merge'.
    """

    STREAM = "wits_stream"  # realtime wits stream events
    SCHEDULER = "scheduler"  # time-scheduled events
    PARTIAL_RERUN = "partial-well-rerun-merge"  # partial well rerun merge events
    TASK = "task_event"
    GENERIC = "generic_event"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class ChannelStatus(Enum):
    """
    Enum class representing the status of a channel in WITS data.
    MISSING distinguishes an absent channel from one explicitly turned off.
    """

    ON = "on"
    OFF = "off"
    MISSING = "missing"
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class DataStatus(Enum):
    """
    Enum representing the status of data.
    OVERRIDDEN marks a value that was replaced rather than measured.
    """

    VALID = "valid"
    MISSING = "missing"
    OVERRIDDEN = "overridden"
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class Environment(Enum):
    """
    An enumeration of the deployment environments the worker can run in.
    """

    QA = "qa"
    STAGING = "staging"
    PRODUCTION = "production"
    LOCAL = "local"  # local development / testing
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class CollectionRecordDataScope(Enum):
    """
    Indicates the span of well data a collection record covers,
    from whole-well history down to near-realtime windows.
    """

    BHA_OR_CASING = auto()  # Active BHA or Casing
    SINCE_START = auto()  # Since the start of the well
    FORMATION = auto()  # Active Formation
    FROM_LAST_CASING = auto()  # Since the last casing
    BHA = auto()  # Active BHA
    CURRENT = auto()  # A few minutes ago to current
    SEMI_CURRENT = auto()  # A couple of hours ago to current instant

    @classmethod
    def current_modes(cls) -> List["CollectionRecordDataScope"]:
        """Scopes treated as "current" (near-realtime) mode."""
        return [cls.CURRENT, cls.SEMI_CURRENT]

    @classmethod
    def not_current_modes(cls) -> List["CollectionRecordDataScope"]:
        """Scopes that are not treated as "current" mode."""
        current = cls.current_modes()
        return [member for member in cls.__members__.values() if member not in current]
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
class CountOfCollectionRecord(Enum):
    """
    An enumeration representing the count of collection records.

    Attributes:
        ONE_PER_BHA_OR_CASING: Represents one record per BHA or casing.
        ONE_PER_WELL: Represents one record per well.
    """

    ONE_PER_BHA_OR_CASING = auto()
    ONE_PER_WELL = auto()
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class RerunMode(Enum):
    """
    An enumeration representing the mode of rerun.

    Attributes:
        REALTIME: When the rerun asset catches up with the original asset,
        so the end time of the rerun asset is the same as the original asset
        HISTORICAL: When an old portion of the original asset runs on the
        rerun asset, so the end time of the rerun asset is not the same as
        the end time of the original asset
    """

    REALTIME = "realtime"
    HISTORICAL = "historical"
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
class PartialRerunStatus(Enum):
    """
    An enumeration representing the different states of a partial rerun.
    Note that in this project, only the MERGING, FAILED and COMPLETED statuses are used.
    """

    INITIALIZED = "initialized"
    RUNNING = "running"
    PENDING_MERGE = "pending_merge"
    MERGE_INITIALIZED = "merge_initialized"
    FAILED = "failed"
    STOPPED = "stopped"
    MERGING = "merging"  # merge in progress (used by this project)
    COMPLETED = "completed"  # merge finished successfully (used by this project)
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import numpy
|
|
2
|
+
import simplejson as json
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class JsonEncoder(json.JSONEncoder):
    """
    This encoder can be used to convert incompatible data types to types compatible with json.dumps()
    Use like json.dumps(output, cls=JsonEncoder)
    """

    def default(self, obj):
        """Convert numpy scalars/arrays to native Python; defer everything else to the base class."""
        if isinstance(obj, numpy.ndarray):
            return obj.tolist()
        if isinstance(obj, numpy.floating):
            return float(obj)
        if isinstance(obj, numpy.integer):
            return int(obj)
        # unknown type: let the base encoder raise its standard TypeError
        return json.JSONEncoder.default(self, obj)
|