corva-worker-python 2.0.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. corva_worker_python-2.0.0.dist-info/METADATA +30 -0
  2. corva_worker_python-2.0.0.dist-info/RECORD +63 -0
  3. corva_worker_python-2.0.0.dist-info/WHEEL +5 -0
  4. corva_worker_python-2.0.0.dist-info/top_level.txt +1 -0
  5. worker/__init__.py +5 -0
  6. worker/app/__init__.py +291 -0
  7. worker/app/modules/__init__.py +265 -0
  8. worker/app/modules/activity_module.py +141 -0
  9. worker/app/modules/connection_module.py +21 -0
  10. worker/app/modules/depth_activity_module.py +21 -0
  11. worker/app/modules/scheduler.py +44 -0
  12. worker/app/modules/time_activity_module.py +21 -0
  13. worker/app/modules/trigger.py +43 -0
  14. worker/constants.py +51 -0
  15. worker/data/__init__.py +0 -0
  16. worker/data/activity/__init__.py +132 -0
  17. worker/data/activity/activity_grouping.py +242 -0
  18. worker/data/alert.py +89 -0
  19. worker/data/api.py +155 -0
  20. worker/data/enums.py +141 -0
  21. worker/data/json_encoder.py +18 -0
  22. worker/data/math.py +104 -0
  23. worker/data/operations.py +477 -0
  24. worker/data/serialization.py +110 -0
  25. worker/data/task_handler.py +82 -0
  26. worker/data/two_way_dict.py +17 -0
  27. worker/data/unit_conversions.py +5 -0
  28. worker/data/wits.py +323 -0
  29. worker/event/__init__.py +53 -0
  30. worker/event/event_handler.py +90 -0
  31. worker/event/scheduled.py +64 -0
  32. worker/event/stream.py +48 -0
  33. worker/exceptions.py +26 -0
  34. worker/mixins/__init__.py +0 -0
  35. worker/mixins/logging.py +119 -0
  36. worker/mixins/rollbar.py +87 -0
  37. worker/partial_rerun_merge/__init__.py +0 -0
  38. worker/partial_rerun_merge/merge.py +500 -0
  39. worker/partial_rerun_merge/models.py +91 -0
  40. worker/partial_rerun_merge/progress.py +241 -0
  41. worker/state/__init__.py +96 -0
  42. worker/state/mixins.py +111 -0
  43. worker/state/state.py +46 -0
  44. worker/test/__init__.py +3 -0
  45. worker/test/lambda_function_test_run.py +196 -0
  46. worker/test/local_testing/__init__.py +0 -0
  47. worker/test/local_testing/to_local_transfer.py +360 -0
  48. worker/test/utils.py +51 -0
  49. worker/wellbore/__init__.py +0 -0
  50. worker/wellbore/factory.py +496 -0
  51. worker/wellbore/measured_depth_finder.py +12 -0
  52. worker/wellbore/model/__init__.py +0 -0
  53. worker/wellbore/model/ann.py +103 -0
  54. worker/wellbore/model/annulus.py +113 -0
  55. worker/wellbore/model/drillstring.py +196 -0
  56. worker/wellbore/model/drillstring_components.py +439 -0
  57. worker/wellbore/model/element.py +102 -0
  58. worker/wellbore/model/enums.py +92 -0
  59. worker/wellbore/model/hole.py +297 -0
  60. worker/wellbore/model/hole_section.py +51 -0
  61. worker/wellbore/model/riser.py +22 -0
  62. worker/wellbore/sections_mixin.py +64 -0
  63. worker/wellbore/wellbore.py +289 -0
worker/test/local_testing/to_local_transfer.py ADDED
@@ -0,0 +1,360 @@
+ import argparse
+ import os
+ from functools import cached_property
+
+ import simplejson as json
+ from tqdm import tqdm
+
+ from worker.data import operations
+ from worker.data.json_encoder import JsonEncoder
+ from worker.mixins.logging import LoggingMixin
+
+ API_DATA_PATH = "/v1/data/corva/"
+ API_ASSET_PATH = "/v1/assets/"
+
+
+ def generate_transfer_parser():
+     """
+     Creating the supporting arguments
+     :return:
+     """
+     parser = argparse.ArgumentParser(description="Run your tests locally on an asset.")
+     parser.add_argument(
+         "-v",
+         "--source_environment",
+         "--env",
+         type=str,
+         required=True,
+         help="source environment, options: 'qa', 'staging', 'production'",
+     )
+     parser.add_argument("-a", "--source_asset_id", "--id", type=int, required=True, help="set source asset_id")
+     parser.add_argument(
+         "-s",
+         "--start_timestamp",
+         "--start",
+         type=int,
+         required=False,
+         default=None,
+         help="start timestamp for the main collections to be copied over",
+     )
+     parser.add_argument(
+         "-e",
+         "--end_timestamp",
+         "--end",
+         type=int,
+         required=False,
+         default=None,
+         help="end timestamp for the main collections to be copied over",
+     )
+     parser.add_argument(
+         "-c",
+         "--config_collections",
+         "--config",
+         type=json.loads,
+         required=False,
+         default=None,
+         help="setting a new config collection list that overrides the existing one",
+     )
+     parser.add_argument(
+         "-m",
+         "--main_collections",
+         "--main",
+         type=json.loads,
+         required=False,
+         default=None,
+         help="setting a new main collection list that overrides the existing one",
+     )
+     return parser
+
+
+ class ToLocalTransfer(LoggingMixin):
+     # config collections: this can be modified in __init__
+     CONFIG_COLLECTIONS = [
+         "data.surface-equipment",
+         "data.actual_survey",
+         "data.plan_survey",
+         # in the following collections the setting timestamps need to be removed
+         # but for the purpose of this test analysis is not important.
+         "data.casing",
+         "data.drillstring",
+         "data.mud",
+         # not required for most of the cases
+         # "data.formations",
+         # "data.pressure-gradient",
+         # "data.npt-events",
+         # "data.costs",
+         # "data.well-sections",
+     ]
+
+     # collections other than configs: this can be modified in __init__
+     MAIN_COLLECTIONS = ["wits"]
+
+     def __init__(self, args):
+         super().__init__()
+
+         self.source_env = None
+         self.source_asset_id = None
+         self.source_company_id = None
+
+         self.local_env = "local"
+         self.local_asset_id = None
+
+         # this is used to avoid copying configs if it is not a new well
+         self.is_new_well = True
+
+         self.app_name = os.getenv("APP_NAME", "LocalTesting")
+
+         # The purpose of this variable is to change the config _ids
+         # For instance when the wits records are copied over to local
+         # the drillstring node under metadata is still referring to source
+         # _id so keeping a mapping is required to replace the _ids.
+         # In addition this variable is stored on local redis.
+         self.config_id_mapper = {}
+
+         self.start_timestamp = None
+         self.end_timestamp = None
+
+         if args is None:
+             parser = generate_transfer_parser()
+             args = parser.parse_args()
+
+         self.initialize(args)
+
+     def initialize(self, args):
+         if args.config_collections:
+             self.CONFIG_COLLECTIONS = args.config_collections
+
+         if args.main_collections:
+             self.MAIN_COLLECTIONS = args.main_collections
+
+         self.source_env = args.source_environment
+         self.source_asset_id = args.source_asset_id
+
+         self.start_timestamp = args.start_timestamp
+         self.end_timestamp = args.end_timestamp
+
+     def run(self):
+         self.setup_local_well()
+         self.store_config_data()
+         self.store_other_collections()
+
+     @cached_property
+     def source_api_worker(self):
+         return operations.setup_api_worker(self.source_env, self.app_name)
+
+     @cached_property
+     def local_api_worker(self):
+         return operations.setup_api_worker(self.local_env, self.app_name)
+
+     @cached_property
+     def local_redis_worker(self):
+         return operations.setup_redis_worker(self.local_env)
+
+     def _get_well_properties(self):
+         path = "%s?ids[]=%s" % (API_ASSET_PATH, self.source_asset_id)
+
+         well_properties = self.source_api_worker.get(path=path)
+         if well_properties.count != 1:
+             raise Exception(f"Count mismatch: there should be only one asset! {well_properties.count} found!")
+
+         well_properties = well_properties.data[0]
+
+         if well_properties["asset_type"] != "well":
+             raise Exception("Not a well")
+
+         self.well_name = well_properties["name"]
+         self.source_company_id = well_properties["company_id"]
+
+     def setup_local_well(self):
+         """
+         Set up the local well. If the well is not already set up, a new one will be created.
+         :return:
+         """
+         self._get_well_properties()
+
+         path = "%s?order=asc&page=1&per_page=100&search=%s&sort=name&types[]=well" % (API_ASSET_PATH, self.well_name)
+         wells = self.local_api_worker.get(path=path)
+
+         found = False
+         if wells.count >= 1:
+             well = wells.data[0]
+             well_name2 = well["name"]
+             if self.well_name == well_name2:
+                 found = True
+                 print("Found local asset!")
+
+         if not found:
+             print("Well not found! Trying to create a new asset ...")
+             data = {
+                 "name": self.well_name,
+                 "parent_asset_id": None,
+                 "company_id": 1,
+                 "visibility": "visible",
+                 "asset_type": "well",
+                 "type": "Asset::Well",
+             }
+             data_json = json.dumps(data, cls=JsonEncoder, ignore_nan=True)
+             well = self.local_api_worker.post(path=API_ASSET_PATH, data=data_json).data
+             print("Well is created!")
+
+         self.is_new_well = not found
+         self.local_asset_id = well["id"]
+         print(f"Local asset_id={self.local_asset_id}")
+
+     def store_config_data(self):
+         """
+         Store the config collection data into the local database
+         :return:
+         """
+         if not self.is_new_well:
+             print("Not a new well.")
+             config_id_mapper = self.local_redis_worker.get(self.get_config_mapper_redis_key())
+             if config_id_mapper:
+                 self.config_id_mapper = json.loads(config_id_mapper)
+             print("Copying configs is skipped.")
+             return
+
+         # maps the source to local ids in a dict of dicts in the format {collection:{source_id: local_id}}
+         config_id_mapper = {}
+
+         # get records
+         print("Copying configs to local ...")
+         print(f"{'Collection':30} {'Count':10} {'Post':10} ")
+         print(f"{'-' * 30:30} {'-' * 10:10} {'-' * 10:10} ")
+
+         count_records = 0
+         for col in self.CONFIG_COLLECTIONS:
+             records = []
+             print(f"{col:30} ", end="")
+             col_data = self.source_api_worker.get(
+                 path=API_DATA_PATH, collection=col, asset_id=self.source_asset_id, limit=10000
+             )
+             print(f"{col_data.count:>10} ", end="")
+             if col_data.count < 1:
+                 print()
+                 continue
+
+             records.extend(col_data.data)
+
+             source_ids = [record["_id"] for record in records]
+
+             for record in records:
+                 record["company_id"] = 1
+                 record["asset_id"] = self.local_asset_id
+                 record.pop("_id", None)
+
+             # post
+             results = self.local_api_worker.post(
+                 path=API_DATA_PATH, data=json.dumps(records, cls=JsonEncoder, ignore_nan=True)
+             )
+             print(f"{'successful':10} ", end="\n")
+
+             local_ids = results.data["ids"]
+             count_records += len(local_ids)
+
+             if len(source_ids) != len(local_ids):
+                 raise IndexError(
+                     f"Number of source ids ({len(source_ids)}) does not match local ids ({len(local_ids)})"
+                 )
+
+             collection_id_mapper = {source_ids[i]: local_ids[i] for i in range(len(source_ids))}
+             config_id_mapper[col] = collection_id_mapper
+
+         print(f"{count_records} records are posted!")
+         self.debug(self.local_asset_id, f"id Mapper: {config_id_mapper}")
+
+         self.config_id_mapper = config_id_mapper
+         self.local_redis_worker.set(
+             self.get_config_mapper_redis_key(), json.dumps(self.config_id_mapper, cls=JsonEncoder, ignore_nan=True)
+         )
+
+     def store_other_collections(self, show_progress: bool = True):
+         """
+         Store the main (non-config) collections on local
+         :param show_progress: whether to show a progress bar
+         :return:
+         """
+         collections = self.MAIN_COLLECTIONS
+
+         # to remove duplicate collections
+         collections = list(set(collections))
+
+         start_timestamp = self.start_timestamp
+         end_timestamp = self.end_timestamp
+
+         for col in collections:
+             # Finding the start and end if not available.
+             if not start_timestamp:
+                 res = self.source_api_worker.get(
+                     path=API_DATA_PATH, collection=col, asset_id=self.source_asset_id, sort="{timestamp:+1}", limit=1
+                 )
+
+                 if res.count < 1:
+                     continue
+                 start_timestamp = res.data[0].get("timestamp")
+
+             if not end_timestamp:
+                 res = self.source_api_worker.get(
+                     path=API_DATA_PATH, collection=col, asset_id=self.source_asset_id, sort="{timestamp:-1}", limit=1
+                 )
+                 end_timestamp = res.data[0].get("timestamp")
+
+             res = self.local_api_worker.get(
+                 path=API_DATA_PATH, collection=col, asset_id=self.local_asset_id, sort="{timestamp:-1}", limit=1
+             )
+             if res.count > 0:
+                 ts = res.data[0].get("timestamp", 0)
+             else:
+                 ts = start_timestamp - 1
+
+             print(f"'{col}' collection, Source Range: [{start_timestamp}, {end_timestamp}], Local Start: {ts}")
+
+             limit = 3600 if col == "wits" else 100
+
+             pbar = tqdm(total=100, ncols=100, disable=not show_progress)
+
+             proceed = True
+             while proceed:
+                 query = "{timestamp#gt#%s}AND{timestamp#lte#%s}" % (ts, end_timestamp)
+
+                 res = self.source_api_worker.get(
+                     path=API_DATA_PATH,
+                     collection=col,
+                     asset_id=self.source_asset_id,
+                     query=query,
+                     sort="{timestamp:+1}",
+                     limit=limit,
+                 )
+
+                 count = res.count
+                 if count < 1:
+                     break
+
+                 records = res.data
+
+                 data_str = json.dumps(records)
+                 data_str = data_str.replace('"company_id": %s' % self.source_company_id, '"company_id": %s' % 1)
+                 data_str = data_str.replace(
+                     '"asset_id": %s' % self.source_asset_id, '"asset_id": %s' % self.local_asset_id
+                 )
+                 if col == "wits":
+                     for config_name, mapped_ids in self.config_id_mapper.items():
+                         for id_source, id_local in mapped_ids.items():
+                             data_str = data_str.replace(id_source, id_local)
+
+                 records = json.loads(data_str)
+
+                 for record in records:
+                     record.pop("_id", None)
+
+                 res = self.local_api_worker.post(path=API_DATA_PATH, data=json.dumps(records))
+
+                 pbar_added = round((records[-1]["timestamp"] - ts) / (end_timestamp - start_timestamp) * 100)
+                 pbar.update(pbar_added)
+
+                 ts = records[-1]["timestamp"]
+
+             pbar.update(100 - pbar.n)
+
+     def get_config_mapper_redis_key(self):
+         return "%s-config_id_mapper" % self.local_asset_id
worker/test/utils.py ADDED
@@ -0,0 +1,51 @@
+ import traceback
+
+ import simplejson as json
+
+ from worker.state.mixins import RedisMixin
+
+
+ def file_to_json(file_name):
+     with open(file_name, mode="r") as file:
+         _json = json.load(file)
+     return _json
+
+
+ def get_last_processed_timestamp(asset_id: int, state_key: str):
+     """
+     Get the last_processed_timestamp from state storage
+     :param asset_id:
+     :param state_key:
+     """
+     try:
+         state_app = RedisMixin()
+         state_app.asset_id = asset_id
+         previous_state = state_app.load_state(state_key=state_key)
+         return previous_state.get("last_processed_timestamp", None)
+     except Exception:
+         print("Error occurred while reading state from Redis!")
+         traceback.print_exc()
+
+     return None
+
+
+ def create_scheduler_events(asset_id, start_timestamp, end_timestamp, step):
+     """
+     Creating scheduler events
+     :param asset_id:
+     :param start_timestamp:
+     :param end_timestamp:
+     :param step:
+     :return:
+     """
+     if start_timestamp > end_timestamp:
+         raise ValueError(f"start_timestamp ({start_timestamp}) is greater than end_timestamp ({end_timestamp})!")
+     if step <= 0 or step > 3600:
+         raise ValueError(f"step ({step}) is outside the (0, 3600] range.")
+
+     triggers = range(start_timestamp, end_timestamp, step)
+     events = [
+         [[{"asset_id": asset_id, "schedule_start": 1000 * trigger, "schedule_end": 1000 * (trigger + step)}]]
+         for trigger in triggers
+     ]
+     return events
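
A minimal sketch (not part of the package) of how create_scheduler_events above might be called: it validates the time range and step, then emits one nested event per step with schedule_start and schedule_end expressed in milliseconds. The asset id and timestamps are placeholder values.

    from worker.test.utils import create_scheduler_events

    # One hour of events in 600-second steps (placeholder asset and times).
    events = create_scheduler_events(
        asset_id=12345,
        start_timestamp=1600000000,
        end_timestamp=1600003600,
        step=600,
    )

    print(len(events))                        # 6 events, one per 600 s window
    print(events[0][0][0]["schedule_start"])  # 1600000000000 (seconds * 1000)
    print(events[0][0][0]["schedule_end"])    # 1600000600000
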