sift-stack-py 0.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (291)
  1. google/__init__.py +1 -0
  2. google/api/__init__.py +0 -0
  3. google/api/annotations_pb2.py +27 -0
  4. google/api/annotations_pb2.pyi +29 -0
  5. google/api/annotations_pb2_grpc.py +4 -0
  6. google/api/annotations_pb2_grpc.pyi +30 -0
  7. google/api/field_behavior_pb2.py +30 -0
  8. google/api/field_behavior_pb2.pyi +175 -0
  9. google/api/field_behavior_pb2_grpc.py +4 -0
  10. google/api/field_behavior_pb2_grpc.pyi +30 -0
  11. google/api/http_pb2.py +31 -0
  12. google/api/http_pb2.pyi +433 -0
  13. google/api/http_pb2_grpc.py +4 -0
  14. google/api/http_pb2_grpc.pyi +30 -0
  15. protoc_gen_openapiv2/__init__.py +0 -0
  16. protoc_gen_openapiv2/options/__init__.py +0 -0
  17. protoc_gen_openapiv2/options/annotations_pb2.py +27 -0
  18. protoc_gen_openapiv2/options/annotations_pb2.pyi +48 -0
  19. protoc_gen_openapiv2/options/annotations_pb2_grpc.py +4 -0
  20. protoc_gen_openapiv2/options/annotations_pb2_grpc.pyi +17 -0
  21. protoc_gen_openapiv2/options/openapiv2_pb2.py +132 -0
  22. protoc_gen_openapiv2/options/openapiv2_pb2.pyi +1533 -0
  23. protoc_gen_openapiv2/options/openapiv2_pb2_grpc.py +4 -0
  24. protoc_gen_openapiv2/options/openapiv2_pb2_grpc.pyi +17 -0
  25. sift/__init__.py +0 -0
  26. sift/annotation_logs/__init__.py +0 -0
  27. sift/annotation_logs/v1/__init__.py +0 -0
  28. sift/annotation_logs/v1/annotation_logs_pb2.py +115 -0
  29. sift/annotation_logs/v1/annotation_logs_pb2.pyi +370 -0
  30. sift/annotation_logs/v1/annotation_logs_pb2_grpc.py +135 -0
  31. sift/annotation_logs/v1/annotation_logs_pb2_grpc.pyi +84 -0
  32. sift/annotations/__init__.py +0 -0
  33. sift/annotations/v1/__init__.py +0 -0
  34. sift/annotations/v1/annotations_pb2.py +180 -0
  35. sift/annotations/v1/annotations_pb2.pyi +539 -0
  36. sift/annotations/v1/annotations_pb2_grpc.py +237 -0
  37. sift/annotations/v1/annotations_pb2_grpc.pyi +144 -0
  38. sift/assets/__init__.py +0 -0
  39. sift/assets/v1/__init__.py +0 -0
  40. sift/assets/v1/assets_pb2.py +90 -0
  41. sift/assets/v1/assets_pb2.pyi +235 -0
  42. sift/assets/v1/assets_pb2_grpc.py +168 -0
  43. sift/assets/v1/assets_pb2_grpc.pyi +101 -0
  44. sift/calculated_channels/__init__.py +0 -0
  45. sift/calculated_channels/v1/__init__.py +0 -0
  46. sift/calculated_channels/v1/calculated_channels_pb2.py +99 -0
  47. sift/calculated_channels/v1/calculated_channels_pb2.pyi +280 -0
  48. sift/calculated_channels/v1/calculated_channels_pb2_grpc.py +101 -0
  49. sift/calculated_channels/v1/calculated_channels_pb2_grpc.pyi +64 -0
  50. sift/campaigns/__init__.py +0 -0
  51. sift/campaigns/v1/__init__.py +0 -0
  52. sift/campaigns/v1/campaigns_pb2.py +144 -0
  53. sift/campaigns/v1/campaigns_pb2.pyi +383 -0
  54. sift/campaigns/v1/campaigns_pb2_grpc.py +169 -0
  55. sift/campaigns/v1/campaigns_pb2_grpc.pyi +104 -0
  56. sift/channel_schemas/__init__.py +0 -0
  57. sift/channel_schemas/v1/__init__.py +0 -0
  58. sift/channel_schemas/v1/channel_schemas_pb2.py +69 -0
  59. sift/channel_schemas/v1/channel_schemas_pb2.pyi +117 -0
  60. sift/channel_schemas/v1/channel_schemas_pb2_grpc.py +101 -0
  61. sift/channel_schemas/v1/channel_schemas_pb2_grpc.pyi +64 -0
  62. sift/channels/__init__.py +0 -0
  63. sift/channels/v2/__init__.py +0 -0
  64. sift/channels/v2/channels_pb2.py +88 -0
  65. sift/channels/v2/channels_pb2.pyi +183 -0
  66. sift/channels/v2/channels_pb2_grpc.py +101 -0
  67. sift/channels/v2/channels_pb2_grpc.pyi +64 -0
  68. sift/common/__init__.py +0 -0
  69. sift/common/type/__init__.py +0 -0
  70. sift/common/type/v1/__init__.py +0 -0
  71. sift/common/type/v1/channel_bit_field_element_pb2.py +34 -0
  72. sift/common/type/v1/channel_bit_field_element_pb2.pyi +33 -0
  73. sift/common/type/v1/channel_bit_field_element_pb2_grpc.py +4 -0
  74. sift/common/type/v1/channel_bit_field_element_pb2_grpc.pyi +17 -0
  75. sift/common/type/v1/channel_data_type_pb2.py +29 -0
  76. sift/common/type/v1/channel_data_type_pb2.pyi +50 -0
  77. sift/common/type/v1/channel_data_type_pb2_grpc.py +4 -0
  78. sift/common/type/v1/channel_data_type_pb2_grpc.pyi +17 -0
  79. sift/common/type/v1/channel_enum_type_pb2.py +32 -0
  80. sift/common/type/v1/channel_enum_type_pb2.pyi +29 -0
  81. sift/common/type/v1/channel_enum_type_pb2_grpc.py +4 -0
  82. sift/common/type/v1/channel_enum_type_pb2_grpc.pyi +17 -0
  83. sift/common/type/v1/organization_pb2.py +27 -0
  84. sift/common/type/v1/organization_pb2.pyi +29 -0
  85. sift/common/type/v1/organization_pb2_grpc.py +4 -0
  86. sift/common/type/v1/organization_pb2_grpc.pyi +17 -0
  87. sift/common/type/v1/resource_identifier_pb2.py +46 -0
  88. sift/common/type/v1/resource_identifier_pb2.pyi +145 -0
  89. sift/common/type/v1/resource_identifier_pb2_grpc.py +4 -0
  90. sift/common/type/v1/resource_identifier_pb2_grpc.pyi +17 -0
  91. sift/common/type/v1/user_pb2.py +33 -0
  92. sift/common/type/v1/user_pb2.pyi +36 -0
  93. sift/common/type/v1/user_pb2_grpc.py +4 -0
  94. sift/common/type/v1/user_pb2_grpc.pyi +17 -0
  95. sift/data/__init__.py +0 -0
  96. sift/data/v1/__init__.py +0 -0
  97. sift/data/v1/data_pb2.py +212 -0
  98. sift/data/v1/data_pb2.pyi +745 -0
  99. sift/data/v1/data_pb2_grpc.py +67 -0
  100. sift/data/v1/data_pb2_grpc.pyi +44 -0
  101. sift/ingest/__init__.py +0 -0
  102. sift/ingest/v1/__init__.py +0 -0
  103. sift/ingest/v1/ingest_pb2.py +35 -0
  104. sift/ingest/v1/ingest_pb2.pyi +118 -0
  105. sift/ingest/v1/ingest_pb2_grpc.py +66 -0
  106. sift/ingest/v1/ingest_pb2_grpc.pyi +41 -0
  107. sift/ingestion_configs/__init__.py +0 -0
  108. sift/ingestion_configs/v1/__init__.py +0 -0
  109. sift/ingestion_configs/v1/ingestion_configs_pb2.py +115 -0
  110. sift/ingestion_configs/v1/ingestion_configs_pb2.pyi +332 -0
  111. sift/ingestion_configs/v1/ingestion_configs_pb2_grpc.py +203 -0
  112. sift/ingestion_configs/v1/ingestion_configs_pb2_grpc.pyi +124 -0
  113. sift/notifications/__init__.py +0 -0
  114. sift/notifications/v1/__init__.py +0 -0
  115. sift/notifications/v1/notifications_pb2.py +64 -0
  116. sift/notifications/v1/notifications_pb2.pyi +225 -0
  117. sift/notifications/v1/notifications_pb2_grpc.py +101 -0
  118. sift/notifications/v1/notifications_pb2_grpc.pyi +64 -0
  119. sift/ping/__init__.py +0 -0
  120. sift/ping/v1/__init__.py +0 -0
  121. sift/ping/v1/ping_pb2.py +38 -0
  122. sift/ping/v1/ping_pb2.pyi +36 -0
  123. sift/ping/v1/ping_pb2_grpc.py +66 -0
  124. sift/ping/v1/ping_pb2_grpc.pyi +41 -0
  125. sift/remote_files/__init__.py +0 -0
  126. sift/remote_files/v1/__init__.py +0 -0
  127. sift/remote_files/v1/remote_files_pb2.py +174 -0
  128. sift/remote_files/v1/remote_files_pb2.pyi +472 -0
  129. sift/remote_files/v1/remote_files_pb2_grpc.py +271 -0
  130. sift/remote_files/v1/remote_files_pb2_grpc.pyi +164 -0
  131. sift/report_templates/__init__.py +0 -0
  132. sift/report_templates/v1/__init__.py +0 -0
  133. sift/report_templates/v1/report_templates_pb2.py +146 -0
  134. sift/report_templates/v1/report_templates_pb2.pyi +381 -0
  135. sift/report_templates/v1/report_templates_pb2_grpc.py +169 -0
  136. sift/report_templates/v1/report_templates_pb2_grpc.pyi +104 -0
  137. sift/reports/__init__.py +0 -0
  138. sift/reports/v1/__init__.py +0 -0
  139. sift/reports/v1/reports_pb2.py +193 -0
  140. sift/reports/v1/reports_pb2.pyi +562 -0
  141. sift/reports/v1/reports_pb2_grpc.py +205 -0
  142. sift/reports/v1/reports_pb2_grpc.pyi +136 -0
  143. sift/rule_evaluation/__init__.py +0 -0
  144. sift/rule_evaluation/v1/__init__.py +0 -0
  145. sift/rule_evaluation/v1/rule_evaluation_pb2.py +89 -0
  146. sift/rule_evaluation/v1/rule_evaluation_pb2.pyi +263 -0
  147. sift/rule_evaluation/v1/rule_evaluation_pb2_grpc.py +101 -0
  148. sift/rule_evaluation/v1/rule_evaluation_pb2_grpc.pyi +64 -0
  149. sift/rules/__init__.py +0 -0
  150. sift/rules/v1/__init__.py +0 -0
  151. sift/rules/v1/rules_pb2.py +420 -0
  152. sift/rules/v1/rules_pb2.pyi +1355 -0
  153. sift/rules/v1/rules_pb2_grpc.py +577 -0
  154. sift/rules/v1/rules_pb2_grpc.pyi +351 -0
  155. sift/runs/__init__.py +0 -0
  156. sift/runs/v2/__init__.py +0 -0
  157. sift/runs/v2/runs_pb2.py +150 -0
  158. sift/runs/v2/runs_pb2.pyi +413 -0
  159. sift/runs/v2/runs_pb2_grpc.py +271 -0
  160. sift/runs/v2/runs_pb2_grpc.pyi +164 -0
  161. sift/saved_searches/__init__.py +0 -0
  162. sift/saved_searches/v1/__init__.py +0 -0
  163. sift/saved_searches/v1/saved_searches_pb2.py +144 -0
  164. sift/saved_searches/v1/saved_searches_pb2.pyi +385 -0
  165. sift/saved_searches/v1/saved_searches_pb2_grpc.py +237 -0
  166. sift/saved_searches/v1/saved_searches_pb2_grpc.pyi +144 -0
  167. sift/tags/__init__.py +0 -0
  168. sift/tags/v1/__init__.py +0 -0
  169. sift/tags/v1/tags_pb2.py +49 -0
  170. sift/tags/v1/tags_pb2.pyi +71 -0
  171. sift/tags/v1/tags_pb2_grpc.py +4 -0
  172. sift/tags/v1/tags_pb2_grpc.pyi +17 -0
  173. sift/users/__init__.py +0 -0
  174. sift/users/v2/__init__.py +0 -0
  175. sift/users/v2/users_pb2.py +61 -0
  176. sift/users/v2/users_pb2.pyi +142 -0
  177. sift/users/v2/users_pb2_grpc.py +135 -0
  178. sift/users/v2/users_pb2_grpc.pyi +84 -0
  179. sift/views/__init__.py +0 -0
  180. sift/views/v1/__init__.py +0 -0
  181. sift/views/v1/views_pb2.py +130 -0
  182. sift/views/v1/views_pb2.pyi +466 -0
  183. sift/views/v1/views_pb2_grpc.py +305 -0
  184. sift/views/v1/views_pb2_grpc.pyi +184 -0
  185. sift_grafana/py.typed +0 -0
  186. sift_grafana/sift_query_model.py +64 -0
  187. sift_py/__init__.py +923 -0
  188. sift_py/_internal/__init__.py +5 -0
  189. sift_py/_internal/cel.py +18 -0
  190. sift_py/_internal/channel.py +42 -0
  191. sift_py/_internal/convert/__init__.py +3 -0
  192. sift_py/_internal/convert/json.py +24 -0
  193. sift_py/_internal/convert/protobuf.py +34 -0
  194. sift_py/_internal/convert/timestamp.py +9 -0
  195. sift_py/_internal/test_util/__init__.py +0 -0
  196. sift_py/_internal/test_util/channel.py +136 -0
  197. sift_py/_internal/test_util/fn.py +14 -0
  198. sift_py/_internal/test_util/server_interceptor.py +62 -0
  199. sift_py/_internal/time.py +48 -0
  200. sift_py/_internal/user.py +39 -0
  201. sift_py/data/__init__.py +171 -0
  202. sift_py/data/_channel.py +38 -0
  203. sift_py/data/_deserialize.py +208 -0
  204. sift_py/data/_deserialize_test.py +134 -0
  205. sift_py/data/_service_test.py +276 -0
  206. sift_py/data/_validate.py +10 -0
  207. sift_py/data/error.py +5 -0
  208. sift_py/data/query.py +299 -0
  209. sift_py/data/service.py +497 -0
  210. sift_py/data_import/__init__.py +130 -0
  211. sift_py/data_import/_config.py +167 -0
  212. sift_py/data_import/_config_test.py +166 -0
  213. sift_py/data_import/_csv_test.py +395 -0
  214. sift_py/data_import/_status_test.py +176 -0
  215. sift_py/data_import/_tdms_test.py +238 -0
  216. sift_py/data_import/ch10.py +157 -0
  217. sift_py/data_import/config.py +19 -0
  218. sift_py/data_import/csv.py +259 -0
  219. sift_py/data_import/status.py +113 -0
  220. sift_py/data_import/tdms.py +206 -0
  221. sift_py/data_import/tempfile.py +30 -0
  222. sift_py/data_import/time_format.py +39 -0
  223. sift_py/error.py +11 -0
  224. sift_py/file_attachment/__init__.py +88 -0
  225. sift_py/file_attachment/_internal/__init__.py +0 -0
  226. sift_py/file_attachment/_internal/download.py +13 -0
  227. sift_py/file_attachment/_internal/upload.py +100 -0
  228. sift_py/file_attachment/_service_test.py +161 -0
  229. sift_py/file_attachment/entity.py +30 -0
  230. sift_py/file_attachment/metadata.py +107 -0
  231. sift_py/file_attachment/service.py +142 -0
  232. sift_py/grpc/__init__.py +15 -0
  233. sift_py/grpc/_async_interceptors/__init__.py +0 -0
  234. sift_py/grpc/_async_interceptors/base.py +72 -0
  235. sift_py/grpc/_async_interceptors/metadata.py +36 -0
  236. sift_py/grpc/_interceptors/__init__.py +0 -0
  237. sift_py/grpc/_interceptors/base.py +61 -0
  238. sift_py/grpc/_interceptors/context.py +25 -0
  239. sift_py/grpc/_interceptors/metadata.py +33 -0
  240. sift_py/grpc/_retry.py +70 -0
  241. sift_py/grpc/keepalive.py +34 -0
  242. sift_py/grpc/transport.py +250 -0
  243. sift_py/grpc/transport_test.py +170 -0
  244. sift_py/ingestion/__init__.py +6 -0
  245. sift_py/ingestion/_internal/__init__.py +6 -0
  246. sift_py/ingestion/_internal/channel.py +12 -0
  247. sift_py/ingestion/_internal/error.py +10 -0
  248. sift_py/ingestion/_internal/ingest.py +350 -0
  249. sift_py/ingestion/_internal/ingest_test.py +357 -0
  250. sift_py/ingestion/_internal/ingestion_config.py +130 -0
  251. sift_py/ingestion/_internal/run.py +46 -0
  252. sift_py/ingestion/_service_test.py +478 -0
  253. sift_py/ingestion/buffer.py +189 -0
  254. sift_py/ingestion/channel.py +422 -0
  255. sift_py/ingestion/config/__init__.py +3 -0
  256. sift_py/ingestion/config/telemetry.py +281 -0
  257. sift_py/ingestion/config/telemetry_test.py +405 -0
  258. sift_py/ingestion/config/yaml/__init__.py +0 -0
  259. sift_py/ingestion/config/yaml/error.py +44 -0
  260. sift_py/ingestion/config/yaml/load.py +126 -0
  261. sift_py/ingestion/config/yaml/spec.py +58 -0
  262. sift_py/ingestion/config/yaml/test_load.py +25 -0
  263. sift_py/ingestion/flow.py +73 -0
  264. sift_py/ingestion/manager.py +99 -0
  265. sift_py/ingestion/rule/__init__.py +4 -0
  266. sift_py/ingestion/rule/config.py +11 -0
  267. sift_py/ingestion/service.py +237 -0
  268. sift_py/py.typed +0 -0
  269. sift_py/report_templates/__init__.py +0 -0
  270. sift_py/report_templates/_config_test.py +34 -0
  271. sift_py/report_templates/_service_test.py +94 -0
  272. sift_py/report_templates/config.py +36 -0
  273. sift_py/report_templates/service.py +171 -0
  274. sift_py/rest.py +29 -0
  275. sift_py/rule/__init__.py +0 -0
  276. sift_py/rule/_config_test.py +109 -0
  277. sift_py/rule/_service_test.py +168 -0
  278. sift_py/rule/config.py +229 -0
  279. sift_py/rule/service.py +484 -0
  280. sift_py/yaml/__init__.py +0 -0
  281. sift_py/yaml/_channel_test.py +169 -0
  282. sift_py/yaml/_rule_test.py +207 -0
  283. sift_py/yaml/channel.py +224 -0
  284. sift_py/yaml/report_templates.py +73 -0
  285. sift_py/yaml/rule.py +321 -0
  286. sift_py/yaml/utils.py +15 -0
  287. sift_stack_py-0.3.2.dist-info/LICENSE +7 -0
  288. sift_stack_py-0.3.2.dist-info/METADATA +109 -0
  289. sift_stack_py-0.3.2.dist-info/RECORD +291 -0
  290. sift_stack_py-0.3.2.dist-info/WHEEL +5 -0
  291. sift_stack_py-0.3.2.dist-info/top_level.txt +5 -0
@@ -0,0 +1,238 @@
1
+ import json
2
+ from typing import Any, Dict, List, Optional
3
+
4
+ import pandas as pd
5
+ import pytest
6
+ from nptdms import TdmsFile, types # type: ignore
7
+ from pytest_mock import MockFixture
8
+
9
+ from sift_py.data_import.tdms import TdmsUploadService
10
+ from sift_py.rest import SiftRestConfig
11
+
12
+
13
class MockTdmsChannel:
    """Stand-in for ``nptdms.TdmsChannel`` used by the TDMS upload tests.

    Exposes only the attributes the upload service reads: ``name``,
    ``group_name``, ``properties``, ``data`` and ``data_type``.
    """

    def __init__(
        self,
        name: str,
        group_name: str,
        properties: Optional[Dict[str, Any]] = None,
        data: Optional[List[int]] = None,
        data_type: type = types.Int32,
    ):
        self.name: str = name
        self.group_name: str = group_name
        # After the `or` fallbacks these attributes are never None, so they are
        # typed as concrete containers (the previous Optional[...] annotations
        # were inaccurate). Property values are heterogeneous in the tests
        # (e.g. "wf_increment": 0.1), hence Dict[str, Any] rather than str.
        self.properties: Dict[str, Any] = properties or {}
        self.data: List[int] = data or []
        self.data_type: type = data_type
27
+
28
+
29
class MockTdmsGroup:
    """Stand-in for ``nptdms.TdmsGroup`` holding a fixed channel list."""

    def __init__(self, name, channels: List[MockTdmsChannel]):
        self.name: str = name
        self.path: str = f"/'{name}'"
        self.properties: Optional[Dict[str, str]] = {}
        self._channels: List[MockTdmsChannel] = channels

    def channels(self) -> List[MockTdmsChannel]:
        """Return the channel list supplied at construction time."""
        return self._channels
38
+
39
+
40
class MockTdmsFile:
    """Stand-in for ``nptdms.TdmsFile`` backed by an in-memory group list."""

    def __init__(self, groups: List[MockTdmsGroup]):
        self.properties: Dict[str, str] = {}
        self._groups: List[MockTdmsGroup] = groups

    def groups(self) -> List[MockTdmsGroup]:
        """Return the group list supplied at construction time."""
        return self._groups

    def as_dataframe(self, *_, **__):
        """Ignore all arguments and hand back an empty DataFrame."""
        return pd.DataFrame()
50
+
51
+
52
class MockResponse:
    """Minimal stand-in for ``requests.Response`` with a canned 200 payload."""

    def __init__(self):
        self.status_code = 200
        payload = {"uploadUrl": "some_url.com", "dataImportId": "123-123-123"}
        self.text = json.dumps(payload)

    def json(self) -> dict:
        """Decode ``self.text``, mirroring ``requests.Response.json``."""
        return json.loads(self.text)
59
+
60
+
61
@pytest.fixture
def mock_tdms_file():
    """Build a MockTdmsFile with 5 groups of 5 channels of waveform data."""
    waveform_properties = {
        "wf_start_time": 0,
        "wf_increment": 0.1,
        "wf_start_offset": 0,
        "extra": "info",
    }
    groups = []
    for g in range(5):
        group_name = f"Group {g}"
        channels = [
            MockTdmsChannel(
                name=f"Test/channel_{c}",
                group_name=group_name,
                data=[1, 2, 3],
                # copy so each channel owns its own properties dict, as in the
                # original inline-literal construction
                properties=dict(waveform_properties),
            )
            for c in range(5)
        ]
        groups.append(MockTdmsGroup(name=group_name, channels=channels))
    return MockTdmsFile(groups)
85
+
86
+
87
# Shared REST configuration used by every test in this module. The URI and API
# key are dummies: all network calls are patched, so nothing is contacted.
rest_config: SiftRestConfig = {
    "uri": "some_uri.com",
    "apikey": "123123123",
}
91
+
92
+
93
def test_tdms_upload_service_upload_validate_path(mocker: MockFixture):
    """upload() must reject a path that is not a regular file."""
    is_file = mocker.patch("sift_py.data_import.tdms.Path.is_file")
    is_file.return_value = False

    with pytest.raises(Exception, match="does not point to a regular file"):
        service = TdmsUploadService(rest_config)
        service.upload("some_data.tdms", "asset_name")
100
+
101
+
102
def test_tdms_upload_success(mocker: MockFixture, mock_tdms_file: MockTdmsFile):
    """Happy path: verify the CSV config generated from a mocked TDMS file.

    Exercises three upload variants against the same expected config:
    no grouping, group_into_components=True, and grouping plus run info.
    """
    mock_path_is_file = mocker.patch("sift_py.data_import.tdms.Path.is_file")
    mock_path_is_file.return_value = True

    mock_requests_post = mocker.patch("sift_py.data_import.csv.requests.post")
    mock_requests_post.return_value = MockResponse()

    def mock_tdms_file_constructor(path):
        """The first call should always return the mocked object since
        it is mocking a call to open the original tdms file.

        The second call should return a real TdmsFile since the unit
        test will actually create one with filtered channels.
        """
        print(path)
        if path == "some_tdms.tdms":
            return mock_tdms_file
        else:
            return TdmsFile(path)

    mocker.patch("sift_py.data_import.tdms.TdmsFile", mock_tdms_file_constructor)

    svc = TdmsUploadService(rest_config)

    def get_csv_config(mock, n):
        """Return the CSV config that was created and uploaded under the hood."""
        # NOTE(review): the `mock` parameter is unused; the closed-over
        # mock_requests_post is read instead.
        return json.loads(mock_requests_post.call_args_list[n].kwargs["data"])["csv_config"]

    # Test without grouping
    svc.upload("some_tdms.tdms", "asset_name")
    # Call indices 0/2/4 below indicate each upload issues two POSTs (config
    # then data — see CsvUploadService.upload), so upload k's config request
    # sits at index 2*k.
    config = get_csv_config(mock_requests_post, 0)
    expected_config: Dict[str, Any] = {
        "asset_name": "asset_name",
        "run_name": "",
        "run_id": "",
        "first_data_row": 2,
        "time_column": {
            "format": "TIME_FORMAT_ABSOLUTE_DATETIME",
            "column_number": 1,
            "relative_start_time": None,
        },
        "data_columns": {},
    }
    # 5 groups x 5 channels from the fixture; column 1 is the time column, so
    # data columns are numbered starting at 2.
    for i in range(5):
        for j in range(5):
            expected_config["data_columns"][str(2 + (i * 5) + j)] = {
                "name": f"Test channel_{j}",
                "data_type": "CHANNEL_DATA_TYPE_INT_32",
                "component": "",
                "units": "",
                "description": "None\nwf_start_time: 0\nwf_increment: 0.1\nwf_start_offset: 0\nextra: info\n",
                "enum_types": [],
                "bit_field_elements": [],
            }
    assert config == expected_config

    # Test with grouping
    svc.upload("some_tdms.tdms", "asset_name", group_into_components=True)
    config = get_csv_config(mock_requests_post, 2)
    for i in range(5):
        for j in range(5):
            expected_config["data_columns"][str(2 + (i * 5) + j)]["component"] = f"Group {i}"
    assert config == expected_config

    # Test with run information
    svc.upload(
        "some_tdms.tdms",
        "asset_name",
        group_into_components=True,
        run_name="Run Name",
        run_id="Run ID",
    )
    expected_config["run_name"] = "Run Name"
    expected_config["run_id"] = "Run ID"
    config = get_csv_config(mock_requests_post, 4)
    assert config == expected_config
178
+
179
+
180
def test_tdms_upload_ignore_errors(mocker: MockFixture):
    """Channels without timing properties are errors unless ignored.

    With the default ignore_errors=False the upload fails on the first channel
    lacking timing info; with ignore_errors=True such channels are dropped,
    and since every channel here is invalid the upload then fails because no
    valid channels remain.
    """
    mock_path_is_file = mocker.patch("sift_py.data_import.tdms.Path.is_file")
    mock_path_is_file.return_value = True

    mocker.patch("sift_py.data_import.tdms.TdmsWriter")

    mock_requests_post = mocker.patch("sift_py.data_import.csv.requests.post")
    mock_requests_post.return_value = MockResponse()

    # Start with all invalid channels (constructed with no properties, so they
    # carry none of the wf_* timing metadata the valid fixture channels have)
    mock_tdms_groups = [
        MockTdmsGroup(
            name=f"Group {g}",
            channels=[MockTdmsChannel(f"channel_{c}", f"Group {g}") for c in range(5)],
        )
        for g in range(5)
    ]
    mock_tdms_file = MockTdmsFile(mock_tdms_groups)

    def mock_tdms_file_constructor(path):
        """The first call should always return the mocked object since
        it is mocking a call to open the original tdms file.

        The second call should return a real TdmsFile since the unit
        test will actually create one with filtered channels.
        """
        print(path)
        if path == "some_tdms.tdms":
            return mock_tdms_file
        else:
            return TdmsFile(path)

    mocker.patch("sift_py.data_import.tdms.TdmsFile", mock_tdms_file_constructor)

    svc = TdmsUploadService(rest_config)

    with pytest.raises(Exception, match="does not contain timing information"):
        svc.upload("some_tdms.tdms", "asset_name")

    with pytest.raises(Exception, match="No valid channels remaining"):
        svc.upload("some_tdms.tdms", "asset_name", ignore_errors=True)
221
+
222
+
223
def test_tdms_upload_unknown_data_type(mocker: MockFixture, mock_tdms_file: MockTdmsFile):
    """A channel with an unsupported TDMS data type must abort the upload."""
    is_file = mocker.patch("sift_py.data_import.tdms.Path.is_file")
    is_file.return_value = True

    mocker.patch("sift_py.data_import.tdms.TdmsWriter")

    post_mock = mocker.patch("sift_py.data_import.csv.requests.post")
    post_mock.return_value = MockResponse()

    # Corrupt a single channel with a type the importer cannot map.
    first_channel = mock_tdms_file.groups()[0].channels()[0]
    first_channel.data_type = types.ComplexDoubleFloat
    mocker.patch("sift_py.data_import.tdms.TdmsFile").return_value = mock_tdms_file

    service = TdmsUploadService(rest_config)

    with pytest.raises(Exception, match="data type not supported"):
        service.upload("some_tdms.tdms", "asset_name")
@@ -0,0 +1,157 @@
1
+ import json
2
+ from typing import Any, Dict, Optional
3
+
4
+ import requests
5
+
6
+ from sift_py.data_import.config import CsvConfig
7
+ from sift_py.data_import.csv import CsvUploadService
8
+ from sift_py.data_import.status import DataImportService
9
+ from sift_py.data_import.time_format import TimeFormatType
10
+
11
+
12
class BaseCh10File:
    """
    Base class for uploading IRIG Chapter 10/Chapter 11 files.

    Implement a concrete version of this class that parses a ch10 stream and returns
    a csv row of data on each iteration.

    Set `gzip` to `True` if sending a compressed stream.

    Example:
    ```python

    class Ch10(BaseCh10File):

        def __init__(self, path):
            self.file = open(path, "rb")

        def initialize_csv_data_columns(self):
            self.csv_config_data_columns = self.process_ch10_computer_f1_packet()

        def process_ch10_computer_f1_packet(self) -> Dict[int, dict]:
            # Processes the first Computer F1 packet
            # and returns the measurements as a dict.
            ...

        def process_ch10_pcm_packet(self) -> str:
            # Processes the data packets and returns
            # a CSV row.
            ...

        def __next__(self) -> str:
            # On all iterations, return data for the CSV file.
            if end_of_file:
                raise StopIteration()
            else:
                return self.process_ch10_pcm_packet()
    ```
    """

    # Populated by initialize_csv_data_columns(); becomes the `data_columns`
    # entry of the CsvConfig built by Ch10UploadService.upload.
    csv_config_data_columns: Dict[int, dict]
    # When True, Ch10UploadService sends the stream with
    # `Content-Encoding: gzip`.
    gzip: bool = False

    def initialize_csv_data_columns(self) -> None:
        """
        Must populate the `csv_config_data_columns` attribute
        that is the data_columns entry in the CsvConfig.

        See the Sift data_import module or API docs for the schema.
        """
        raise NotImplementedError

    def __iter__(self):
        # The object is its own iterator; the upload service streams it
        # directly as the HTTP request body.
        return self

    def __next__(self) -> str:
        """Return the next CSV row; subclasses raise StopIteration at EOF."""
        raise NotImplementedError
69
+
70
+
71
class Ch10UploadService(CsvUploadService):
    """Service to upload ch10 files."""

    def upload(  # type: ignore
        self,
        ch10_file: BaseCh10File,
        asset_name: str,
        time_format: TimeFormatType = TimeFormatType.ABSOLUTE_UNIX_NANOSECONDS,
        run_name: Optional[str] = None,
        run_id: Optional[str] = None,
    ) -> DataImportService:
        """
        Uploads the ch10 file to the specified asset.

        Override `time_format` to specify the time data format. Default is `TimeFormatType.ABSOLUTE_UNIX_NANOSECONDS`.
        Override `run_name` to specify the name of the run to create for this data. Default is None.
        Override `run_id` to specify the id of the run to add this data to. Default is None.

        Raises:
            Exception: if either upload request fails or the server response
                is missing the expected keys.
        """
        ch10_file.initialize_csv_data_columns()

        # NOTE(review): `assert` is stripped under `python -O`; kept for
        # backward compatibility with callers expecting AssertionError.
        assert getattr(ch10_file, "csv_config_data_columns"), (
            "`csv_config_data_columns` was not set correctly on the first iteration"
        )

        config_info: Dict[str, Any] = {
            "asset_name": asset_name,
            "first_data_row": 2,
            "time_column": {
                "format": time_format,
                "column_number": 1,
            },
            "data_columns": ch10_file.csv_config_data_columns,
        }
        if run_name:
            config_info["run_name"] = run_name

        if run_id:
            # BUG FIX: this previously assigned `run_name`, silently replacing
            # the caller's run id with the run name.
            config_info["run_id"] = run_id

        csv_config = CsvConfig(config_info)

        # First request: register the CSV config and obtain the upload URL.
        response = requests.post(
            url=self._upload_uri,
            headers={
                "Authorization": f"Bearer {self._apikey}",
                "Content-Encoding": "application/octet-stream",
            },
            data=json.dumps({"csv_config": csv_config.to_dict()}),
        )

        if response.status_code != 200:
            raise Exception(
                f"Config file upload request failed with status code {response.status_code}. {response.text}"
            )

        try:
            upload_info = response.json()
        except (json.decoder.JSONDecodeError, KeyError) as e:
            raise Exception(f"Invalid response: {response.text}.\n{e}")

        try:
            upload_url: str = upload_info["uploadUrl"]
            data_import_id: str = upload_info["dataImportId"]
        except KeyError as e:
            raise Exception(
                f"Response missing required keys: {e}. This is unexpected. Please reach out to the Sift team about this error."
            )

        headers = {
            "Authorization": f"Bearer {self._apikey}",
        }

        if ch10_file.gzip:
            headers["Content-Encoding"] = "gzip"

        # Second request: stream the ch10 file itself. `requests` iterates the
        # file object, consuming the CSV rows produced by __next__.
        response = requests.post(
            url=upload_url,
            headers=headers,
            data=ch10_file,
        )

        if response.status_code != 200:
            raise Exception(
                f"Data file upload request failed with status code {response.status_code}. {response.text}"
            )

        return DataImportService(self._rest_conf, data_import_id)
@@ -0,0 +1,19 @@
1
+ from typing import Any, Dict
2
+
3
+ from sift_py.data_import._config import CsvConfigImpl
4
+
5
+
6
class CsvConfig:
    """
    Defines the CSV config for data imports.
    """

    def __init__(self, config_info: Dict[str, Any]):
        # Validate eagerly: building CsvConfigImpl raises if `config_info` is
        # malformed (its model_dump* API suggests a pydantic model — verify).
        self._config_info = config_info
        self._csv_config = CsvConfigImpl(**config_info)

    def to_json(self) -> str:
        """Serialize the validated config as a JSON string."""
        return self._csv_config.model_dump_json()

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the validated config as a plain dictionary."""
        return self._csv_config.model_dump()
@@ -0,0 +1,259 @@
1
+ import json
2
+ import mimetypes
3
+ from pathlib import Path
4
+ from typing import Any, Dict, List, Optional, Tuple, Union, cast
5
+ from urllib.parse import urljoin, urlparse
6
+
7
+ import pandas as pd
8
+ import requests
9
+
10
+ from sift_py.data_import.config import CsvConfig
11
+ from sift_py.data_import.status import DataImportService
12
+ from sift_py.data_import.time_format import TimeFormatType
13
+ from sift_py.ingestion.channel import ChannelDataType
14
+ from sift_py.rest import SiftRestConfig, compute_uri
15
+
16
+
17
class CsvUploadService:
    """
    Service for uploading CSV files to Sift as data imports.

    Uploading is a two-step exchange with the REST API: the CSV config is
    first POSTed to ``UPLOAD_PATH``, which responds with a signed upload URL
    and a data import id; the file contents are then POSTed to that URL.
    """

    UPLOAD_PATH = "/api/v1/data-imports:upload"
    URL_PATH = "/api/v1/data-imports:url"

    _rest_conf: SiftRestConfig
    _upload_uri: str
    _url_uri: str
    _apikey: str

    def __init__(self, rest_conf: SiftRestConfig):
        self._rest_conf = rest_conf
        base_uri = compute_uri(rest_conf)
        self._apikey = rest_conf["apikey"]
        self._upload_uri = urljoin(base_uri, self.UPLOAD_PATH)
        self._url_uri = urljoin(base_uri, self.URL_PATH)

    def upload(
        self,
        path: Union[str, Path],
        csv_config: CsvConfig,
    ) -> DataImportService:
        """
        Uploads the CSV file pointed to by `path` using a custom CSV config.

        Raises `Exception` if `path` is not a regular CSV/plain-text file or
        if either HTTP request fails.
        """
        content_encoding = self._validate_file_type(path)

        # Step 1: register the import by sending the CSV config.
        # NOTE(review): "Content-Encoding" carrying a MIME type looks like it
        # was meant to be "Content-Type"; preserved as-is since the server
        # currently accepts it — confirm before changing.
        response = requests.post(
            url=self._upload_uri,
            headers={
                "Authorization": f"Bearer {self._apikey}",
                "Content-Encoding": "application/octet-stream",
            },
            data=json.dumps({"csv_config": csv_config.to_dict()}),
        )

        if response.status_code != 200:
            raise Exception(
                f"Config file upload request failed with status code {response.status_code}. {response.text}"
            )

        try:
            upload_info = response.json()
        except (json.decoder.JSONDecodeError, KeyError) as e:
            raise Exception(f"Invalid response: {response.text}") from e

        try:
            upload_url: str = upload_info["uploadUrl"]
            data_import_id: str = upload_info["dataImportId"]
        except KeyError as e:
            raise Exception(f"Response missing required keys: {e}")

        # Step 2: stream the file contents to the signed upload URL.
        with open(path, "rb") as f:
            headers = {
                "Authorization": f"Bearer {self._apikey}",
                # None for plain files; "gzip" for compressed ones (requests
                # drops None-valued headers).
                "Content-Encoding": content_encoding,
            }

            response = requests.post(
                url=upload_url,
                headers=headers,
                data=f,
            )

            if response.status_code != 200:
                raise Exception(
                    f"Data file upload request failed with status code {response.status_code}. {response.text}"
                )

        return DataImportService(self._rest_conf, data_import_id)

    def upload_from_url(
        self,
        url: str,
        csv_config: CsvConfig,
    ) -> DataImportService:
        """
        Uploads the CSV file pointed to by `url` using a custom CSV config.

        Only S3 and HTTP(S) URLs are accepted. Raises `Exception` on an
        unsupported scheme or a failed request.
        """
        parsed_url = urlparse(url)
        if parsed_url.scheme not in ["s3", "http", "https"]:
            raise Exception(
                f"Invalid URL scheme: '{parsed_url.scheme}'. Only S3 and HTTP(S) URLs are supported."
            )

        headers = {"Authorization": f"Bearer {self._apikey}"}

        response = requests.post(
            url=self._url_uri,
            headers=headers,
            data=json.dumps(
                {
                    "url": url,
                    "csv_config": csv_config.to_dict(),
                }
            ),
        )

        if response.status_code != 200:
            raise Exception(
                f"URL upload request failed with status code {response.status_code}. {response.text}"
            )

        try:
            upload_info = response.json()
        except (json.decoder.JSONDecodeError, KeyError) as e:
            raise Exception(f"Invalid response: {e}")

        try:
            data_import_id: str = upload_info["dataImportId"]
        except KeyError as e:
            raise Exception(f"Response missing required keys: {e}")

        return DataImportService(self._rest_conf, data_import_id)

    def simple_upload(
        self,
        asset_name: str,
        path: Union[str, Path],
        first_data_row: int = 2,
        time_column: int = 1,
        time_format: TimeFormatType = TimeFormatType.ABSOLUTE_DATETIME,
        run_name: Optional[str] = None,
        run_id: Optional[str] = None,
        units_row: Optional[int] = None,
        descriptions_row: Optional[int] = None,
        relative_start_time: Optional[str] = None,
    ) -> DataImportService:
        """
        Uploads the CSV file pointed to by `path` to the specified asset. This function will
        infer the data types and assume certain things about how the data is formatted. See the options
        below for what parameters can be overridden. Use `upload` if you need to specify a custom CSV config.

        Override `first_data_row` to specify which is the first row with data. Default is 2.
        Override `time_column` to specify which column contains timestamp information. Default is 1.
        Override `time_format` to specify the time data format. Default is `TimeFormatType.ABSOLUTE_DATETIME`.
        Override `run_name` to specify the name of the run to create for this data. Default is None.
        Override `run_id` to specify the id of the run to add this data to. Default is None.
        Override `units_row` to specify which row contains unit information. Default is None.
        Override `descriptions_row` to specify which row contains channel description information. Default is None.
        Override `relative_start_time` if a relative time format is used. Default is None.
        """
        self._validate_file_type(path)

        # Convert the 1-indexed row arguments to pandas' 0-indexed rows and
        # collect the metadata rows so they are skipped when reading data.
        skip_rows: List[int] = []
        if units_row is not None:
            units_row -= 1
            skip_rows.append(units_row)
        if descriptions_row is not None:
            descriptions_row -= 1
            skip_rows.append(descriptions_row)

        data_config = {}
        df = pd.read_csv(path, skiprows=skip_rows)

        units: List[str] = []
        if units_row is not None:
            # NOTE(review): units_row was already converted to 0-index above,
            # so an original value of 1 reads zero rows here and the iloc
            # below would fail — confirm whether row 1 must be supported.
            df_units = pd.read_csv(path, nrows=units_row)
            units = list(cast(List[str], df_units.iloc[units_row - 1].astype(str)))

        descriptions: List[str] = []
        if descriptions_row is not None:
            df_descriptions = pd.read_csv(path, nrows=descriptions_row)
            descriptions = list(
                cast(List[str], df_descriptions.iloc[descriptions_row - 1].astype(str))
            )

        for i, header in enumerate(df.columns):
            # Skip the timestamp column; it is described by `time_column`.
            if i + 1 == time_column:
                continue

            raw_dtype = str(df[df.columns[i]].dtype)
            if raw_dtype == "float64":
                raw_dtype = "double"
            # String columns are set to 'object'. Use infer_dtypes
            # to verify this is a string column
            elif raw_dtype == "object":
                raw_dtype = pd.api.types.infer_dtype(df[df.columns[i]], skipna=False)

            data_type = ChannelDataType.from_str(raw_dtype)
            if data_type is None:
                raise Exception(
                    f"Unable to upload data type in column {i + 1} {header}: Type: {raw_dtype}."
                )
            # CSV config columns are 1-indexed.
            data_config[i + 1] = {"name": header, "data_type": data_type}

            # pandas renders missing metadata cells as the string "nan".
            if units:
                data_config[i + 1]["units"] = units[i] if units[i] != "nan" else ""

            if descriptions:
                data_config[i + 1]["description"] = (
                    descriptions[i] if descriptions[i] != "nan" else ""
                )

        config_info: Dict[str, Any] = {
            "asset_name": asset_name,
            "first_data_row": first_data_row,
            "time_column": {
                "format": time_format,
                "column_number": time_column,
            },
            "data_columns": data_config,
        }

        if run_name is not None:
            config_info["run_name"] = run_name

        if run_id is not None:
            config_info["run_id"] = run_id

        if relative_start_time is not None:
            config_info["time_column"]["relative_start_time"] = relative_start_time

        csv_config = CsvConfig(config_info)

        return self.upload(path, csv_config)

    def _validate_file_type(self, path: Union[str, Path]) -> Optional[str]:
        """
        Ensures `path` points to a regular file with a CSV-compatible MIME
        type and returns its content encoding (e.g. "gzip"), if any.

        Raises `Exception` if the path is not a regular file, the MIME type
        cannot be determined, or the MIME type is not CSV-compatible.
        """
        posix_path = Path(path) if isinstance(path, str) else path

        if not posix_path.is_file():
            raise Exception(f"Provided path, '{path}', does not point to a regular file.")

        _, mimetype, content_encoding = self.__class__._mime_and_content_type_from_path(posix_path)

        if not mimetype:
            raise Exception(f"The MIME-type of '{posix_path}' could not be computed.")

        # BUG FIX: "test/plain" was a typo for the registered MIME type
        # "text/plain", which caused plain-text CSV exports (e.g. ".txt")
        # to be rejected.
        valid_types = ["text/plain", "text/csv", "application/vnd.ms-excel"]
        if mimetype not in valid_types:
            raise Exception(
                f"{path} is not a valid file type ({mimetype}). Must be {', '.join(valid_types)}."
            )

        return content_encoding

    @staticmethod
    def _mime_and_content_type_from_path(path: Path) -> Tuple[str, Optional[str], Optional[str]]:
        """Return (file name, guessed MIME type, guessed content encoding) for `path`."""
        file_name = path.name
        mime, encoding = mimetypes.guess_type(path)
        return file_name, mime, encoding