sift_stack_py-0.3.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (291)
  1. google/__init__.py +1 -0
  2. google/api/__init__.py +0 -0
  3. google/api/annotations_pb2.py +27 -0
  4. google/api/annotations_pb2.pyi +29 -0
  5. google/api/annotations_pb2_grpc.py +4 -0
  6. google/api/annotations_pb2_grpc.pyi +30 -0
  7. google/api/field_behavior_pb2.py +30 -0
  8. google/api/field_behavior_pb2.pyi +175 -0
  9. google/api/field_behavior_pb2_grpc.py +4 -0
  10. google/api/field_behavior_pb2_grpc.pyi +30 -0
  11. google/api/http_pb2.py +31 -0
  12. google/api/http_pb2.pyi +433 -0
  13. google/api/http_pb2_grpc.py +4 -0
  14. google/api/http_pb2_grpc.pyi +30 -0
  15. protoc_gen_openapiv2/__init__.py +0 -0
  16. protoc_gen_openapiv2/options/__init__.py +0 -0
  17. protoc_gen_openapiv2/options/annotations_pb2.py +27 -0
  18. protoc_gen_openapiv2/options/annotations_pb2.pyi +48 -0
  19. protoc_gen_openapiv2/options/annotations_pb2_grpc.py +4 -0
  20. protoc_gen_openapiv2/options/annotations_pb2_grpc.pyi +17 -0
  21. protoc_gen_openapiv2/options/openapiv2_pb2.py +132 -0
  22. protoc_gen_openapiv2/options/openapiv2_pb2.pyi +1533 -0
  23. protoc_gen_openapiv2/options/openapiv2_pb2_grpc.py +4 -0
  24. protoc_gen_openapiv2/options/openapiv2_pb2_grpc.pyi +17 -0
  25. sift/__init__.py +0 -0
  26. sift/annotation_logs/__init__.py +0 -0
  27. sift/annotation_logs/v1/__init__.py +0 -0
  28. sift/annotation_logs/v1/annotation_logs_pb2.py +115 -0
  29. sift/annotation_logs/v1/annotation_logs_pb2.pyi +370 -0
  30. sift/annotation_logs/v1/annotation_logs_pb2_grpc.py +135 -0
  31. sift/annotation_logs/v1/annotation_logs_pb2_grpc.pyi +84 -0
  32. sift/annotations/__init__.py +0 -0
  33. sift/annotations/v1/__init__.py +0 -0
  34. sift/annotations/v1/annotations_pb2.py +180 -0
  35. sift/annotations/v1/annotations_pb2.pyi +539 -0
  36. sift/annotations/v1/annotations_pb2_grpc.py +237 -0
  37. sift/annotations/v1/annotations_pb2_grpc.pyi +144 -0
  38. sift/assets/__init__.py +0 -0
  39. sift/assets/v1/__init__.py +0 -0
  40. sift/assets/v1/assets_pb2.py +90 -0
  41. sift/assets/v1/assets_pb2.pyi +235 -0
  42. sift/assets/v1/assets_pb2_grpc.py +168 -0
  43. sift/assets/v1/assets_pb2_grpc.pyi +101 -0
  44. sift/calculated_channels/__init__.py +0 -0
  45. sift/calculated_channels/v1/__init__.py +0 -0
  46. sift/calculated_channels/v1/calculated_channels_pb2.py +99 -0
  47. sift/calculated_channels/v1/calculated_channels_pb2.pyi +280 -0
  48. sift/calculated_channels/v1/calculated_channels_pb2_grpc.py +101 -0
  49. sift/calculated_channels/v1/calculated_channels_pb2_grpc.pyi +64 -0
  50. sift/campaigns/__init__.py +0 -0
  51. sift/campaigns/v1/__init__.py +0 -0
  52. sift/campaigns/v1/campaigns_pb2.py +144 -0
  53. sift/campaigns/v1/campaigns_pb2.pyi +383 -0
  54. sift/campaigns/v1/campaigns_pb2_grpc.py +169 -0
  55. sift/campaigns/v1/campaigns_pb2_grpc.pyi +104 -0
  56. sift/channel_schemas/__init__.py +0 -0
  57. sift/channel_schemas/v1/__init__.py +0 -0
  58. sift/channel_schemas/v1/channel_schemas_pb2.py +69 -0
  59. sift/channel_schemas/v1/channel_schemas_pb2.pyi +117 -0
  60. sift/channel_schemas/v1/channel_schemas_pb2_grpc.py +101 -0
  61. sift/channel_schemas/v1/channel_schemas_pb2_grpc.pyi +64 -0
  62. sift/channels/__init__.py +0 -0
  63. sift/channels/v2/__init__.py +0 -0
  64. sift/channels/v2/channels_pb2.py +88 -0
  65. sift/channels/v2/channels_pb2.pyi +183 -0
  66. sift/channels/v2/channels_pb2_grpc.py +101 -0
  67. sift/channels/v2/channels_pb2_grpc.pyi +64 -0
  68. sift/common/__init__.py +0 -0
  69. sift/common/type/__init__.py +0 -0
  70. sift/common/type/v1/__init__.py +0 -0
  71. sift/common/type/v1/channel_bit_field_element_pb2.py +34 -0
  72. sift/common/type/v1/channel_bit_field_element_pb2.pyi +33 -0
  73. sift/common/type/v1/channel_bit_field_element_pb2_grpc.py +4 -0
  74. sift/common/type/v1/channel_bit_field_element_pb2_grpc.pyi +17 -0
  75. sift/common/type/v1/channel_data_type_pb2.py +29 -0
  76. sift/common/type/v1/channel_data_type_pb2.pyi +50 -0
  77. sift/common/type/v1/channel_data_type_pb2_grpc.py +4 -0
  78. sift/common/type/v1/channel_data_type_pb2_grpc.pyi +17 -0
  79. sift/common/type/v1/channel_enum_type_pb2.py +32 -0
  80. sift/common/type/v1/channel_enum_type_pb2.pyi +29 -0
  81. sift/common/type/v1/channel_enum_type_pb2_grpc.py +4 -0
  82. sift/common/type/v1/channel_enum_type_pb2_grpc.pyi +17 -0
  83. sift/common/type/v1/organization_pb2.py +27 -0
  84. sift/common/type/v1/organization_pb2.pyi +29 -0
  85. sift/common/type/v1/organization_pb2_grpc.py +4 -0
  86. sift/common/type/v1/organization_pb2_grpc.pyi +17 -0
  87. sift/common/type/v1/resource_identifier_pb2.py +46 -0
  88. sift/common/type/v1/resource_identifier_pb2.pyi +145 -0
  89. sift/common/type/v1/resource_identifier_pb2_grpc.py +4 -0
  90. sift/common/type/v1/resource_identifier_pb2_grpc.pyi +17 -0
  91. sift/common/type/v1/user_pb2.py +33 -0
  92. sift/common/type/v1/user_pb2.pyi +36 -0
  93. sift/common/type/v1/user_pb2_grpc.py +4 -0
  94. sift/common/type/v1/user_pb2_grpc.pyi +17 -0
  95. sift/data/__init__.py +0 -0
  96. sift/data/v1/__init__.py +0 -0
  97. sift/data/v1/data_pb2.py +212 -0
  98. sift/data/v1/data_pb2.pyi +745 -0
  99. sift/data/v1/data_pb2_grpc.py +67 -0
  100. sift/data/v1/data_pb2_grpc.pyi +44 -0
  101. sift/ingest/__init__.py +0 -0
  102. sift/ingest/v1/__init__.py +0 -0
  103. sift/ingest/v1/ingest_pb2.py +35 -0
  104. sift/ingest/v1/ingest_pb2.pyi +118 -0
  105. sift/ingest/v1/ingest_pb2_grpc.py +66 -0
  106. sift/ingest/v1/ingest_pb2_grpc.pyi +41 -0
  107. sift/ingestion_configs/__init__.py +0 -0
  108. sift/ingestion_configs/v1/__init__.py +0 -0
  109. sift/ingestion_configs/v1/ingestion_configs_pb2.py +115 -0
  110. sift/ingestion_configs/v1/ingestion_configs_pb2.pyi +332 -0
  111. sift/ingestion_configs/v1/ingestion_configs_pb2_grpc.py +203 -0
  112. sift/ingestion_configs/v1/ingestion_configs_pb2_grpc.pyi +124 -0
  113. sift/notifications/__init__.py +0 -0
  114. sift/notifications/v1/__init__.py +0 -0
  115. sift/notifications/v1/notifications_pb2.py +64 -0
  116. sift/notifications/v1/notifications_pb2.pyi +225 -0
  117. sift/notifications/v1/notifications_pb2_grpc.py +101 -0
  118. sift/notifications/v1/notifications_pb2_grpc.pyi +64 -0
  119. sift/ping/__init__.py +0 -0
  120. sift/ping/v1/__init__.py +0 -0
  121. sift/ping/v1/ping_pb2.py +38 -0
  122. sift/ping/v1/ping_pb2.pyi +36 -0
  123. sift/ping/v1/ping_pb2_grpc.py +66 -0
  124. sift/ping/v1/ping_pb2_grpc.pyi +41 -0
  125. sift/remote_files/__init__.py +0 -0
  126. sift/remote_files/v1/__init__.py +0 -0
  127. sift/remote_files/v1/remote_files_pb2.py +174 -0
  128. sift/remote_files/v1/remote_files_pb2.pyi +472 -0
  129. sift/remote_files/v1/remote_files_pb2_grpc.py +271 -0
  130. sift/remote_files/v1/remote_files_pb2_grpc.pyi +164 -0
  131. sift/report_templates/__init__.py +0 -0
  132. sift/report_templates/v1/__init__.py +0 -0
  133. sift/report_templates/v1/report_templates_pb2.py +146 -0
  134. sift/report_templates/v1/report_templates_pb2.pyi +381 -0
  135. sift/report_templates/v1/report_templates_pb2_grpc.py +169 -0
  136. sift/report_templates/v1/report_templates_pb2_grpc.pyi +104 -0
  137. sift/reports/__init__.py +0 -0
  138. sift/reports/v1/__init__.py +0 -0
  139. sift/reports/v1/reports_pb2.py +193 -0
  140. sift/reports/v1/reports_pb2.pyi +562 -0
  141. sift/reports/v1/reports_pb2_grpc.py +205 -0
  142. sift/reports/v1/reports_pb2_grpc.pyi +136 -0
  143. sift/rule_evaluation/__init__.py +0 -0
  144. sift/rule_evaluation/v1/__init__.py +0 -0
  145. sift/rule_evaluation/v1/rule_evaluation_pb2.py +89 -0
  146. sift/rule_evaluation/v1/rule_evaluation_pb2.pyi +263 -0
  147. sift/rule_evaluation/v1/rule_evaluation_pb2_grpc.py +101 -0
  148. sift/rule_evaluation/v1/rule_evaluation_pb2_grpc.pyi +64 -0
  149. sift/rules/__init__.py +0 -0
  150. sift/rules/v1/__init__.py +0 -0
  151. sift/rules/v1/rules_pb2.py +420 -0
  152. sift/rules/v1/rules_pb2.pyi +1355 -0
  153. sift/rules/v1/rules_pb2_grpc.py +577 -0
  154. sift/rules/v1/rules_pb2_grpc.pyi +351 -0
  155. sift/runs/__init__.py +0 -0
  156. sift/runs/v2/__init__.py +0 -0
  157. sift/runs/v2/runs_pb2.py +150 -0
  158. sift/runs/v2/runs_pb2.pyi +413 -0
  159. sift/runs/v2/runs_pb2_grpc.py +271 -0
  160. sift/runs/v2/runs_pb2_grpc.pyi +164 -0
  161. sift/saved_searches/__init__.py +0 -0
  162. sift/saved_searches/v1/__init__.py +0 -0
  163. sift/saved_searches/v1/saved_searches_pb2.py +144 -0
  164. sift/saved_searches/v1/saved_searches_pb2.pyi +385 -0
  165. sift/saved_searches/v1/saved_searches_pb2_grpc.py +237 -0
  166. sift/saved_searches/v1/saved_searches_pb2_grpc.pyi +144 -0
  167. sift/tags/__init__.py +0 -0
  168. sift/tags/v1/__init__.py +0 -0
  169. sift/tags/v1/tags_pb2.py +49 -0
  170. sift/tags/v1/tags_pb2.pyi +71 -0
  171. sift/tags/v1/tags_pb2_grpc.py +4 -0
  172. sift/tags/v1/tags_pb2_grpc.pyi +17 -0
  173. sift/users/__init__.py +0 -0
  174. sift/users/v2/__init__.py +0 -0
  175. sift/users/v2/users_pb2.py +61 -0
  176. sift/users/v2/users_pb2.pyi +142 -0
  177. sift/users/v2/users_pb2_grpc.py +135 -0
  178. sift/users/v2/users_pb2_grpc.pyi +84 -0
  179. sift/views/__init__.py +0 -0
  180. sift/views/v1/__init__.py +0 -0
  181. sift/views/v1/views_pb2.py +130 -0
  182. sift/views/v1/views_pb2.pyi +466 -0
  183. sift/views/v1/views_pb2_grpc.py +305 -0
  184. sift/views/v1/views_pb2_grpc.pyi +184 -0
  185. sift_grafana/py.typed +0 -0
  186. sift_grafana/sift_query_model.py +64 -0
  187. sift_py/__init__.py +923 -0
  188. sift_py/_internal/__init__.py +5 -0
  189. sift_py/_internal/cel.py +18 -0
  190. sift_py/_internal/channel.py +42 -0
  191. sift_py/_internal/convert/__init__.py +3 -0
  192. sift_py/_internal/convert/json.py +24 -0
  193. sift_py/_internal/convert/protobuf.py +34 -0
  194. sift_py/_internal/convert/timestamp.py +9 -0
  195. sift_py/_internal/test_util/__init__.py +0 -0
  196. sift_py/_internal/test_util/channel.py +136 -0
  197. sift_py/_internal/test_util/fn.py +14 -0
  198. sift_py/_internal/test_util/server_interceptor.py +62 -0
  199. sift_py/_internal/time.py +48 -0
  200. sift_py/_internal/user.py +39 -0
  201. sift_py/data/__init__.py +171 -0
  202. sift_py/data/_channel.py +38 -0
  203. sift_py/data/_deserialize.py +208 -0
  204. sift_py/data/_deserialize_test.py +134 -0
  205. sift_py/data/_service_test.py +276 -0
  206. sift_py/data/_validate.py +10 -0
  207. sift_py/data/error.py +5 -0
  208. sift_py/data/query.py +299 -0
  209. sift_py/data/service.py +497 -0
  210. sift_py/data_import/__init__.py +130 -0
  211. sift_py/data_import/_config.py +167 -0
  212. sift_py/data_import/_config_test.py +166 -0
  213. sift_py/data_import/_csv_test.py +395 -0
  214. sift_py/data_import/_status_test.py +176 -0
  215. sift_py/data_import/_tdms_test.py +238 -0
  216. sift_py/data_import/ch10.py +157 -0
  217. sift_py/data_import/config.py +19 -0
  218. sift_py/data_import/csv.py +259 -0
  219. sift_py/data_import/status.py +113 -0
  220. sift_py/data_import/tdms.py +206 -0
  221. sift_py/data_import/tempfile.py +30 -0
  222. sift_py/data_import/time_format.py +39 -0
  223. sift_py/error.py +11 -0
  224. sift_py/file_attachment/__init__.py +88 -0
  225. sift_py/file_attachment/_internal/__init__.py +0 -0
  226. sift_py/file_attachment/_internal/download.py +13 -0
  227. sift_py/file_attachment/_internal/upload.py +100 -0
  228. sift_py/file_attachment/_service_test.py +161 -0
  229. sift_py/file_attachment/entity.py +30 -0
  230. sift_py/file_attachment/metadata.py +107 -0
  231. sift_py/file_attachment/service.py +142 -0
  232. sift_py/grpc/__init__.py +15 -0
  233. sift_py/grpc/_async_interceptors/__init__.py +0 -0
  234. sift_py/grpc/_async_interceptors/base.py +72 -0
  235. sift_py/grpc/_async_interceptors/metadata.py +36 -0
  236. sift_py/grpc/_interceptors/__init__.py +0 -0
  237. sift_py/grpc/_interceptors/base.py +61 -0
  238. sift_py/grpc/_interceptors/context.py +25 -0
  239. sift_py/grpc/_interceptors/metadata.py +33 -0
  240. sift_py/grpc/_retry.py +70 -0
  241. sift_py/grpc/keepalive.py +34 -0
  242. sift_py/grpc/transport.py +250 -0
  243. sift_py/grpc/transport_test.py +170 -0
  244. sift_py/ingestion/__init__.py +6 -0
  245. sift_py/ingestion/_internal/__init__.py +6 -0
  246. sift_py/ingestion/_internal/channel.py +12 -0
  247. sift_py/ingestion/_internal/error.py +10 -0
  248. sift_py/ingestion/_internal/ingest.py +350 -0
  249. sift_py/ingestion/_internal/ingest_test.py +357 -0
  250. sift_py/ingestion/_internal/ingestion_config.py +130 -0
  251. sift_py/ingestion/_internal/run.py +46 -0
  252. sift_py/ingestion/_service_test.py +478 -0
  253. sift_py/ingestion/buffer.py +189 -0
  254. sift_py/ingestion/channel.py +422 -0
  255. sift_py/ingestion/config/__init__.py +3 -0
  256. sift_py/ingestion/config/telemetry.py +281 -0
  257. sift_py/ingestion/config/telemetry_test.py +405 -0
  258. sift_py/ingestion/config/yaml/__init__.py +0 -0
  259. sift_py/ingestion/config/yaml/error.py +44 -0
  260. sift_py/ingestion/config/yaml/load.py +126 -0
  261. sift_py/ingestion/config/yaml/spec.py +58 -0
  262. sift_py/ingestion/config/yaml/test_load.py +25 -0
  263. sift_py/ingestion/flow.py +73 -0
  264. sift_py/ingestion/manager.py +99 -0
  265. sift_py/ingestion/rule/__init__.py +4 -0
  266. sift_py/ingestion/rule/config.py +11 -0
  267. sift_py/ingestion/service.py +237 -0
  268. sift_py/py.typed +0 -0
  269. sift_py/report_templates/__init__.py +0 -0
  270. sift_py/report_templates/_config_test.py +34 -0
  271. sift_py/report_templates/_service_test.py +94 -0
  272. sift_py/report_templates/config.py +36 -0
  273. sift_py/report_templates/service.py +171 -0
  274. sift_py/rest.py +29 -0
  275. sift_py/rule/__init__.py +0 -0
  276. sift_py/rule/_config_test.py +109 -0
  277. sift_py/rule/_service_test.py +168 -0
  278. sift_py/rule/config.py +229 -0
  279. sift_py/rule/service.py +484 -0
  280. sift_py/yaml/__init__.py +0 -0
  281. sift_py/yaml/_channel_test.py +169 -0
  282. sift_py/yaml/_rule_test.py +207 -0
  283. sift_py/yaml/channel.py +224 -0
  284. sift_py/yaml/report_templates.py +73 -0
  285. sift_py/yaml/rule.py +321 -0
  286. sift_py/yaml/utils.py +15 -0
  287. sift_stack_py-0.3.2.dist-info/LICENSE +7 -0
  288. sift_stack_py-0.3.2.dist-info/METADATA +109 -0
  289. sift_stack_py-0.3.2.dist-info/RECORD +291 -0
  290. sift_stack_py-0.3.2.dist-info/WHEEL +5 -0
  291. sift_stack_py-0.3.2.dist-info/top_level.txt +5 -0
sift_py/data/service.py
@@ -0,0 +1,497 @@
+ import asyncio
+ from collections import defaultdict
+ from typing import Dict, Iterable, List, Optional, Set, Tuple, Union, cast
+
+ from google.protobuf.any_pb2 import Any
+ from sift.assets.v1.assets_pb2 import Asset, ListAssetsRequest, ListAssetsResponse
+ from sift.assets.v1.assets_pb2_grpc import AssetServiceStub
+ from sift.calculated_channels.v1.calculated_channels_pb2 import (
+     ExpressionChannelReference,
+     ExpressionRequest,
+ )
+ from sift.calculated_channels.v1.calculated_channels_pb2_grpc import CalculatedChannelsServiceStub
+ from sift.channels.v2.channels_pb2 import Channel, ListChannelsRequest, ListChannelsResponse
+ from sift.channels.v2.channels_pb2_grpc import ChannelServiceStub
+ from sift.data.v1.data_pb2 import CalculatedChannelQuery as CalculatedChannelQueryPb
+ from sift.data.v1.data_pb2 import ChannelQuery as ChannelQueryPb
+ from sift.data.v1.data_pb2 import GetDataRequest, GetDataResponse, Query
+ from sift.data.v1.data_pb2_grpc import DataServiceStub
+ from sift.runs.v2.runs_pb2 import ListRunsRequest, ListRunsResponse, Run
+ from sift.runs.v2.runs_pb2_grpc import RunServiceStub
+ from typing_extensions import TypeAlias
+
+ from sift_py._internal.cel import cel_in
+ from sift_py._internal.channel import channel_fqn
+ from sift_py._internal.convert.timestamp import to_pb_timestamp
+ from sift_py.data._channel import ChannelTimeSeries
+ from sift_py.data._deserialize import try_deserialize_channel_data
+ from sift_py.data._validate import validate_channel_reference
+ from sift_py.data.error import DataError
+ from sift_py.data.query import CalculatedChannelQuery, ChannelQuery, DataQuery, DataQueryResult
+ from sift_py.error import SiftError
+ from sift_py.grpc.transport import SiftAsyncChannel
+ from sift_py.ingestion.channel import ChannelDataType
+
+
+ class DataService:
+     """
+     A service that asynchronously executes a `sift_py.data.query.DataQuery` to retrieve telemetry
+     for an arbitrary amount of channels (or calculated channels) within a user-specified time-range
+     and sampling rate.
+     """
+
+     # TODO: There is a pagination issue API side when requesting multiple channels in single request.
+     # If all data points for all channels in a single request don't fit into a single page, then
+     # paging seems to omit all but a single channel. We can increase this batch size once that issue
+     # has been resolved. In the mean time each channel gets its own request.
+     REQUEST_BATCH_SIZE = 1
+
+     AssetName: TypeAlias = str
+     ChannelFqn: TypeAlias = str
+     RunName: TypeAlias = str
+
+     _asset_service_stub: AssetServiceStub
+     _channel_service_stub: ChannelServiceStub
+     _calculated_channel_service_stub: CalculatedChannelsServiceStub
+     _data_service_stub: DataServiceStub
+     _run_service_stub: RunServiceStub
+
+     _cached_assets: Dict[AssetName, Asset]
+     _cached_channels: Dict[AssetName, Dict[ChannelFqn, List[Channel]]]
+     _cached_runs: Dict[RunName, Run]
+
+     def __init__(self, channel: SiftAsyncChannel):
+         self._asset_service_stub = AssetServiceStub(channel)
+         self._channel_service_stub = ChannelServiceStub(channel)
+         self._calculated_channel_service_stub = CalculatedChannelsServiceStub(channel)
+         self._data_service_stub = DataServiceStub(channel)
+         self._run_service_stub = RunServiceStub(channel)
+
+         self._cached_assets = {}
+         self._cached_channels = {}
+         self._cached_runs = {}
+
+     async def execute(self, query: DataQuery, bust_cache: bool = False) -> DataQueryResult:
+         """
+         Performs the actual query to retrieve telemetry.
+         """
+
+         if bust_cache:
+             self._bust_cache()
+
+         asset = await self._load_asset(query.asset_name)
+
+         channel_queries: List[ChannelQuery] = []
+         for c in query.channels:
+             if isinstance(c, ChannelQuery):
+                 channel_queries.append(c)
+             elif isinstance(c, CalculatedChannelQuery):
+                 for ref in c.expression_channel_references:
+                     channel_name = ref["channel_name"]
+                     component = ref.get("component")
+                     channel_queries.append(
+                         ChannelQuery(channel_name=channel_name, component=component)
+                     )
+
+         channels = await self._load_channels(asset, channel_queries)
+         runs = await self._load_runs(query.channels)
+
+         queries: List[Query] = []
+
+         for channel_query in query.channels:
+             if isinstance(channel_query, ChannelQuery):
+                 fqn = channel_query.fqn()
+                 run_name = channel_query.run_name
+                 targets = channels.get(fqn)
+
+                 if not targets:
+                     raise SiftError(
+                         f"An unexpected error occurred. Expected channel '{fqn}' to have been loaded."
+                     )
+                 cqueries = [ChannelQueryPb(channel_id=channel.channel_id) for channel in targets]
+
+                 if run_name is not None:
+                     run = runs.get(run_name)
+
+                     if run is None:
+                         raise SiftError(
+                             f"An unexpected error occurred. Expected run '{run_name}' to have been loaded."
+                         )
+
+                     for cquery in cqueries:
+                         cquery.run_id = run.run_id
+
+                 for cquery in cqueries:
+                     queries.append(Query(channel=cquery))
+
+             elif isinstance(channel_query, CalculatedChannelQuery):
+                 expression_channel_references = []
+
+                 for expr_ref in channel_query.expression_channel_references:
+                     validate_channel_reference(expr_ref["reference"])
+
+                     fqn = channel_fqn(expr_ref["channel_name"], expr_ref.get("component"))
+
+                     targets = channels.get(fqn)
+
+                     if not targets:
+                         raise SiftError(
+                             f"An unexpected error occurred. Expected channel '{fqn}' to have been loaded."
+                         )
+
+                     channel_id = targets[0].channel_id
+
+                     if len(targets) > 1:
+                         target_data_type = expr_ref.get("data_type")
+
+                         if target_data_type is None:
+                             raise ValueError(
+                                 f"Found multiple channels with the fully qualified name '{fqn}'. A 'data_type' must be provided in `ExpressionChannelReference`."
+                             )
+
+                         for target in targets:
+                             if ChannelDataType.from_pb(target.data_type) == target_data_type:
+                                 channel_id = target.channel_id
+                                 break
+
+                     expression_channel_references.append(
+                         ExpressionChannelReference(
+                             channel_reference=expr_ref["reference"], channel_id=channel_id
+                         )
+                     )
+
+                 expression_request = ExpressionRequest(
+                     expression=channel_query.expression,
+                     expression_channel_references=expression_channel_references,
+                 )
+
+                 calculated_cquery = CalculatedChannelQueryPb(
+                     channel_key=channel_query.channel_key,
+                     expression=expression_request,
+                 )
+
+                 run_name = channel_query.run_name
+
+                 if run_name is not None:
+                     run = runs.get(run_name)
+
+                     if run is None:
+                         raise SiftError(
+                             f"An unexpected error occurred. Expected run '{run_name}' to have been loaded."
+                         )
+
+                     calculated_cquery.run_id = run.run_id
+
+                 queries.append(Query(calculated_channel=calculated_cquery))
+
+             else:
+                 raise DataError("Unknown channel query type.")
+
+         await self._validate_queries(queries)
+
+         start_time = to_pb_timestamp(query.start_time)
+         end_time = to_pb_timestamp(query.end_time)
+         sample_ms = query.sample_ms
+         page_size = query.page_size
+
+         tasks = []
+
+         for batch in self._batch_queries(queries):
+             req = GetDataRequest(
+                 start_time=start_time,
+                 end_time=end_time,
+                 sample_ms=sample_ms,
+                 page_size=page_size,
+                 queries=batch,
+             )
+             task = asyncio.create_task(self._get_data(req))
+             tasks.append(task)
+
+         data_pages: List[Iterable[Any]] = []
+
+         for pages in await asyncio.gather(*tasks):
+             # Empty pages will have no effect
+             data_pages.extend(pages)
+
+         return DataQueryResult(self._merge_and_sort_pages(data_pages))
+
+     async def _get_data(self, req: GetDataRequest) -> List[Iterable[Any]]:
+         pages: List[Iterable[Any]] = []
+
+         start_time = req.start_time
+         end_time = req.end_time
+         sample_ms = req.sample_ms
+         page_size = req.page_size
+         queries = req.queries
+         next_page_token = ""
+
+         while True:
+             next_page_req = GetDataRequest(
+                 start_time=start_time,
+                 end_time=end_time,
+                 sample_ms=sample_ms,
+                 page_size=page_size,
+                 queries=queries,
+                 page_token=next_page_token,
+             )
+             response = cast(GetDataResponse, await self._data_service_stub.GetData(next_page_req))
+
+             pages.append(response.data)
+             next_page_token = response.next_page_token
+
+             if len(next_page_token) == 0:
+                 break
+
+         return pages
+
+     def _merge_and_sort_pages(
+         self, pages: List[Iterable[Any]]
+     ) -> Dict[str, List[ChannelTimeSeries]]:
+         if len(pages) == 0:
+             return {}
+
+         merged_values_by_channel: Dict[str, List[ChannelTimeSeries]] = {}
+
+         for page in pages:
+             for raw_channel_values in page:
+                 parsed_channel_data = try_deserialize_channel_data(cast(Any, raw_channel_values))
+
+                 for metadata, cvalues in parsed_channel_data:
+                     channel = metadata.channel
+                     fqn = channel_fqn(channel.name, channel.component)
+
+                     if not fqn:
+                         fqn = channel.channel_id
+
+                     time_series = merged_values_by_channel.get(fqn)
+
+                     if time_series is None:
+                         merged_values_by_channel[fqn] = [
+                             ChannelTimeSeries(
+                                 data_type=cvalues.data_type,
+                                 time_column=cvalues.time_column,
+                                 value_column=cvalues.value_column,
+                             ),
+                         ]
+                     else:
+                         for series in time_series:
+                             if series.data_type == cvalues.data_type:
+                                 series.time_column.extend(cvalues.time_column)
+                                 series.value_column.extend(cvalues.value_column)
+                                 break
+                         else:  # for-else
+                             # Situation in which multiple channels with identical fully-qualified names but different types.
+                             time_series.append(
+                                 ChannelTimeSeries(
+                                     data_type=cvalues.data_type,
+                                     time_column=cvalues.time_column,
+                                     value_column=cvalues.value_column,
+                                 )
+                             )
+
+         for data in merged_values_by_channel.values():
+             for channel_data in data:
+                 channel_data.sort_time_series()
+
+         return merged_values_by_channel
+
+     def _bust_cache(self):
+         self._cached_assets.clear()
+         self._cached_channels.clear()
+         self._cached_runs.clear()
+
+     async def _load_asset(self, asset_name: str) -> Asset:
+         asset = self._cached_assets.get(asset_name)
+
+         if asset is None:
+             asset = await self._get_asset_by_name(asset_name)
+             self._cached_assets[asset.name] = asset
+
+         return asset
+
+     async def _load_channels(
+         self,
+         asset: Asset,
+         channel_queries: List[ChannelQuery],
+     ) -> Dict[ChannelFqn, List[Channel]]:
+         if self._cached_channels.get(asset.name) is None:
+             sift_channels = await self._get_channels_by_asset_id(asset.asset_id, channel_queries)
+
+             channels = defaultdict(list)
+
+             for c in sift_channels:
+                 channels[channel_fqn(c.name, c.component)].append(c)
+
+             self._cached_channels[asset.name] = channels
+             return self._cached_channels[asset.name]
+
+         cached_channels = self._cached_channels[asset.name]
+         channels_to_retrieve: List[ChannelQuery] = []
+         for query in channel_queries:
+             fqn = channel_fqn(query.channel_name, query.component)
+             if cached_channels.get(fqn) is None:
+                 channels_to_retrieve.append(query)
+
+         sift_channels = []
+         if len(channels_to_retrieve) > 0:
+             sift_channels = await self._get_channels_by_asset_id(
+                 asset.asset_id, channels_to_retrieve
+             )
+
+         channels = defaultdict(list)
+
+         for c in sift_channels:
+             channels[channel_fqn(c.name, c.component)].append(c)
+
+         if len(channels) > 0:
+             self._cached_channels[asset.name].update(channels)
+
+         return self._cached_channels[asset.name]
+
+     async def _load_runs(
+         self, channel_queries: List[Union[ChannelQuery, CalculatedChannelQuery]]
+     ) -> Dict[RunName, Run]:
+         run_names: Set[str] = set()
+
+         for channel_query in channel_queries:
+             run_name = channel_query.run_name
+
+             if run_name is not None and len(run_name) > 0:
+                 run_names.add(run_name)
+
+         runs = {}
+         run_names_to_fetch = set()
+
+         for run_name in run_names:
+             run = self._cached_runs.get(run_name)
+
+             if run is not None:
+                 runs[run.name] = run
+             else:
+                 run_names_to_fetch.add(run_name)
+
+         for run in await self._get_runs_by_names(run_names_to_fetch):
+             self._cached_runs[run.name] = run
+             runs[run.name] = run
+
+         return runs
+
+     async def _get_asset_by_name(self, asset_name: str) -> Asset:
+         req = ListAssetsRequest(
+             filter=f'name=="{asset_name}"',
+             page_size=1,
+         )
+         res = cast(ListAssetsResponse, await self._asset_service_stub.ListAssets(req))
+         assets = res.assets
+
+         if len(assets) == 0:
+             raise DataError(f"Asset of name '{asset_name}' does not exist.")
+
+         return res.assets[0]
+
+     async def _get_runs_by_names(self, run_names: Set[str]) -> List[Run]:
+         if len(run_names) == 0:
+             return []
+
+         runs: List[Run] = []
+
+         filter = cel_in("name", run_names)
+         page_size = 1_000
+         next_page_token = ""
+
+         while True:
+             req = ListRunsRequest(
+                 filter=filter,
+                 page_size=page_size,
+                 page_token=next_page_token,
+             )
+             res = cast(ListRunsResponse, await self._run_service_stub.ListRuns(req))
+             runs.extend(res.runs)
+
+             next_page_token = res.next_page_token
+
+             if len(next_page_token) == 0:
+                 break
+
+         seen_sift_runs = set()
+
+         for sift_run in runs:
+             seen_sift_runs.add(sift_run.name)
+
+         for run_name in run_names:
+             if run_name not in seen_sift_runs:
+                 raise DataError(f"Run of name '{run_name}' does not exist.")
+
+         return runs
+
+     async def _get_channels_by_asset_id(
+         self, asset_id: str, channel_queries: List[ChannelQuery]
+     ) -> List[Channel]:
+         if len(asset_id) == 0 or len(channel_queries) == 0:
+             return []
+
+         channels: List[Channel] = []
+
+         channel_names = []
+
+         for query in channel_queries:
+             channel_names.append(query.channel_name)
+
+         name_in = cel_in("name", channel_names)
+
+         filter = f'asset_id=="{asset_id}" && {name_in}'
+         page_size = 1_000
+         next_page_token = ""
+
+         while True:
+             req = ListChannelsRequest(
+                 filter=filter,
+                 page_size=page_size,
+                 page_token=next_page_token,
+             )
+             res = cast(ListChannelsResponse, await self._channel_service_stub.ListChannels(req))
+             channels.extend(res.channels)
+             next_page_token = res.next_page_token
+
+             if len(next_page_token) == 0:
+                 break
+
+         return channels
+
+     def _batch_queries(self, queries: List[Query]) -> List[List[Query]]:
+         if len(queries) == 0:
+             return []
+
+         batches: List[List[Query]] = []
+         batch_size = self.__class__.REQUEST_BATCH_SIZE
+
+         for i in range(0, len(queries), batch_size):
+             batches.append(queries[i : i + batch_size])
+
+         return batches
+
+     async def _validate_queries(self, queries: List[Query]):
+         queries_to_validate: List[ExpressionRequest] = []
+
+         for query in queries:
+             if query.HasField("calculated_channel"):
+                 queries_to_validate.append(query.calculated_channel.expression)
+
+         if len(queries_to_validate) > 0:
+             tasks = []
+
+             for to_validate in queries_to_validate:
+                 task = asyncio.create_task(self._validate_expression(to_validate))
+                 tasks.append(task)
+
+             for result in await asyncio.gather(*tasks):
+                 if result is not None:
+                     expr, err = result
+                     raise ValueError(f"Encountered an invalid expression '{expr}': {err}")
+
+     async def _validate_expression(self, req: ExpressionRequest) -> Optional[Tuple[str, Exception]]:
+         try:
+             self._calculated_channel_service_stub.ValidateExpression(req)
+             return None
+         except Exception as err:
+             return (req.expression, err)
+
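For orientation, here is a minimal, hypothetical sketch of how the `DataService` added above might be driven. It is not part of the package diff: the transport helpers (`SiftChannelConfig`, `use_sift_async_channel`), the exact constructor arguments, and all concrete values (asset name, channel name, run name, time range) are assumptions or placeholders.

```python
import asyncio
from datetime import datetime, timezone

from sift_py.data.query import ChannelQuery, DataQuery
from sift_py.data.service import DataService
from sift_py.grpc.transport import SiftChannelConfig, use_sift_async_channel


async def main():
    # Placeholder endpoint and credentials.
    channel_config: SiftChannelConfig = {"uri": "sift-uri", "apikey": "sift-api-key"}

    async with use_sift_async_channel(channel_config) as channel:
        data_service = DataService(channel)

        query = DataQuery(
            asset_name="YourAssetName",
            start_time=datetime(2024, 7, 4, 18, 9, 8, tzinfo=timezone.utc),
            end_time=datetime(2024, 7, 4, 18, 9, 11, tzinfo=timezone.utc),
            channels=[
                ChannelQuery(channel_name="voltage", run_name="your-run-name"),
            ],
        )

        # `execute` resolves the asset, channels, and runs, then pages through
        # the data API concurrently (one task per query batch).
        result = await data_service.execute(query)
        # See `sift_py.data.query.DataQueryResult` for how to access the merged time series.


asyncio.run(main())
```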
sift_py/data_import/__init__.py
@@ -0,0 +1,130 @@
+ """
+ This module contains services to facilitate importing data.
+ It also provides utilities to easily query the import status.
+
+ The fundamental components of this module are the following:
+ - `sift_py.data_import.config.CsvConfig`
+ - `sift_py.data_import.csv.CsvUploadService`
+ - `sift_py.data_import.status.DataImportService`
+
+
+ ## Simple CSV Upload
+
+ A simple CSV upload without needing to craft a custom CSV config can be done like so:
+ ```python
+ from sift_py.data_import.csv import CsvUploadService
+ from sift_py.data_import.status import DataImportService
+ from sift_py.rest import SiftRestConfig
+
+ rest_config: SiftRestConfig = {
+     "uri": sift_uri,
+     "apikey": apikey,
+ }
+
+ asset_name = "Your Asset Name"
+ csv_upload_service = CsvUploadService(rest_config)
+ import_service: DataImportService = csv_upload_service.simple_upload(asset_name, "sample_data.csv")
+
+ # Blocks until the import is completed.
+ import_service.wait_until_complete()
+ ```
+
+ This example makes several assumptions about how the data is formatted, for example that the first column
+ contains datetime-formatted timestamps. See the docstring for `simple_upload` to see what can be overridden.
+
+ ## TDMS Upload
+
+ Specify `sift-stack-py[tdms]` in your dependencies to use the TDMS upload service.
+ TDMS files can be uploaded like so:
+ ```python
+ from sift_py.data_import.tdms import TdmsUploadService
+ from sift_py.data_import.status import DataImportService
+ from sift_py.rest import SiftRestConfig
+
+ rest_config: SiftRestConfig = {
+     "uri": sift_uri,
+     "apikey": apikey,
+ }
+
+ asset_name = "Your Asset Name"
+ tdms_upload_service = TdmsUploadService(rest_config)
+ import_service: DataImportService = tdms_upload_service.simple_upload(asset_name, "sample_data.tdms")
+
+ # Blocks until the import is completed.
+ import_service.wait_until_complete()
+ ```
+
+ If you want to upload TDMS groups as Sift components, set `group_into_components` to True:
+ ```python
+ tdms_upload_service.simple_upload(asset_name, "sample_data.tdms", group_into_components=True)
+ ```
+
+ Sometimes there are TDMS channels without valid data or timing information. You can skip these channels by
+ setting `ignore_errors` to True:
+ ```python
+ tdms_upload_service.simple_upload(asset_name, "sample_data.tdms", ignore_errors=True)
+ ```
+ The channels being skipped will be printed to stdout.
+
+ ## CSV Upload with custom CSV config
+
+ If your data is formatted in a specific way, you can create a `CsvConfig` that will be used to properly
+ parse your data:
+ ```python
+ import csv
+
+ from sift_py.data_import.csv import CsvUploadService
+ from sift_py.data_import.status import DataImportService
+ from sift_py.rest import SiftRestConfig
+ from sift_py.data_import.config import CsvConfig
+
+ rest_config: SiftRestConfig = {
+     "uri": sift_uri,
+     "apikey": apikey,
+ }
+
+ asset_name = "Your Asset Name"
+ csv_upload_service = CsvUploadService(rest_config)
+
+ # Create CSV config.
+ input_csv = "sample_data.csv"
+
+ # Parse CSV to get channel names.
+ data_config = {}
+ with open(input_csv, "r") as f:
+     reader = csv.DictReader(f)
+     headers = next(reader)
+     for i, channel in enumerate(headers):
+         if channel == "timestamp":
+             continue
+         data_config[i + 1] = {
+             "name": channel,
+             # This example assumes all channels are doubles.
+             # Can also use `ChannelDataType.DOUBLE` or `double`
+             "data_type": "CHANNEL_DATA_TYPE_DOUBLE",
+             "description": f"Example channel {channel}",
+         }
+
+ csv_config = CsvConfig(
+     {
+         "asset_name": asset_name,
+         "first_data_row": 2,
+         "time_column": {
+             "format": "TIME_FORMAT_ABSOLUTE_DATETIME",
+             # Can also use `TimeFormatType.ABSOLUTE_DATETIME`
+             "column_number": 1,
+         },
+         "data_columns": data_config,
+     }
+ )
+
+ import_service: DataImportService = csv_upload_service.upload(input_csv, csv_config)
+ import_service.wait_until_complete()
+ ```
+
+ In this example the CSV config is built programmatically. You can also use a JSON file directly:
+ ```python
+ import json
+ from sift_py.data_import.config import CsvConfig
+
+ with open("config.json") as f:
+     csv_config = CsvConfig(json.load(f))
+ ```
+ """
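As a hypothetical complement to the last example in the docstring above (and not part of the package diff), the `config.json` it loads could be produced from the same dictionary structure shown in the custom-config example. Field names mirror that example; the asset name and the single channel entry are placeholders.

```python
import json

# Same shape as the dict passed to CsvConfig above; values are placeholders.
config = {
    "asset_name": "Your Asset Name",
    "first_data_row": 2,
    "time_column": {
        "format": "TIME_FORMAT_ABSOLUTE_DATETIME",
        "column_number": 1,
    },
    "data_columns": {
        # JSON object keys are strings; column 2 is the first data column here.
        "2": {
            "name": "velocity",
            "data_type": "CHANNEL_DATA_TYPE_DOUBLE",
            "description": "Example channel velocity",
        },
    },
}

with open("config.json", "w") as f:
    json.dump(config, f, indent=2)
```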