kelvin-python-api-client 0.0.1 (kelvin_python_api_client-0.0.1-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. kelvin/api/client/__init__.py +15 -0
  2. kelvin/api/client/api/app_manager.py +646 -0
  3. kelvin/api/client/api/app_registry.py +342 -0
  4. kelvin/api/client/api/asset.py +1012 -0
  5. kelvin/api/client/api/asset_insights.py +67 -0
  6. kelvin/api/client/api/bridge.py +306 -0
  7. kelvin/api/client/api/control_change.py +398 -0
  8. kelvin/api/client/api/data_tag.py +499 -0
  9. kelvin/api/client/api/datastreams.py +1021 -0
  10. kelvin/api/client/api/filestorage.py +234 -0
  11. kelvin/api/client/api/instance.py +559 -0
  12. kelvin/api/client/api/orchestration.py +717 -0
  13. kelvin/api/client/api/parameters.py +417 -0
  14. kelvin/api/client/api/recommendation.py +804 -0
  15. kelvin/api/client/api/secret.py +173 -0
  16. kelvin/api/client/api/thread.py +435 -0
  17. kelvin/api/client/api/timeseries.py +273 -0
  18. kelvin/api/client/api/user.py +382 -0
  19. kelvin/api/client/api/workload.py +437 -0
  20. kelvin/api/client/base_client.py +924 -0
  21. kelvin/api/client/base_model.py +187 -0
  22. kelvin/api/client/client.py +181 -0
  23. kelvin/api/client/config.py +709 -0
  24. kelvin/api/client/data_model.py +523 -0
  25. kelvin/api/client/dataframe_conversion.py +172 -0
  26. kelvin/api/client/deeplist.py +285 -0
  27. kelvin/api/client/error.py +77 -0
  28. kelvin/api/client/model/__init__.py +3 -0
  29. kelvin/api/client/model/enum.py +82 -0
  30. kelvin/api/client/model/pagination.py +61 -0
  31. kelvin/api/client/model/requests.py +3352 -0
  32. kelvin/api/client/model/response.py +68 -0
  33. kelvin/api/client/model/responses.py +4799 -0
  34. kelvin/api/client/model/type.py +2025 -0
  35. kelvin/api/client/py.typed +0 -0
  36. kelvin/api/client/retry.py +88 -0
  37. kelvin/api/client/serialize.py +222 -0
  38. kelvin/api/client/utils.py +316 -0
  39. kelvin/api/client/version.py +16 -0
  40. kelvin_python_api_client-0.0.1.dist-info/METADATA +75 -0
  41. kelvin_python_api_client-0.0.1.dist-info/RECORD +43 -0
  42. kelvin_python_api_client-0.0.1.dist-info/WHEEL +5 -0
  43. kelvin_python_api_client-0.0.1.dist-info/top_level.txt +1 -0
kelvin/api/client/data_model.py
@@ -0,0 +1,523 @@
+"""
+Data Model.
+"""
+
+from __future__ import annotations
+
+import json
+from collections import ChainMap
+from datetime import datetime, timezone
+from functools import wraps
+from importlib import import_module
+from inspect import signature
+from types import FunctionType, MethodType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
+
+import structlog
+from pydantic import Extra, ValidationError, validator
+from pydantic.main import ModelField
+from typing_inspect import get_args, get_origin
+
+from kelvin.api.client.serialize import lower
+
+from .base_model import BaseModel, BaseModelMeta
+from .deeplist import deeplist
+from .error import APIError, ResponseError
+from .serialize import is_json
+from .utils import file_tuple, instance_classmethod, metadata_tuple, snake_name
+
+if TYPE_CHECKING:
+    from .client import Client
+
+logger = structlog.get_logger(__name__)
+
+T = TypeVar("T")
+
+JSON_CONTENT_TYPES = (
+    "application/json",
+    "application/x-json-stream",
+)
+MODELS = "kelvin.api.client.model"
+
+
+def resolve_fields(x: Mapping[str, Any]) -> Dict[str, Any]:
+    """Resolve fields from data models."""
+
+    result: Dict[str, Any] = {**x}
+    items = [*x.items()]
+
+    for name, value in items:
+        if "_" in name and isinstance(value, DataModel):
+            head, tail = name.rsplit("_", 1)
+            if head != type(value).__name__.lower():
+                raise TypeError(f"Unable to get {name!r} from {type(value).__name__!r} object")
+            value = result[name] = value[tail]
+        if isinstance(value, datetime):
+            suffix = "Z" if value.microsecond else ".000000Z"
+            result[name] = value.astimezone(timezone.utc).replace(tzinfo=None).isoformat() + suffix
+
+    return result
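+
+# Illustrative sketch (hypothetical Asset model, not part of the package):
+# resolve_fields dereferences "<model>_<field>" names and serialises datetimes, e.g.
+#   resolve_fields({"asset_name": asset})
+#   # -> {"asset_name": asset["name"]}
+#   resolve_fields({"since": datetime(2024, 1, 1, tzinfo=timezone.utc)})
+#   # -> {"since": "2024-01-01T00:00:00.000000Z"}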
+
+
+class DataModelMeta(BaseModelMeta):
+    """DataModel metaclass."""
+
+    def __new__(
+        metacls: Type[DataModelMeta], name: str, bases: Tuple[Type, ...], __dict__: Dict[str, Any]
+    ) -> DataModelMeta:
+        cls = cast(DataModelMeta, super().__new__(metacls, name, bases, __dict__))
+
+        # kill unused fields so that they can be used by models
+        cls.fields = cls.schema = None  # type: ignore
+
+        return cls
+
+    def __repr__(self) -> str:
+        """Pretty representation."""
+
+        methods = "\n".join(
+            f" - {name}: " + x.__doc__.lstrip().split("\n")[0]
+            for name, x in ((name, getattr(self, name)) for name in sorted(vars(self)) if not name.startswith("_"))
+            if x.__doc__ is not None and isinstance(x, (FunctionType, MethodType))
+        )
+
+        return f"{self.__name__}:\n{methods}"
+
+    def __str__(self) -> str:
+        """Return str(self)."""
+
+        return f"<class {self.__name__!r}>"
+
+
+def get_type(name: str) -> Type:
+    module_name, type_name = name.rsplit(".", 1)
+    return getattr(import_module(f"{MODELS}.{module_name}"), type_name)
+
+
+class DataModel(BaseModel, metaclass=DataModelMeta):
+    """Model base-class."""
+
+    if TYPE_CHECKING:
+        fields: Any
+        schema: Any
+
+    __slots__ = ("_client",)
+
+    class Config(BaseModel.Config):
+        """Model config."""
+
+        extra = Extra.allow
+
+    def __init__(self, client: Optional[Client] = None, **kwargs: Any) -> None:
+        """Initialise model."""
+
+        super().__init__(**kwargs)
+
+        object.__setattr__(self, "_client", client)
+
+    @property
+    def client(self) -> Optional[Client]:
+        """Resource client."""
+
+        if self._client is not None:
+            return self._client
+
+        if self._owner is not None:
+            return self._owner.client
+
+        return None
+
+    def __getattribute__(self, name: str) -> Any:
+        """Get attribute."""
+
+        if name.startswith("_"):
+            return super().__getattribute__(name)
+
+        try:
+            result = super().__getattribute__(name)
+        except AttributeError:
+            if "_" in name:
+                # fall back to attribute on child field
+                head, tail = name.rsplit("_", 1)
+                if head in self.__fields__:
+                    head = getattr(self, head)
+                    try:
+                        return getattr(head, tail)
+                    except AttributeError:
+                        pass
+            raise
+
+        return deeplist(result) if isinstance(result, list) else result
+
+    def __setattr__(self, name: str, value: Any) -> None:
+        """Set attribute."""
+
+        if name.startswith("_"):
+            return super().__setattr__(name, value)
+
+        try:
+            super().__setattr__(name, value)
+        except ValueError:
+            if "_" in name:
+                # fall back to attribute on child field
+                head, tail = name.rsplit("_", 1)
+                if head in self.__fields__:
+                    head = getattr(self, head)
+                    try:
+                        setattr(head, tail, value)
+                    except ValueError:
+                        pass
+                    else:
+                        return
+            raise
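+
+    # Illustrative note (names hypothetical): the child-field fallbacks above let
+    # "<field>_<attr>" names reach into nested models, e.g. `m.app_name` resolves
+    # to `m.app.name` on a model `m` with an `app` field when `app_name` itself
+    # is not set.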
+
+    @staticmethod
+    def translate(names: Optional[Mapping[str, str]] = None) -> Callable[[Callable[..., T]], Callable[..., T]]:
+        """Translate names and obtain data from object."""
+
+        def outer(f: Callable[..., T]) -> Callable[..., T]:
+            positional_args = [
+                name
+                for name, x in signature(f).parameters.items()
+                if name not in {"_client", "_dry_run"} and x.default is x.empty
+            ][1:]
+
+            @wraps(f)
+            def inner(obj: Any, *args: Any, **kwargs: Any) -> T:
+                if isinstance(obj, DataModel):
+                    owner_prefix = snake_name(type(obj._owner).__name__) + "_" if obj._owner is not None else None
+                    for arg_name in positional_args[len(args) :]:
+                        if names is not None and arg_name in names:
+                            source = names[arg_name]
+                            kwargs[arg_name] = obj[source]
+                        elif arg_name in obj:
+                            kwargs[arg_name] = obj[arg_name]
+                        elif owner_prefix is not None and arg_name.startswith(owner_prefix):
+                            try:
+                                kwargs[arg_name] = obj._owner[arg_name.replace(owner_prefix, "")]
+                            except KeyError:
+                                pass
+
+                return f(obj, *args, **kwargs)
+
+            return inner
+
+        return outer
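+
+    # Illustrative sketch (hypothetical names): translate() fills the missing
+    # positional arguments of a bound API call from the model instance itself,
+    # renaming via `names` where given:
+    #
+    #   @DataModel.translate({"asset_name": "name"})
+    #   def get_asset(self, asset_name: str): ...
+    #
+    #   asset.get_asset()  # asset_name is taken from asset["name"]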
+
+    @instance_classmethod
+    def _make_request(
+        obj: Any,
+        client: Optional[Client],
+        method: str,
+        path: str,
+        values: Mapping[str, Any],
+        params: Mapping[str, Any],
+        files: Mapping[str, Any],
+        headers: Mapping[str, Any],
+        data: Optional[Union[Mapping[str, Any], Sequence[Mapping[str, Any]]]],
+        body_type: Optional[str],
+        array_body: bool,
+        result_types: Mapping[str, Optional[Type]],
+        stream: bool = False,
+        dry_run: bool = False,
+        kwargs: Optional[Dict[str, Any]] = None,
+    ) -> Any:
+        """Make request to API."""
+
+        if isinstance(obj, DataModel):
+            if client is None:
+                client = obj.client
+
+        if client is None:
+            raise ValueError("No client set.")
+
+        # check for fields that need to be dereferenced
+        values = resolve_fields(values)
+        params = resolve_fields(params)
+        files = resolve_fields(files)
+        headers = resolve_fields(headers)
+
+        if "{" in path:
+            path = path.format_map(values)
+
+        body_data: Any
+
+        if body_type is not None:
+            body_type_ = get_type(body_type)
+
+            def prepare(x: Mapping[str, Any]) -> Dict[str, Any]:
+                if kwargs:
+                    x = ChainMap(kwargs, x)  # type: ignore
+                return {
+                    k: v
+                    for k, v in ((name, x.get(name)) for name in cast(Type[DataModel], body_type_).__fields__)
+                    if v is not None
+                }
+
+            if array_body:
+                if data is None:
+                    data = [{}] if kwargs else []
+                elif not (isinstance(data, Sequence) and all(isinstance(x, Mapping) for x in data)):
+                    raise ValueError("Data must be a sequence of mappings")
+
+                body_data = [
+                    body_type_(**lower(prepare(x))).dict(by_alias=True) for x in cast(Sequence[Mapping[str, Any]], data)
+                ]
+            else:
+                if data is None:
+                    data = {}
+                elif not isinstance(data, Mapping):
+                    raise ValueError("Data must be a mapping")
+
+                if isinstance(obj, DataModel):
+                    data = ChainMap(data, obj)  # type: ignore
+
+                body_data = body_type_(**lower(prepare(data))).dict(by_alias=True)
+        else:
+            body_data = None
+
+        metadata = None
+        if "metadata" in files:
+            metadata = files.pop("metadata")
+        files = {k: file_tuple(v) for k, v in files.items()}
+        if metadata is not None:
+            files = {**files, "metadata": metadata_tuple(metadata)}
+
+        if dry_run:
+            return {
+                "path": path,
+                "method": method,
+                "data": body_data,
+                "params": params,
+                "files": files,
+                "headers": headers,
+            }
+
+        response = client.request(path, method, body_data, params, files, headers, raise_error=False, stream=stream)
311
+
312
+ try:
313
+ content_type = response.headers.get("Content-Type", "")
314
+ if content_type == "application/octet-stream":
315
+ return response.iter_content(1024)
316
+
317
+ status_code = response.status_code
318
+
319
+ result_type = result_types.get(str(status_code), ...)
320
+
321
+ if not response.ok:
322
+ if result_type is ...:
323
+ # try to fill gap with first not "OK" response
324
+ result_type = next(
325
+ (v for k, v in sorted(result_types.items()) if not 200 <= status_code < 300),
326
+ ...,
327
+ )
328
+ if result_type is ...:
329
+ logger.warning("Unknown response code", status_code=status_code)
330
+ result_type = None
331
+ if content_type == "application/json" or is_json(response.text):
332
+ raise APIError(response)
333
+ response.raise_for_status()
334
+
335
+ elif result_type is ...:
336
+ # try to fill gap with first "OK" response
337
+ result_type = next((v for k, v in sorted(result_types.items()) if 200 <= status_code < 300), ...)
338
+ if result_type is ...:
339
+ logger.warning("Unknown response code", status_code=status_code)
340
+ result_type = None
341
+
342
+ if isinstance(result_type, type):
343
+ if not content_type.startswith(JSON_CONTENT_TYPES):
344
+ with response:
345
+ raise ResponseError(
346
+ f"Unexpected response for {result_type.__name__}", # type: ignore
347
+ response,
348
+ )
349
+
350
+ def converter(x: Any) -> Any:
351
+ return result_type(client=client, **x) # type: ignore
352
+
353
+ elif get_origin(result_type) is list:
354
+ result_type, *_ = get_args(result_type)
355
+ if not content_type.startswith(JSON_CONTENT_TYPES):
356
+ with response:
357
+ raise ResponseError(
358
+ f"Unexpected response for {result_type.__name__}", # type: ignore
359
+ response,
360
+ )
361
+
362
+ def converter(x: Any) -> Any:
363
+ return [result_type(client=client, **v) for v in x] # type: ignore
364
+
365
+ else:
366
+ if not content_type.startswith(JSON_CONTENT_TYPES):
367
+ with response:
368
+ return response.text or None
369
+
370
+ def converter(x: Any) -> Any:
371
+ return x
372
+
373
+ if not response.ok:
374
+ with response:
375
+ raise APIError(response, converter)
+
+            if stream:
+
+                def results() -> Iterator[Any]:
+                    i = -1
+                    errors = []
+                    success = False
+                    with response:
+                        for x in response.iter_lines():
+                            if not x:
+                                continue
+                            i += 1
+                            records = json.loads(x)
+                            if isinstance(records, dict):
+                                records = [records]
+
+                            for record in records:
+                                try:
+                                    yield converter(record)
+                                except ValidationError as e:
+                                    errors += [(i, e)]
+                                    continue
+                                else:
+                                    success = True
+
+                    if not errors:
+                        return
+
+                    if not success:
+                        raise errors[0][1] from None
+                    elif errors:
+                        summary = "\n".join(f" {i}: {x}" for i, x in errors)
+                        logger.warning("Skipped items", result_type=result_type, summary=summary)
+
+                results.__qualname__ = "results"
+
+                return results()
+            else:
+                with response:
+                    try:
+                        return converter(response.json())
+                    except ValidationError as e:
+                        raise e from None
+
+        except Exception:
+            response.close()
+            raise
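+
+    # Illustrative note: the generated API modules are expected to delegate to
+    # _make_request; with dry_run=True it returns the prepared request as a dict
+    # ({"path", "method", "data", "params", "files", "headers"}) instead of
+    # performing the HTTP call.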
+
+    @validator("*", pre=True)
+    def convert_datetime(cls, value: Any, field: ModelField) -> Any:
+        """Correct data-type for datetime values."""
+
+        if not isinstance(value, datetime):
+            return value
+
+        field_type = field.type_
+
+        if not isinstance(field_type, type):
+            return value
+
+        if issubclass(field_type, str):
+            suffix = "Z" if value.microsecond else ".000000Z"
+            return value.astimezone(timezone.utc).replace(tzinfo=None).isoformat() + suffix
+        elif issubclass(field_type, float):
+            return value.timestamp()
+        elif issubclass(field_type, int):
+            return int(value.timestamp() * 1e9)
+        else:
+            return value
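+
+    # Illustrative examples for dt = datetime(2024, 1, 1, tzinfo=timezone.utc):
+    #   str field   -> "2024-01-01T00:00:00.000000Z"
+    #   float field -> 1704067200.0 (seconds since epoch)
+    #   int field   -> 1704067200000000000 (nanoseconds since epoch)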
+
+
+P = TypeVar("P", bound=DataModel)
+
+
+class PaginatorDataModel(DataModel, Generic[P]):
+    """Paginator data-model."""
+
+    def __init__(self, **kwargs: Any) -> None:
+        """Initialise model."""
+
+        super().__init__(**kwargs)
+
+        for x in self.data:
+            object.__setattr__(x, "_client", self._client)
+
+    @validator("data", pre=True, check_fields=False)
+    def validate_data(cls, v: Sequence[Mapping[str, Any]], field: ModelField) -> List[P]:
+        """Validate data field."""
+
+        T = field.type_
+        results = []
+
+        for item in v:
+            try:
+                results += [T(**item)]
+            except ValidationError as e:
+                logger.warning("Skipped invalid item", name=T.__name__, item=item, error=e)
+
+        return results
+
+    def __getitem__(self, item: Union[str, int]) -> Any:
+        """Get item."""
+
+        if isinstance(item, int):
+            return self.data[item]
+
+        return super().__getitem__(item)
+
+    def scan(self, path: str, flatten: bool = True, method: str = "GET", data: Any = None) -> Iterator[P]:
+        """Iterate pages."""
+
+        result = self
+        client = self._client
+
+        while True:
+            if not result.data:
+                return
+
+            if flatten:
+                yield from result.data
+            else:
+                yield result.data
+
+            pagination = result.pagination
+            if pagination is None:
+                return
+
+            next_page = pagination.next_page
+            if next_page is None:
+                return
+
+            if "?" in next_page:
+                path = next_page
+                params = {}
+            else:
+                page_size = len(result.data)
+                params = {"next": next_page, "page_size": page_size}
+
+            with client.request(path, method=method, params=params, data=data) as response:
+                result = type(self)(**response.json(), client=client)
+
+    def fetch(self, path: str, method: str = "GET", data: Any = None) -> Sequence[P]:
+        """Fetch all data."""
+
+        return type(self.data)(self.scan(path, True, method=method, data=data))
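+
+    # Illustrative sketch (hypothetical endpoint path): iterate a paginated
+    # listing lazily with scan(), or materialise all pages with fetch():
+    #
+    #   for item in page.scan("/api/v4/assets/list"):
+    #       ...
+    #   items = page.fetch("/api/v4/assets/list")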
+
+
+DataModelBase = DataModel
kelvin/api/client/dataframe_conversion.py
@@ -0,0 +1,172 @@
+"""This module provides functions for converting data between timeseries and dataframes.
+
+The module includes the following functions:
+- `timeseries_to_dataframe`: Converts timeseries data into a dataframe.
+- `dataframe_to_timeseries`: Converts a dataframe into timeseries data.
+"""
+
+from __future__ import annotations
+
+from datetime import tzinfo
+from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Union, overload
+
+from pandas import DataFrame, MultiIndex, concat, json_normalize, to_datetime
+from typing_extensions import Literal
+from tzlocal import get_localzone
+
+from .utils import chunks, inflate, map_chunks
+
+STORAGE_FIELDS = {"resource", "source", "timestamp", "type"}
+DATA = [
+    "payload",
+]
+INDEX = [
+    "resource",
+    "timestamp",
+]
+DEFAULT_TZ = str(get_localzone())
+
+ExpandType = Literal["column", "index"]
+
+
+def parse_resource(resource: str) -> Dict[str, str]:
+    """Parse KRN resource string."""
+
+    try:
+        scheme, nid, nss = resource.split(":", 2)
+    except ValueError:
+        raise ValueError(f"Invalid resource {resource!r}")
+
+    if scheme != "krn":
+        raise ValueError(f"Invalid resource scheme {scheme!r}")
+
+    result: Dict[str, str] = {"nid": nid}
+
+    # Asset Datastream
+    if nid == "ad":
+        try:
+            asset_name, datastream_name = nss.split("/", 1)
+        except ValueError:
+            raise ValueError(f"Invalid resource string {nss!r} for resource {nid!r}")
+        result["asset_name"] = asset_name
+        result["datastream_name"] = datastream_name
+    else:
+        raise ValueError(f"Unsupported resource namespace {nid!r}")
+
+    return result
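+
+# Example: parse_resource("krn:ad:my-asset/temperature")
+#   -> {"nid": "ad", "asset_name": "my-asset", "datastream_name": "temperature"}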
+
+
+def _convert_timeseries(
+    x: Iterable[Mapping[str, Any]],
+    tz: Union[tzinfo, str],
+    expand_resource: Optional[ExpandType] = "index",
+) -> DataFrame:
+    """Convert timeseries into dataframe."""
+
+    data = DataFrame.from_records(x)
+    if data.empty:
+        return DataFrame(
+            {name: [] for name in DATA},
+            index=MultiIndex.from_tuples([], names=INDEX),
+        )
+
+    data["timestamp"] = to_datetime(data.timestamp, utc=True).dt.tz_convert(tz)
+    data.set_index(INDEX, inplace=True)
+
+    if expand_resource is None:
+        return data
+
+    data = data.reset_index("resource")
+    resource = json_normalize(data.pop("resource").apply(parse_resource))  # type: ignore
+    resource.index = data.index
+    data = concat([resource, data], axis=1)
+
+    if expand_resource == "column":
+        return data
+    elif expand_resource == "index":
+        return data.set_index([*resource.columns], append=True)
+    else:
+        raise ValueError(f"Unknown expansion-type {expand_resource!r}")
+
+
+@overload
+def timeseries_to_dataframe(
+    x: Iterable[Mapping[str, Any]],
+    chunk_size: Literal[None] = None,
+    tz: Union[tzinfo, str] = DEFAULT_TZ,
+    expand_resource: Optional[ExpandType] = "index",
+) -> DataFrame: ...
+
+
+@overload
+def timeseries_to_dataframe(
+    x: Iterable[Mapping[str, Any]],
+    chunk_size: int,
+    tz: Union[tzinfo, str] = DEFAULT_TZ,
+    expand_resource: Optional[ExpandType] = "index",
+) -> Iterator[DataFrame]: ...
+
+
+def timeseries_to_dataframe(
+    x: Iterable[Mapping[str, Any]],
+    chunk_size: Optional[int] = None,
+    tz: Union[tzinfo, str] = DEFAULT_TZ,
+    expand_resource: Optional[ExpandType] = "index",
+) -> Union[DataFrame, Iterator[DataFrame]]:
+    """Convert timeseries into dataframe, optionally in chunks."""
+
+    if chunk_size is None:
+        return _convert_timeseries(x, tz=tz, expand_resource=expand_resource)
+
+    return map_chunks(chunk_size, _convert_timeseries, x, tz=tz, expand_resource=expand_resource)
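+
+# Illustrative sketch: each record carries the storage fields plus a payload;
+# by default the resource KRN is expanded into extra index levels:
+#
+#   df = timeseries_to_dataframe([
+#       {"resource": "krn:ad:my-asset/temperature", "source": "krn:ad:my-asset/temperature",
+#        "timestamp": "2024-01-01T00:00:00Z", "type": "number", "payload": 1.0},
+#   ])
+#   # index: (timestamp, nid, asset_name, datastream_name); columns: source, type, payload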
+
+
+def _convert_dataframe(x: DataFrame) -> List[Dict[str, Any]]:
+    """Convert dataframe into timeseries."""
+
+    x = x.reset_index()
+
+    missing = STORAGE_FIELDS - {*x}
+    if missing:
+        raise ValueError(f"Missing fields: {', '.join(sorted(missing))}")
+
+    columns = {*x} - STORAGE_FIELDS
+
+    if not columns:
+        raise ValueError("No columns found")
+
+    if "payload" in columns:
+        extra = columns - {"payload"}
+        if extra:
+            raise ValueError(f"Unexpected columns: {', '.join(sorted(extra))}")
+
+    elif "field" in columns and "value" in columns:
+        extra = columns - {"field", "value"}
+        if extra:
+            raise ValueError(f"Unexpected columns: {', '.join(sorted(extra))}")
+        x = (
+            x.groupby([*STORAGE_FIELDS])[["field", "value"]]
+            .apply(lambda x: inflate(x.itertuples(index=False)))
+            .reset_index(name="payload")
+        )
+
+    else:
+        x["payload"] = [inflate(v.items()) for v in x[[*columns]].to_dict(orient="records")]
+        x = x.drop(columns=[*columns])
+
+    x["timestamp"] = x.timestamp.astype("int64")
+
+    return x.to_dict(orient="records")
+
+
+def dataframe_to_timeseries(
+    x: DataFrame, chunk_size: Optional[int] = None
+) -> Union[List[Dict[str, Any]], Iterator[List[Dict[str, Any]]]]:
+    """Convert dataframe into timeseries, optionally in chunks."""
+
+    if chunk_size is None:
+        return _convert_dataframe(x)
+
+    return (_convert_dataframe(chunk) for chunk in chunks(x, chunk_size))
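+
+# Illustrative sketch: the inverse of timeseries_to_dataframe; rows become
+# records with nanosecond timestamps:
+#
+#   records = dataframe_to_timeseries(df)            # one list of records
+#   for batch in dataframe_to_timeseries(df, 1000):  # lists of <= 1000 rows
+#       ...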