kelvin-python-api-client 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. kelvin/api/client/__init__.py +15 -0
  2. kelvin/api/client/api/app_manager.py +646 -0
  3. kelvin/api/client/api/app_registry.py +342 -0
  4. kelvin/api/client/api/asset.py +1012 -0
  5. kelvin/api/client/api/asset_insights.py +67 -0
  6. kelvin/api/client/api/bridge.py +306 -0
  7. kelvin/api/client/api/control_change.py +398 -0
  8. kelvin/api/client/api/data_tag.py +499 -0
  9. kelvin/api/client/api/datastreams.py +1021 -0
  10. kelvin/api/client/api/filestorage.py +234 -0
  11. kelvin/api/client/api/instance.py +559 -0
  12. kelvin/api/client/api/orchestration.py +717 -0
  13. kelvin/api/client/api/parameters.py +417 -0
  14. kelvin/api/client/api/recommendation.py +804 -0
  15. kelvin/api/client/api/secret.py +173 -0
  16. kelvin/api/client/api/thread.py +435 -0
  17. kelvin/api/client/api/timeseries.py +273 -0
  18. kelvin/api/client/api/user.py +382 -0
  19. kelvin/api/client/api/workload.py +437 -0
  20. kelvin/api/client/base_client.py +924 -0
  21. kelvin/api/client/base_model.py +187 -0
  22. kelvin/api/client/client.py +181 -0
  23. kelvin/api/client/config.py +709 -0
  24. kelvin/api/client/data_model.py +523 -0
  25. kelvin/api/client/dataframe_conversion.py +172 -0
  26. kelvin/api/client/deeplist.py +285 -0
  27. kelvin/api/client/error.py +77 -0
  28. kelvin/api/client/model/__init__.py +3 -0
  29. kelvin/api/client/model/enum.py +82 -0
  30. kelvin/api/client/model/pagination.py +61 -0
  31. kelvin/api/client/model/requests.py +3352 -0
  32. kelvin/api/client/model/response.py +68 -0
  33. kelvin/api/client/model/responses.py +4799 -0
  34. kelvin/api/client/model/type.py +2025 -0
  35. kelvin/api/client/py.typed +0 -0
  36. kelvin/api/client/retry.py +88 -0
  37. kelvin/api/client/serialize.py +222 -0
  38. kelvin/api/client/utils.py +316 -0
  39. kelvin/api/client/version.py +16 -0
  40. kelvin_python_api_client-0.0.1.dist-info/METADATA +75 -0
  41. kelvin_python_api_client-0.0.1.dist-info/RECORD +43 -0
  42. kelvin_python_api_client-0.0.1.dist-info/WHEEL +5 -0
  43. kelvin_python_api_client-0.0.1.dist-info/top_level.txt +1 -0
File without changes
@@ -0,0 +1,88 @@
1
+ """
2
+ Custom Retrier.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ from random import random
8
+ from typing import Any, Tuple, Union
9
+
10
+ import requests
11
+ from requests.adapters import Retry
12
+
13
+
14
class APIRetry(Retry):
    """Retry policy with sensible defaults for transient gateway errors."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Initialise retry, defaulting the status list and backoff factor."""

        # Retry only transient upstream failures unless the caller overrides.
        kwargs.setdefault(
            "status_forcelist",
            [
                requests.codes.bad_gateway,
                requests.codes.service_unavailable,
                requests.codes.gateway_timeout,
            ],
        )
        # Jittered backoff coefficient between 0.5 and 1.0 by default.
        kwargs.setdefault("backoff_factor", BackoffFactor((0.5, 1.0)))

        super().__init__(*args, **kwargs)
31
+
32
+
33
class BackoffFactor:
    """Random backoff coefficient.

    Behaves like a number whose value is drawn uniformly from ``[min, max]``
    each time it takes part in arithmetic, so it can be passed anywhere a
    numeric ``backoff_factor`` is expected while jittering the actual delay.
    """

    def __init__(self, factor: Union[float, Tuple[float, float]] = (0.0, 1.0)) -> None:
        """Initialise backoff coefficient.

        Args:
            factor: Either a fixed non-negative number or a ``(min, max)``
                pair describing the sampling range.

        Raises:
            ValueError: If the pair has the wrong length or min > max.
            TypeError: If any bound is not a non-negative real number.
        """

        if isinstance(factor, (tuple, list)):
            if len(factor) != 2:
                raise ValueError("Factor range must be a pair")
            # Use lo/hi locals instead of shadowing the min/max builtins.
            lo, hi = factor
            if not isinstance(lo, (int, float)) or lo < 0:
                raise TypeError("Factor range minimum must be a non-negative real number")
            if not isinstance(hi, (int, float)) or hi < 0:
                raise TypeError("Factor range maximum must be a non-negative real number")
            if lo > hi:
                raise ValueError("Factor range minimum must not be greater than maximum")
        else:
            if not isinstance(factor, (int, float)) or factor < 0:
                raise TypeError("Factor must be a non-negative real number")
            lo = hi = factor

        # Store the lower bound and the width of the sampling window.
        self.min, self.range = lo, hi - lo

    @property
    def _value(self) -> float:
        """Sample a concrete coefficient from the configured range."""
        return self.min + random() * self.range  # nosec

    def __mul__(self, x: float) -> float:
        return self._value * x

    def __rmul__(self, x: float) -> float:
        return x * self._value

    def __truediv__(self, x: float) -> float:
        return self._value / x

    def __rtruediv__(self, x: float) -> float:
        return x / self._value

    def __add__(self, x: float) -> float:
        return self._value + x

    def __radd__(self, x: float) -> float:
        return x + self._value

    def __sub__(self, x: float) -> float:
        return self._value - x

    def __rsub__(self, x: float) -> float:
        return x - self._value

    def __int__(self) -> int:
        return int(self._value)

    def __float__(self) -> float:
        return float(self._value)
@@ -0,0 +1,222 @@
1
+ """
2
+ Data serialisation.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ import json
8
+ import os
9
+ import re
10
+ from datetime import date, datetime, timedelta, timezone
11
+ from enum import Enum
12
+ from math import isfinite
13
+ from pathlib import Path
14
+ from typing import Any, Callable, Dict, Iterable, Mapping, Tuple, Type, cast
15
+
16
+ import yaml
17
+
18
+ from kelvin.api.client.base_model import BaseModelRoot
19
+
20
+ from .utils import chdir
21
+
22
+
23
def load_include(filename: str, loader: Callable[[Any], Any]) -> Any:
    """Load include data from a file, optionally drilling into a sub-item.

    Args:
        filename: Path to load; ``{VAR}`` placeholders are filled from the
            environment, and an optional ``:a.b.0`` suffix selects nested
            keys/indices from the loaded document.
        loader: Callable used to parse ``.yaml``/``.yml``/``.json`` files
            (receives the open file object); other files return raw text.

    Returns:
        The loaded (and optionally indexed) data.

    Raises:
        ValueError: If an environment variable or index level is unknown.
    """

    index: Any

    # Split off an optional ":key.path" selector after the filename.
    if ":" in filename:
        filename, index = filename.rsplit(":", 1)
    else:
        index = None

    # Expand {ENV_VAR} placeholders in the path.
    try:
        filename = filename.format_map(os.environ)
    except KeyError as e:
        # Chain the original KeyError for easier debugging.
        raise ValueError(f"Unknown path variable {e!s} in filename {filename!r}") from e

    path = Path(filename).expanduser().resolve()

    # Parse structured files; read everything else verbatim.  Nested
    # includes resolve relative to the including file's directory.
    with path.open("rt") as file, chdir(path.parent):
        result = loader(file) if path.suffix in (".yaml", ".yml", ".json") else file.read()

    if index is None:
        return result

    # Walk the dotted selector, treating (signed) numeric parts as indices.
    for level in index.split("."):
        if level[level[0] in "+-" :].isnumeric():
            level = int(level)
        try:
            result = result[level]
        except (IndexError, KeyError) as e:
            raise ValueError(f"Unknown level {level!r} in filename {str(filename)!r}") from e

    return result
55
+
56
+
57
# Bare JSON keyword tokens.
tokens = {"null", "true", "false", "NaN"}
# (first, last) character pairs that delimit JSON containers/strings.
delimiters = {("{", "}"), ("[", "]"), ('"', '"')}
# JSON number grammar (integers, decimals, exponents).
number_re = re.compile(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$")


def is_json(x: str) -> bool:
    """Heuristically check whether ``x`` looks like a JSON document."""

    if not x:
        return False

    # Keywords and plain digit runs are JSON.
    if x in tokens or x.isnumeric():
        return True

    # Anything else starting with a letter is not JSON, and a single
    # leftover character cannot form a container or quoted string.
    if x[0].isalpha() or len(x) == 1:
        return False

    # Matching container/string delimiters, or a full JSON number.
    return (x[0], x[-1]) in delimiters or bool(number_re.match(x))
81
+
82
+
83
def lower(x: Any, skip: bool = True) -> Any:
    """Lower data to a json-ready representation.

    Recursively converts ``x`` into plain JSON-compatible values.  When
    ``skip`` is true, mapping keys starting with ``"__"`` are dropped and
    un-lowerable values are elided (signalled internally by ``...``);
    when false, un-lowerable values raise ``ValueError`` instead.

    NOTE(review): the isinstance order below matters and must not change —
    ``str``/``Mapping`` before ``Iterable`` (strings and dicts are
    iterable), ``bool`` before ``int`` (bool subclasses int), and
    ``datetime`` before ``date`` (datetime subclasses date).
    """

    if x is None:
        return x
    if isinstance(x, str):
        return str(x)
    if isinstance(x, bool):
        return bool(x)
    if isinstance(x, int):
        return int(x)
    if isinstance(x, float):
        # NaN/inf are not valid JSON; map them to null.
        return x if isfinite(x) else None
    if isinstance(x, Mapping):
        if not skip:
            return {lower(k, skip): lower(v, skip) for k, v in x.items()}
        # Drop dunder-prefixed string keys, then drop elided (...) values.
        return {
            k: v
            for k, v in (
                (lower(k, skip), lower(v, skip))
                for k, v in x.items()
                if not isinstance(k, str) or not k.startswith("__")
            )
            if v is not ...
        }
    if isinstance(x, datetime):
        # Normalise to UTC; always emit a microseconds field and "Z" suffix
        # (isoformat omits microseconds when they are zero).
        suffix = "Z" if x.microsecond else ".000000Z"
        return x.astimezone(timezone.utc).replace(tzinfo=None).isoformat() + suffix
    if isinstance(x, date):
        return x.isoformat()
    if isinstance(x, timedelta):
        return x.total_seconds()
    if isinstance(x, BaseModelRoot):
        # Project models lower via their dict() representation.
        return {k: lower(v, skip) for k, v in x.dict().items()}
    if isinstance(x, Iterable):
        if not skip:
            return [lower(v, skip) for v in x]
        return [v for v in (lower(v, skip) for v in x) if v is not ...]
    if isinstance(x, Enum):
        return x.name

    if skip:
        # Signal "drop this value" to the enclosing container.
        return ...

    raise ValueError(f"Un-lowerable type: {type(x).__name__}")
128
+
129
+
130
class JSONEncoder(json.JSONEncoder):
    """JSON encoder that lowers rich values before falling back."""

    def default(self, obj: Any) -> Any:
        """Return a JSON-serialisable object for ``obj``."""

        try:
            return lower(obj, skip=False)
        except ValueError:
            # Not lowerable: defer to the stock encoder (raises TypeError).
            return super().default(obj)


def jsonify(x: Any, sort_keys: bool = True, cls: Type[json.JSONEncoder] = JSONEncoder, **kwargs: Any) -> str:
    """Convert an object to a JSON string (keys sorted by default)."""

    return json.dumps(x, cls=cls, sort_keys=sort_keys, **kwargs)
146
+
147
+
148
class LoaderMeta(type):
    """Metaclass that registers the ``!include`` constructor on new classes."""

    def __new__(metacls: Type[LoaderMeta], name: str, bases: Tuple[Type, ...], __dict__: Dict[str, Any]) -> LoaderMeta:
        """Create the class, then hook up its include constructor."""

        cls = cast(LoaderMeta, super().__new__(metacls, name, bases, __dict__))

        # Every loader class built with this metaclass understands "!include".
        cls.add_constructor("!include", cls.construct_include)  # type: ignore

        return cls
160
+
161
+
162
class Loader(yaml.SafeLoader, metaclass=LoaderMeta):
    """YAML Loader with `!include` constructor."""

    def construct_include(self, node: yaml.ScalarNode) -> Any:
        """Include the file referenced at ``node``.

        Raises:
            ValueError: If the scalar does not resolve to a string filename.
        """

        # BUG FIX: the original packed (scalar, loader-lambda) into a tuple
        # and then isinstance-checked the tuple itself, so the check could
        # never pass and every !include raised ValueError.  Validate the
        # scalar first, then pass it with the loader to load_include.
        filename = self.construct_scalar(node)
        if not isinstance(filename, str):
            raise ValueError(f"Invalid filename type {type(filename)!r}")

        # Nested includes are parsed recursively with this same loader.
        return load_include(filename, lambda x: yaml.load(x, Loader=Loader))  # nosec
173
+
174
+
175
class Dumper(yaml.Dumper):
    """Custom YAML encoder.

    NOTE(review): branch order matters and must not change — ``str`` before
    ``Iterable`` (strings are iterable), ``bool`` before ``int`` (bool is an
    int subclass), ``datetime`` before ``date`` (subclass), and ``Mapping``
    before ``Iterable``.
    """

    def represent_data(self, x: Any) -> Any:
        """Return a YAML-serialisable representation of ``x``."""

        if x is None:
            return super().represent_none(None)
        if isinstance(x, str):
            if "\n" in x:
                # use indented style for strings with newlines
                # strip trailing spaces, leading and trailing newlines
                x = re.sub(r"[ \t]+\n", "\n", x).lstrip("\n").rstrip()
                return self.represent_scalar("tag:yaml.org,2002:str", str(x), style="|")
            return self.represent_str(str(x))
        if isinstance(x, bool):
            return self.represent_bool(bool(x))
        if isinstance(x, int):
            return self.represent_int(int(x))
        if isinstance(x, float):
            return self.represent_float(float(x))
        if isinstance(x, Mapping):
            # Copy into a plain dict so custom mapping types serialise cleanly.
            return self.represent_dict({**x})
        if isinstance(x, datetime):
            # Normalise to UTC and emit with a trailing "Z".
            return self.represent_str(x.astimezone(timezone.utc).replace(tzinfo=None).isoformat() + "Z")
        if isinstance(x, date):
            return self.represent_str(x.isoformat())
        if isinstance(x, Iterable):
            return self.represent_list([*x])
        if isinstance(x, Enum):
            # Enums serialise by member name.
            return self.represent_str(x.name)

        return super().represent_data(x)
208
+
209
+
210
# Document-end marker that PyYAML appends after scalar-only documents.
YAML_SEPARATOR = "\n...\n"


def yamlify(x: Any, sort_keys: bool = False, Dumper: Type[Dumper] = Dumper, **kwargs: Any) -> str:
    """Convert an object to YAML, trimming the trailing document terminator."""

    dumped = cast(str, yaml.dump(x, sort_keys=sort_keys, Dumper=Dumper, **kwargs))

    # Strip the "\n...\n" end-of-document marker, or a lone trailing newline.
    if dumped.endswith(YAML_SEPARATOR):
        return dumped[: -len(YAML_SEPARATOR)]
    if dumped.endswith("\n"):
        return dumped[:-1]
    return dumped
@@ -0,0 +1,316 @@
1
+ """
2
+ Utilities.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ import contextlib
8
+ import json
9
+ import os
10
+ import re
11
+ from datetime import timedelta
12
+ from functools import wraps
13
+ from io import IOBase
14
+ from itertools import islice, zip_longest
15
+ from mimetypes import guess_type
16
+ from operator import itemgetter
17
+ from pathlib import Path
18
+ from typing import (
19
+ Any,
20
+ Callable,
21
+ Dict,
22
+ Generic,
23
+ Iterable,
24
+ Iterator,
25
+ List,
26
+ Mapping,
27
+ MutableMapping,
28
+ Optional,
29
+ Sequence,
30
+ Tuple,
31
+ TypeVar,
32
+ Union,
33
+ )
34
+
35
# Generic type variable shared by the helpers in this module.
T = TypeVar("T")
# (filename, payload, mime-type) triple for a multipart request part.
# NOTE(review): assumed to mirror requests' file-tuple convention — confirm.
FileTuple = Tuple[Optional[str], Union[IOBase, bytes], Optional[str]]
37
+
38
MICROSECOND = int(1e6)
# Unit sizes in microseconds, largest first (the order drives formatting).
SCALE = {
    "h": 60 * 60 * MICROSECOND,
    "m": 60 * MICROSECOND,
    "s": 1 * MICROSECOND,
    "ms": MICROSECOND // 1000,
    "us": 1,
}


def duration(x: timedelta) -> str:
    """Convert a ``timedelta`` to a Go-style duration string (e.g. ``"1h30s"``)."""

    total = int(x.total_seconds() * MICROSECOND)
    if not total:
        return "0s"

    parts: List[str] = []
    if total < 0:
        total = -total
        parts.append("-")

    # Peel off whole units from largest to smallest, skipping zero counts,
    # and stop as soon as nothing remains.
    for unit, size in SCALE.items():
        count, total = divmod(total, size)
        if count:
            parts.append(f"{count}{unit}")
        if not total:
            break

    return "".join(parts)
68
+
69
+
70
def snake_name(name: str) -> str:
    """Create an underscore-separated (snake_case) name from camel-case."""

    # Insert "_" at lower→Upper boundaries and before the final capital of
    # an acronym run (e.g. "HTTPServer" -> "http_server"), then lowercase.
    boundary = r"(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])"
    return re.sub(boundary, "_", name).lower()
74
+
75
+
76
@contextlib.contextmanager
def chdir(path: Optional[Path]) -> Iterator[None]:
    """Temporarily change the working directory, restoring it on exit.

    ``None`` is a no-op; a file path changes into the file's parent
    directory instead.
    """

    if path is None:
        yield
        return

    previous = Path.cwd()
    os.chdir(path if path.is_dir() else path.parent)
    try:
        yield
    finally:
        # Always return to the original directory.
        os.chdir(previous)
89
+
90
+
91
def relative_to_home(path: Path) -> Path:
    """Rewrite ``path`` relative to ``~`` when it lives under HOME."""

    try:
        tail = path.relative_to(Path.home())
    except ValueError:
        # Not under HOME: return unchanged.
        return path
    return Path("~") / tail
98
+
99
+
100
class instance_classproperty(Generic[T]):
    """Property that works on both instances and classes.

    Accessed on an instance, the getter receives the instance; accessed on
    the class, it receives the class itself.
    """

    def __init__(self, fget: Callable[..., T]) -> None:
        """Store the getter callable."""

        self.fget = fget

    def __get__(self, owner_self: Any, owner_cls: Any) -> T:
        """Invoke the getter with the instance, or the class if unbound."""

        target = owner_cls if owner_self is None else owner_self
        return self.fget(target)
112
+
113
+
114
class instance_classmethod(classmethod, Generic[T]):
    """Method that binds to the instance when available, else the class."""

    def __init__(self, f: Callable[..., T]) -> None:
        """Store the wrapped callable."""

        self.f = f

    def __get__(self, owner_self: Any, owner_cls: Any) -> Callable[..., T]:  # type: ignore
        """Return the callable bound to the instance (or the class)."""

        target = owner_cls if owner_self is None else owner_self

        @wraps(self.f)
        def bound(*args: Any, **kwargs: Any) -> T:
            # Prepend the bound target, forwarding everything else.
            return self.f(target, *args, **kwargs)

        return bound
132
+
133
+
134
def update(data: MutableMapping[str, Any], *more: Mapping[str, Any]) -> MutableMapping[str, Any]:
    """Recursively merge the given mappings into ``data`` (in place).

    A ``None`` ``data`` is replaced with a fresh dict, which is returned.
    """

    if data is None:
        data = {}

    for extra in more:
        for key, value in extra.items():
            if isinstance(value, Mapping):
                # Recurse into nested mappings, creating dicts as needed.
                update(data.setdefault(key, {}), value)
            else:
                data[key] = value

    return data
148
+
149
+
150
def merge(*args: Mapping[str, Any], **kwargs: Any) -> Dict[str, Any]:
    """Deep-merge mappings into a new dictionary (later values win)."""

    result: Dict[str, Any] = {}

    # Keyword arguments act as a final, highest-priority mapping.
    sources = args + ((kwargs,) if kwargs else ())

    for source in sources:
        if source is None:
            continue
        for key, value in source.items():
            if isinstance(value, Mapping):
                # Merge nested mappings recursively on top of what's there.
                result[key] = merge(result.get(key) or {}, value)
            else:
                result[key] = value

    return result
165
+
166
+
167
+ def _make_key(k: str, q: str, sep: str = ".") -> str:
168
+ """Make flattened key."""
169
+
170
+ if not q:
171
+ return k
172
+
173
+ if not q.startswith("["):
174
+ k += sep
175
+
176
+ return f"{k}{q}"
177
+
178
+
179
+ def flatten(x: Any, sep: str = ".", sequence: bool = True) -> List[Tuple[str, Any]]:
180
+ """Flatten nested mappings and sequences."""
181
+
182
+ # basic conversions
183
+ if isinstance(x, Mapping):
184
+ x = x.items()
185
+ elif sequence and isinstance(x, Sequence) and not isinstance(x, str):
186
+ x = ((f"[{i}]", v) for i, v in enumerate(x))
187
+ else:
188
+ return [("", x)]
189
+
190
+ return [(_make_key(k, q, sep), w) for k, v in x for q, w in flatten(v, sep, sequence)]
191
+
192
+
193
def inflate(x: Iterable[Tuple[str, Any]], separator: str = ".") -> Dict[str, Any]:
    """Re-inflate flattened ``(key, value)`` pairs into a nested object.

    Inverse of ``flatten``: dotted components become nested dict keys and
    ``[i]`` components become list indices.

    Args:
        x: Iterable of flattened key/value pairs.
        separator: Separator used between mapping keys.

    Returns:
        The reconstructed nested dictionary.

    Raises:
        ValueError: If the keys imply conflicting structures.
    """

    result: Dict[str, Any] = {}
    inputs: List[Tuple[Sequence[Union[int, str]], Any]] = []

    # Split "a.b[0].c" into ["a", "b", 0, "c"].
    delims = re.compile("|".join(re.escape(x) for x in [separator, "[", "]"]))

    for key_, value_ in x:
        split_key: List[Union[int, str]] = [
            int(k) if k.isnumeric() else k for k in delims.split(key_) if k and not delims.match(k)
        ]
        inputs += [(split_key, value_)]

    root: Any

    # Process keys in sorted order so list indices are filled monotonically.
    for key, value in sorted(inputs, key=itemgetter(0)):
        root = result
        # Walk (current, lookahead) pairs; the lookahead decides whether the
        # next container is a dict (str), a list (int), or a leaf (None).
        for k, l in zip_longest(key, key[1:]):
            if isinstance(k, str):
                if not isinstance(root, Dict):
                    raise ValueError("Invalid structure")

                if k not in root:
                    if isinstance(l, str):
                        root[k] = {}
                    elif isinstance(l, int):
                        root[k] = []
            else:
                if not isinstance(root, List):
                    raise ValueError("Invalid structure")

                n = len(root)
                # BUG FIX: was ``k > n``, which left index ``k == n``
                # unallocated so ``root[k] = value`` raised IndexError on
                # every first append (e.g. inflate([("a[0]", 1)])).
                if k >= n:
                    if isinstance(l, str):
                        root += [{} for _ in range(k - n + 1)]
                    elif isinstance(l, int):
                        root += [[] for _ in range(k - n + 1)]
                    else:
                        root += [None] * (k - n + 1)

            if l is None:
                # Last component: assign the value.
                root[k] = value
            else:
                # Descend into the (possibly just-created) container.
                root = root[k]

    return result
240
+
241
+
242
def chunks(x: Sequence[T], n: int) -> Iterator[Sequence[T]]:
    """Yield successive ``n``-sized slices of ``x`` (the last may be shorter)."""

    start = 0
    while start < len(x):
        yield x[start : start + n]
        start += n
247
+
248
+
249
def map_chunks(chunk_size: int, f: Callable[..., Any], x: Iterable[T], **kwargs: Any) -> Iterator[Any]:
    """Map ``f`` over successive chunks of an iterable.

    Args:
        chunk_size: Maximum number of items per chunk.
        f: Callable applied to each chunk; returning ``None`` stops iteration.
        x: Source iterable.
        **kwargs: Extra keyword arguments forwarded to ``f``.

    Yields:
        The result of ``f`` for each non-empty chunk.
    """

    # BUG FIX: islice() was applied repeatedly to the original iterable, so a
    # Sequence input (e.g. a list) re-yielded its first chunk forever.  Take
    # a single iterator, slice that, and stop once it is exhausted.
    it = iter(x)
    while True:
        chunk = list(islice(it, chunk_size))
        if not chunk:
            break
        result = f(chunk, **kwargs)
        if result is None:
            break
        yield result
257
+
258
+
259
def deep_itemgetter(path: str) -> Callable[[str], Any]:
    """Deep itemgetter, halting on first ``None``.

    ``"a.b"`` yields a getter equivalent to ``x["a"]["b"]`` that
    short-circuits to ``None`` as soon as any level (or the input) is
    ``None``.
    """

    if "." not in path:
        # Single level: the plain operator version suffices.
        return itemgetter(path)

    keys = path.split(".")

    def getter(x: Any) -> Any:
        current = x
        for key in keys:
            if current is None:
                return None
            current = current[key]
        return current

    return getter
275
+
276
+
277
def file_tuple(x: Union[str, IOBase, bytes, Tuple]) -> FileTuple:
    """Create a ``(name, data, mime_type)`` tuple for a multipart request.

    Args:
        x: A filename (opened for reading), an open file object, raw bytes,
            or an already-built ``(name, data[, mime_type])`` tuple.

    Returns:
        The normalised file tuple, guessing the MIME type from the name
        when not explicitly provided.

    Raises:
        ValueError: If a tuple input has more than three elements.
    """

    if isinstance(x, tuple):
        name, data, *tail = x
        if not tail:
            mime_type = None
        elif len(tail) == 1:
            mime_type = tail[0]
        else:
            raise ValueError("Too many values")
    else:
        if isinstance(x, str):
            # Filename: stream the file; the caller owns closing it.
            name = x
            data = open(x, "rb")
        elif isinstance(x, IOBase):
            name = getattr(x, "name", None)
            data = x
        else:
            name = None
            data = x

        mime_type = None

    if mime_type is None and name is not None:
        # BUG FIX: guess_type() returns a (type, encoding) pair; the original
        # stored the whole tuple where a MIME string is expected.
        mime_type, _ = guess_type(name)

    return (name, data, mime_type)
305
+
306
+
307
def metadata_tuple(x: Dict[str, Any]) -> Tuple[None, str]:
    """Convert a metadata mapping into a multipart-ready tuple.

    Args:
        x (Dict[str, Any]): Metadata to serialise.

    Returns:
        Tuple[None, str]: ``(None, json_string)`` pair for the request body.
    """

    serialised = json.dumps(x)
    return None, serialised
@@ -0,0 +1,16 @@
1
# file generated by setuptools_scm
# don't change, don't track in version control

# TYPE_CHECKING is declared locally so this module has no runtime imports.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple, Union

    VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
    VERSION_TUPLE = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE

__version__ = version = '0.0.1'
__version_tuple__ = version_tuple = (0, 0, 1)