pluto-ml 0.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mlop/__init__.py +17 -0
- mlop/__main__.py +14 -0
- mlop/compat/__init__.py +15 -0
- mlop/compat/lightning.py +15 -0
- mlop/compat/neptune.py +15 -0
- mlop/compat/torch.py +15 -0
- mlop/compat/transformers.py +15 -0
- pluto/__init__.py +73 -0
- pluto/__main__.py +53 -0
- pluto/api.py +261 -0
- pluto/auth.py +102 -0
- pluto/compat/__init__.py +0 -0
- pluto/compat/lightning.py +180 -0
- pluto/compat/neptune.py +708 -0
- pluto/compat/torch.py +576 -0
- pluto/compat/transformers.py +180 -0
- pluto/data.py +196 -0
- pluto/file.py +386 -0
- pluto/iface.py +522 -0
- pluto/init.py +75 -0
- pluto/log.py +152 -0
- pluto/op.py +461 -0
- pluto/sets.py +248 -0
- pluto/store.py +157 -0
- pluto/sys.py +336 -0
- pluto/util.py +276 -0
- pluto_ml-0.0.2.dist-info/METADATA +92 -0
- pluto_ml-0.0.2.dist-info/RECORD +31 -0
- pluto_ml-0.0.2.dist-info/WHEEL +4 -0
- pluto_ml-0.0.2.dist-info/entry_points.txt +4 -0
- pluto_ml-0.0.2.dist-info/licenses/LICENSE +188 -0
mlop/__init__.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backward compatibility shim for mlop -> pluto migration.
|
|
3
|
+
This module is deprecated. Use `import pluto` instead.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import warnings
|
|
7
|
+
|
|
8
|
+
warnings.warn(
|
|
9
|
+
"The 'mlop' package is deprecated and will be removed in a future release. "
|
|
10
|
+
"Please use 'import pluto' instead.",
|
|
11
|
+
DeprecationWarning,
|
|
12
|
+
stacklevel=2,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
# Re-export everything from pluto
|
|
16
|
+
from pluto import * # noqa: E402, F401, F403
|
|
17
|
+
from pluto import __all__, __version__ # noqa: E402, F401
|
mlop/__main__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"""Deprecated CLI entry point."""
|
|
2
|
+
|
|
3
|
+
import warnings
|
|
4
|
+
|
|
5
|
+
warnings.warn(
|
|
6
|
+
"The 'mlop' command is deprecated. Use 'pluto' instead.",
|
|
7
|
+
DeprecationWarning,
|
|
8
|
+
stacklevel=2,
|
|
9
|
+
)
|
|
10
|
+
|
|
11
|
+
from pluto.__main__ import main # noqa: E402
|
|
12
|
+
|
|
13
|
+
if __name__ == '__main__':
|
|
14
|
+
main()
|
mlop/compat/__init__.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backward compatibility shim for mlop.compat -> pluto.compat migration.
|
|
3
|
+
This module is deprecated. Use `import pluto.compat` instead.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import warnings
|
|
7
|
+
|
|
8
|
+
warnings.warn(
|
|
9
|
+
"The 'mlop.compat' module is deprecated. "
|
|
10
|
+
"Please use 'import pluto.compat' instead.",
|
|
11
|
+
DeprecationWarning,
|
|
12
|
+
stacklevel=2,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
from pluto.compat import * # noqa: E402, F401, F403
|
mlop/compat/lightning.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backward compatibility shim for mlop.compat.lightning.
|
|
3
|
+
This module is deprecated. Use `import pluto.compat.lightning` instead.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import warnings
|
|
7
|
+
|
|
8
|
+
warnings.warn(
|
|
9
|
+
"The 'mlop.compat.lightning' module is deprecated. "
|
|
10
|
+
"Please use 'import pluto.compat.lightning' instead.",
|
|
11
|
+
DeprecationWarning,
|
|
12
|
+
stacklevel=2,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
from pluto.compat.lightning import * # noqa: E402, F401, F403
|
mlop/compat/neptune.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backward compatibility shim for mlop.compat.neptune -> pluto.compat.neptune migration.
|
|
3
|
+
This module is deprecated. Use `import pluto.compat.neptune` instead.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import warnings
|
|
7
|
+
|
|
8
|
+
warnings.warn(
|
|
9
|
+
"The 'mlop.compat.neptune' module is deprecated. "
|
|
10
|
+
"Please use 'import pluto.compat.neptune' instead.",
|
|
11
|
+
DeprecationWarning,
|
|
12
|
+
stacklevel=2,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
from pluto.compat.neptune import * # noqa: E402, F401, F403
|
mlop/compat/torch.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backward compatibility shim for mlop.compat.torch -> pluto.compat.torch migration.
|
|
3
|
+
This module is deprecated. Use `import pluto.compat.torch` instead.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import warnings
|
|
7
|
+
|
|
8
|
+
warnings.warn(
|
|
9
|
+
"The 'mlop.compat.torch' module is deprecated. "
|
|
10
|
+
"Please use 'import pluto.compat.torch' instead.",
|
|
11
|
+
DeprecationWarning,
|
|
12
|
+
stacklevel=2,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
from pluto.compat.torch import * # noqa: E402, F401, F403
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backward compatibility shim for mlop.compat.transformers -> pluto.compat.transformers.
|
|
3
|
+
This module is deprecated. Use `import pluto.compat.transformers` instead.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import warnings
|
|
7
|
+
|
|
8
|
+
warnings.warn(
|
|
9
|
+
"The 'mlop.compat.transformers' module is deprecated. "
|
|
10
|
+
"Please use 'import pluto.compat.transformers' instead.",
|
|
11
|
+
DeprecationWarning,
|
|
12
|
+
stacklevel=2,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
from pluto.compat.transformers import * # noqa: E402, F401, F403
|
pluto/__init__.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import os
import subprocess
from typing import Any, Callable, List, Optional

from .auth import login, logout
from .data import Data, Graph, Histogram, Table
from .file import Artifact, Audio, File, Image, Text, Video
from .init import finish, init
from .sets import Settings, setup
from .sys import System

# Module-level handles populated at runtime (presumably by init() — the
# assigning code is not in this file; verify against pluto/init.py).
# They start out empty/None until a run is initialized.
_hooks: List[Any] = []
ops: Optional[List[Any]] = None
log: Optional[Callable[..., Any]] = None
watch: Optional[Callable[..., Any]] = None
alert: Optional[Callable[..., Any]] = None

# Public API surface re-exported by the deprecated `mlop` shim as well.
# Note: `ops` and `log` are intentionally absent from __all__.
__all__ = (
    'Data',
    'Graph',
    'Histogram',
    'Table',
    'File',
    'Artifact',
    'Text',
    'Image',
    'Audio',
    'Video',
    'System',
    'Settings',
    'alert',
    'init',
    'login',
    'logout',
    'watch',
    'finish',
    'setup',
)

__version__ = '0.0.2'
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
# Replaced with the current commit when building the wheels.
_PLUTO_COMMIT_SHA = 'e8c13ee426ab5c9a428bcb2285db1a552836a653'


def _get_git_commit():
    """Return the commit hash identifying this build.

    Release wheels have the placeholder above substituted at build time, in
    which case that value is returned as-is. Development installs
    (pip install -e .) query git directly and append '-dirty' when the
    working tree has uncommitted changes. Falls back to the placeholder if
    git is unavailable or fails.
    """
    if 'PLUTO_COMMIT_SHA' not in _PLUTO_COMMIT_SHA:
        # Placeholder already substituted -> release build; value is final.
        return _PLUTO_COMMIT_SHA

    # Development build: ask git, running from this package's directory so
    # the right repository is found regardless of the caller's cwd.
    package_dir = os.path.dirname(__file__)

    def _git(*argv):
        # Run a git subcommand and return its stripped stdout; stderr is
        # discarded so a missing repo doesn't pollute the console.
        return subprocess.check_output(
            ['git', *argv],
            cwd=package_dir,
            universal_newlines=True,
            stderr=subprocess.DEVNULL,
        ).strip()

    try:
        sha = _git('rev-parse', 'HEAD')
        if _git('status', '--porcelain'):
            sha += '-dirty'
        return sha
    except Exception:  # pylint: disable=broad-except
        return _PLUTO_COMMIT_SHA
|
pluto/__main__.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
#!/usr/bin/env python3

import argparse
import sys

from . import __version__, _get_git_commit
from .auth import login, logout


def main():
    """CLI entry point for the ``pluto`` console script.

    Supports ``-v/--version``, ``-c/--commit`` and the ``login``/``logout``
    subcommands; prints help and exits with status 1 when nothing is given.
    """
    parser = argparse.ArgumentParser(description='pluto')
    parser.add_argument(
        '-v',
        '--version',
        action='store_true',
        help='show the installed pluto version',
    )
    parser.add_argument(
        '-c',
        '--commit',
        action='store_true',
        help='show the current git commit hash',
    )
    subparsers = parser.add_subparsers(dest='command', help='commands')

    login_parser = subparsers.add_parser('login', help='login to pluto')
    login_parser.add_argument('key', nargs='?', help='login key')
    subparsers.add_parser('logout', help='logout from pluto')

    args = parser.parse_args()

    # Informational flags short-circuit any subcommand handling.
    if args.version:
        print(__version__)
        return
    if args.commit:
        print(_get_git_commit())
        return

    if args.command == 'login':
        # A key on the command line bypasses the interactive prompt.
        kwargs = {'settings': {'_auth': args.key}} if args.key else {}
        login(**kwargs)
    elif args.command == 'logout':
        logout()
    else:
        parser.print_help()
        sys.exit(1)


if __name__ == '__main__':
    main()
|
pluto/api.py
ADDED
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
import json
import logging
import re
import signal
from datetime import datetime

from .util import clean_dict, find_node

# Root package logger (first component of the dotted module name).
logger = logging.getLogger(f"{__name__.split('.')[0]}")
tag = 'API'  # prefix used in log messages from this module

# Map internal numeric run status -> server-side status string.
# SIGINT's value doubles as the "terminated by user" status code.
STATUS = {
    -1: 'RUNNING',
    0: 'COMPLETED',
    1: 'FAILED',
    signal.SIGINT.value: 'TERMINATED',  # "INTERRUPTED",
}

# Substring expansions applied to system-monitor metric names by
# make_compat_monitor_v1 (e.g. 'net.recv' -> 'network.received').
ABBR = {
    'pct': 'percentage',
    'net': 'network',
    'mem': 'memory',
    'recv': 'received',
    'bytes_': 'bytes.',
}
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def make_compat_trigger_v1(settings):
    """Build the v1 trigger payload identifying the current run."""
    payload = {'runId': settings._op_id}
    return json.dumps(payload).encode()
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def make_compat_start_v1(config, settings, info, tags=None):
    """Build the v1 run-start payload.

    Nested values (config, settings, system info) are double-encoded: each
    is serialized to a JSON string that the server stores as a field of the
    outer JSON document. Optional pieces become null when absent.
    """
    payload = {
        # "runId": settings._op_id,
        'runName': settings._op_name,
        'projectName': settings.project,
        'config': None if config is None else json.dumps(config),
        'loggerSettings': json.dumps(clean_dict(settings.to_dict())),
        'systemMetadata': None if info is None else json.dumps(info),
        'tags': tags if tags else None,
        'createdAt': settings.compat.get('createdAt'),
        'updatedAt': settings.compat.get('updatedAt'),
    }
    return json.dumps(payload).encode()
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def make_compat_status_v1(settings, trace=None):
    """Build the v1 status-update payload for the current run.

    The numeric `settings._op_status` is translated through the module-level
    STATUS map; an optional traceback/metadata object is double-encoded.
    """
    status_payload = {
        'runId': settings._op_id,
        'status': STATUS[settings._op_status],
        # "metadata": json.dumps(settings.meta),
        'statusMetadata': None if trace is None else json.dumps(trace),
    }
    return json.dumps(status_payload).encode()
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def make_compat_update_tags_v1(settings, tags):
    """Build the v1 payload replacing the run's tag list."""
    return json.dumps({'runId': settings._op_id, 'tags': tags}).encode()
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def make_compat_update_config_v1(settings, config):
    """Build the v1 payload replacing the run's config.

    The config is double-encoded as a JSON string; any falsy config
    (None or an empty mapping) is sent as null.
    """
    serialized = json.dumps(config) if config else None
    return json.dumps({'runId': settings._op_id, 'config': serialized}).encode()
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def make_compat_meta_v1(meta, dtype, settings):
    """Build the v1 log-metadata payload.

    The 'num' dtype is the special case mapped to the server's METRIC log
    type; every other dtype is simply upper-cased.
    """
    log_type = 'METRIC' if dtype == 'num' else dtype.upper()
    payload = {
        'runId': settings._op_id,
        # "runName": settings._op_name,
        # "projectName": settings.project,
        'logType': log_type,
        'logName': meta,  # TODO: better aggregate
    }
    return json.dumps(payload).encode()
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def make_compat_monitor_v1(data):
    """Expand abbreviated monitor metric names via the module ABBR map.

    Every occurrence of an abbreviation inside each key is replaced
    (e.g. 'net.recv' -> 'network.received'); values pass through untouched.
    """
    if not ABBR:
        return data

    abbr_re = re.compile('|'.join(re.escape(key) for key in ABBR))

    def expand(match):
        return ABBR[match.group(0)]

    renamed = {}
    for key, value in data.items():
        renamed[abbr_re.sub(expand, key)] = value
    return renamed
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def make_compat_num_v1(data, timestamp, step):
    """Serialize one numeric-metric record as a newline-terminated JSON line.

    `timestamp` is a POSIX timestamp in seconds and is stored in
    milliseconds; `step` is truncated to an int.
    """
    record = {
        'time': int(timestamp * 1000),  # convert to ms
        'step': int(step),
        'data': data,
    }
    return (json.dumps(record) + '\n').encode('utf-8')
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def make_compat_data_v1(data, timestamp, step):
    """Serialize rich data objects as newline-delimited JSON records.

    `data` maps a log name to a list of objects exposing `to_dict()`; each
    object becomes one line, with its payload double-encoded and its type
    name (upper-cased) recorded as the dataType.
    """
    time_ms = int(timestamp * 1000)  # convert to ms
    records = []
    for log_name, items in data.items():
        for item in items:
            serialized = json.dumps(item.to_dict())
            records.append(
                json.dumps(
                    {
                        'time': time_ms,
                        'data': serialized,
                        'dataType': type(item).__name__.upper(),
                        'logName': log_name,
                        'step': step,
                    }
                )
            )
    return ('\n'.join(records) + '\n').encode('utf-8')
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def make_compat_file_v1(file, timestamp, step):
    """Build the v1 file-manifest payload.

    `file` maps a log name to a list of file objects carrying `_name`,
    `_ext` (with leading dot) and an os.stat result in `_stat`. The file's
    mtime (ms) is reported as its time; `timestamp` is accepted for
    signature parity but not used, matching the original behavior.
    """
    entries = []
    for log_name, files in file.items():
        for f in files:
            entries.append(
                {
                    'fileName': f'{f._name}{f._ext}',
                    'fileSize': f._stat.st_size,
                    'fileType': f._ext[1:],  # extension without the dot
                    'time': int(f._stat.st_mtime * 1000),
                    'logName': log_name,
                    'step': step,
                }
            )
    return json.dumps({'files': entries}).encode()
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def make_compat_storage_v1(f, fl):
    """Find the storage entry for file *f* in list *fl* by file name.

    Workaround for lack of file ident on server side: each element of *fl*
    is a single-item mapping {file_name: value}; the value whose key equals
    f's full name is returned, or None when no entry matches.
    """
    target = f'{f._name}{f._ext}'
    for entry in fl:
        key = next(iter(entry.keys()))
        if key == target:
            return next(iter(entry.values()))
    return None
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def make_compat_message_v1(level, message, timestamp, step):
    """Serialize one console/log message as a newline-terminated JSON line.

    Args:
        level: numeric logging level (e.g. logging.INFO).
        message: message text.
        timestamp: POSIX timestamp in seconds (stored as milliseconds).
        step: ordering counter, reported as the line number.
    """
    # TODO: server side int log level support
    record = json.dumps(
        {
            'time': int(timestamp * 1000),  # convert to ms
            'message': message,
            'lineNumber': step,
            # Use the public API instead of the private logging._levelToName
            # the original read. For registered levels the name is identical;
            # for unregistered ints this yields "Level N" rather than None,
            # which keeps the field informative.
            'logType': logging.getLevelName(level),
        }
    )
    return (record + '\n').encode('utf-8')
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def make_compat_graph_v1(settings, name, nodes):
    """Build the v1 model-graph payload for the current run."""
    graph = {'format': name, 'nodes': nodes}
    payload = {'runId': settings._op_id, 'graph': graph}
    return json.dumps(payload).encode()
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
def make_compat_graph_nodes_v1(d, ref, dep=0, p='', r=None):
    """Recursively flatten a nested graph-node dict into {dotted_name: attrs}.

    Args:
        d: node dict; may contain 'id', 'name' and child 'nodes'. Note the
            node is mutated in place when it lacks a 'name'.
        ref: optional reference tree searched by id (via find_node) for
            extra attributes to merge onto each node.
        dep: current depth, stored on each emitted node as 'depth'.
        p: dotted name of the parent node ('' at the root).
        r: accumulator mapping; a fresh dict is created when not supplied.

    Returns:
        The accumulator mapping dotted node names to attribute dicts.
    """
    # BUG FIX: the original declared a mutable default (r={}), so results
    # leaked and accumulated across separate top-level calls. Create a
    # fresh accumulator per call; explicit callers passing r are unaffected.
    if r is None:
        r = {}

    if 'name' not in d:
        d['name'] = ''
        name = '.'  # unnamed node: treated as the root
    elif p == '.':
        name = str(d['name'])
    else:
        name = f"{p}.{d['name']}"

    if 'id' in d:
        # Keep every attribute except the structural keys.
        n = d.copy()
        n = {k: v for k, v in n.items() if k not in ['id', 'nodes', 'name']}
        r.update({name: n})
        r[name]['depth'] = dep

        if ref:
            rd = find_node(ref, d['id'], key='nodes')
            if rd:
                rn = rd.copy()
                rn = {k: v for k, v in rn.items() if k not in ['id', 'nodes']}
                r[name].update(rn)
            else:
                logger.debug(
                    f'{tag}: {n} not found in reference dictionary '
                    f'when processing {name}'
                )
        # Normalize the node type; fall back to UNKNOWN when absent/falsy.
        r[name]['node_type'] = (
            r[name]['node_type'].upper() if r[name].get('node_type') else 'UNKNOWN'
        )

    if 'nodes' in d:
        for c in d['nodes']:
            make_compat_graph_nodes_v1(d=c, ref=ref, dep=dep + 1, p=name, r=r)

    return r
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def make_compat_alert_v1(settings, t, m, n, level, url, **kwargs):
    """Build the v1 alert payload.

    Args:
        settings: run settings providing _op_id.
        t: POSIX timestamp in seconds (stored as milliseconds).
        m: alert body text.
        n: alert title.
        level: severity level.
        url: link associated with the alert.
        **kwargs: optional extras; 'email' is forwarded when present.
    """
    return json.dumps(
        {
            'runId': settings._op_id,
            'alert': {
                'timestamp': int(t * 1000),
                'level': level,
                'title': n,
                'body': m,
                'email': kwargs.get('email', None),
                # BUG FIX: the original read kwargs.get('url'), which can
                # never be populated because 'url' is bound to the named
                # parameter above -- so the field was silently always None.
                # Forward the parameter directly.
                'url': url,
            },
        }
    ).encode()
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def make_compat_webhook_v1(timestamp, level, title, message, step, url):
    """Build a webhook payload understood by both Discord and Slack.

    The same JSON document carries Discord fields (username/content/embeds)
    and Slack fields (text/blocks); each service ignores the other's keys.
    The footer time uses the machine's local timezone.
    """
    local_time = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
    headline = f'{level}: {title}'
    # Slack mrkdwn body; the literal {date_short_pretty}/{time_secs} tokens
    # are expanded by Slack's <!date^...> syntax, not by Python.
    mrkdwn = (
        f'`{level}` *{title}:* {message}\n\n'
        f'_<{url}|Check out live updates for this run>_\n'
        f'*Local Time:* <!date^{int(timestamp)}^'
        '{date_short_pretty} {time_secs}'
        '|Time>\n'
        f'*Step:* {step}\n'
    )
    payload = {
        # discord
        'username': __name__.split('.')[0],
        'content': headline,
        'embeds': [
            {
                'description': message,
                'footer': {'text': f'Step: {step} at {local_time}'},
            }
        ],
        # slack
        'text': headline,
        'blocks': [
            {
                'type': 'section',
                'text': {'type': 'mrkdwn', 'text': mrkdwn},
            }
        ],
    }
    return json.dumps(payload).encode()
|
pluto/auth.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import getpass
import logging
import sys
import webbrowser

import httpx
import keyring

from .log import setup_logger, teardown_logger
from .sets import get_console, setup
from .util import ANSI, import_lib, print_url

# Module logger for authentication flows; (re)configured on each
# login()/logout() call via setup_logger and torn down afterwards.
tlogger = logging.getLogger('auth')
tag = 'Authentication'  # prefix used in log messages from this module
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def login(settings=None, retry=False):
    """Authenticate against the server, prompting for an API key if needed.

    Looks up a stored key in the system keyring (plaintext-file fallback on
    non-macOS or when no keyring backend exists), validates it against the
    login endpoint, and on failure opens the token page in a browser and
    prompts for a key, then recurses once-per-attempt with retry=True.

    Args:
        settings: optional settings object/dict; normalized via setup().
        retry: internal flag — True on recursive attempts so that failures
            are logged as warnings.
    """
    settings = setup(settings)
    setup_logger(settings=settings, logger=tlogger)
    try:
        # Only use the native keyring on macOS; elsewhere the AssertionError
        # forces the plaintext-file fallback below.
        assert sys.platform == 'darwin'
        auth = keyring.get_password(f'{settings.tag}', f'{settings.tag}')
    except (keyring.errors.NoKeyringError, AssertionError):  # fallback
        keyring.set_keyring(import_lib('keyrings.alt.file').PlaintextKeyring())
        auth = keyring.get_password(f'{settings.tag}', f'{settings.tag}')
    if settings._auth is None:
        if auth == '':
            # A stored empty key is useless; purge it.
            keyring.delete_password(f'{settings.tag}', f'{settings.tag}')
        elif auth is not None:
            settings._auth = auth
    if settings._auth == '':
        tlogger.critical(
            '%s: authentication failed: the provided token cannot be empty', tag
        )
        # Sentinel value; guarantees the server check below fails cleanly.
        settings._auth = '_key'
    client = httpx.Client(
        verify=True if not settings.insecure_disable_ssl else False,
        proxy=settings.http_proxy or settings.https_proxy or None,
    )
    try:
        r = client.post(
            url=settings.url_login,
            headers={
                'Authorization': f'Bearer {settings._auth}',
            },
        )
    except Exception as e:
        # NOTE(review): when this path is taken, `r` stays unbound and the
        # r.json() below raises NameError, which drops us into the prompt
        # branch — apparently relied upon as the failure path; confirm.
        tlogger.warning(f'{tag}: server not reachable; reason: {e}')
        settings._auth = '_key'
    try:
        # Success path: a valid response carries the organization slug.
        tlogger.info(f"{tag}: logged in as {r.json()['organization']['slug']}")
        keyring.set_password(f'{settings.tag}', f'{settings.tag}', f'{settings._auth}')
        teardown_logger(tlogger)
    except Exception as e:
        if retry:
            tlogger.warning('%s: authentication failed; reason: %s', tag, e)
        hint1 = (
            f'{ANSI.cyan}- Please copy the API key provided in the web portal '
            'and paste it below'
        )
        hint2 = f'- You can alternatively manually open {print_url(settings.url_token)}'
        hint3 = f'{ANSI.green}- You may exit at any time by pressing CTRL+C / ⌃+C'
        tlogger.info(
            f'{tag}: initializing authentication\n\n {hint1}\n\n {hint2}\n\n {hint3}\n'
        )
        if (
            hasattr(settings._sys, 'monitor') and settings._sys.monitor() == {}
        ):  # migrate mode
            return
        else:
            webbrowser.open(url=settings.url_token)
        # Jupyter consoles need getpass; plain terminals get a colored prompt.
        if get_console() == 'jupyter':
            settings._auth = getpass.getpass(prompt='Enter API key: ')
        else:
            settings._auth = input(f'{ANSI.yellow}Enter API key: ')
        try:
            keyring.set_password(
                f'{settings.tag}', f'{settings.tag}', f'{settings._auth}'
            )
        except Exception as e:
            tlogger.critical(
                '%s: failed to save key to system keyring service: %s', tag, e
            )
        teardown_logger(tlogger)
        # Re-validate the freshly entered key.
        login(settings=settings, retry=True)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def logout(settings=None):
    """Delete the stored API key from the keyring and log the logout.

    Args:
        settings: optional settings object/dict; normalized via setup().
    """
    settings = setup(settings)
    setup_logger(settings=settings, logger=tlogger)
    try:
        # Native keyring only on macOS; elsewhere fall through to the
        # plaintext-file backend that login() would have used.
        assert sys.platform == 'darwin'
        keyring.delete_password(f'{settings.tag}', f'{settings.tag}')
    except (keyring.errors.NoKeyringError, AssertionError):
        keyring.set_keyring(import_lib('keyrings.alt.file').PlaintextKeyring())
        keyring.delete_password(f'{settings.tag}', f'{settings.tag}')
    except Exception as e:
        # Best-effort: a missing entry or backend error is only warned about.
        tlogger.warning(
            '%s: failed to delete key from system keyring service: %s', tag, e
        )
    tlogger.info(f'{tag}: logged out')
    teardown_logger(tlogger)
|
pluto/compat/__init__.py
ADDED
|
File without changes
|