ominfra 0.0.0.dev7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ominfra/__about__.py +27 -0
- ominfra/__init__.py +0 -0
- ominfra/bootstrap/__init__.py +0 -0
- ominfra/bootstrap/bootstrap.py +8 -0
- ominfra/cmds.py +83 -0
- ominfra/deploy/__init__.py +0 -0
- ominfra/deploy/_executor.py +1036 -0
- ominfra/deploy/configs.py +19 -0
- ominfra/deploy/executor/__init__.py +1 -0
- ominfra/deploy/executor/base.py +115 -0
- ominfra/deploy/executor/concerns/__init__.py +0 -0
- ominfra/deploy/executor/concerns/dirs.py +28 -0
- ominfra/deploy/executor/concerns/nginx.py +47 -0
- ominfra/deploy/executor/concerns/repo.py +17 -0
- ominfra/deploy/executor/concerns/supervisor.py +46 -0
- ominfra/deploy/executor/concerns/systemd.py +88 -0
- ominfra/deploy/executor/concerns/user.py +25 -0
- ominfra/deploy/executor/concerns/venv.py +22 -0
- ominfra/deploy/executor/main.py +119 -0
- ominfra/deploy/poly/__init__.py +1 -0
- ominfra/deploy/poly/_main.py +725 -0
- ominfra/deploy/poly/base.py +179 -0
- ominfra/deploy/poly/configs.py +38 -0
- ominfra/deploy/poly/deploy.py +25 -0
- ominfra/deploy/poly/main.py +18 -0
- ominfra/deploy/poly/nginx.py +60 -0
- ominfra/deploy/poly/repo.py +41 -0
- ominfra/deploy/poly/runtime.py +39 -0
- ominfra/deploy/poly/site.py +11 -0
- ominfra/deploy/poly/supervisor.py +64 -0
- ominfra/deploy/poly/venv.py +52 -0
- ominfra/deploy/remote.py +91 -0
- ominfra/pyremote/__init__.py +0 -0
- ominfra/pyremote/_runcommands.py +824 -0
- ominfra/pyremote/bootstrap.py +149 -0
- ominfra/pyremote/runcommands.py +56 -0
- ominfra/ssh.py +191 -0
- ominfra/tools/__init__.py +0 -0
- ominfra/tools/listresources.py +256 -0
- ominfra-0.0.0.dev7.dist-info/LICENSE +21 -0
- ominfra-0.0.0.dev7.dist-info/METADATA +19 -0
- ominfra-0.0.0.dev7.dist-info/RECORD +44 -0
- ominfra-0.0.0.dev7.dist-info/WHEEL +5 -0
- ominfra-0.0.0.dev7.dist-info/top_level.txt +1 -0
ominfra/pyremote/bootstrap.py
ADDED
@@ -0,0 +1,149 @@
+"""
+Basically this: https://mitogen.networkgenomics.com/howitworks.html
+"""
+import base64
+import inspect
+import os
+import sys
+import textwrap
+import typing as ta
+import zlib
+
+
+##
+
+
+_BOOTSTRAP_COMM_FD = 100
+_BOOTSTRAP_SRC_FD = 101
+
+_BOOTSTRAP_CHILD_PID_VAR = '_PYR_CPID'
+_BOOTSTRAP_ARGV0_VAR = '_PYR_ARGV0'
+
+BOOTSTRAP_ACK0 = b'OPYR000\n'
+BOOTSTRAP_ACK1 = b'OPYR001\n'
+
+_BOOTSTRAP_PROC_TITLE_FMT = '(pyremote:%s)'
+
+_BOOTSTRAP_IMPORTS = [
+    'base64',
+    'os',
+    'sys',
+    'zlib',
+]
+
+
+def _bootstrap_main(context_name: str, main_z_len: int) -> None:
+    # Two copies of main src to be sent to parent
+    r0, w0 = os.pipe()
+    r1, w1 = os.pipe()
+
+    if (cp := os.fork()):
+        # Parent process
+
+        # Dup original stdin to comm_fd for use as comm channel
+        os.dup2(0, _BOOTSTRAP_COMM_FD)
+
+        # Overwrite stdin (fed to python repl) with first copy of src
+        os.dup2(r0, 0)
+
+        # Dup second copy of src to src_fd to recover after launch
+        os.dup2(r1, _BOOTSTRAP_SRC_FD)
+
+        # Close remaining fd's
+        for f in [r0, w0, r1, w1]:
+            os.close(f)
+
+        # Save child pid to close after relaunch
+        os.environ[_BOOTSTRAP_CHILD_PID_VAR] = str(cp)
+
+        # Save original argv0
+        os.environ[_BOOTSTRAP_ARGV0_VAR] = sys.executable
+
+        # Start repl reading stdin from r0
+        os.execl(sys.executable, sys.executable + (_BOOTSTRAP_PROC_TITLE_FMT % (context_name,)))
+
+    else:
+        # Child process
+
+        # Write first ack
+        os.write(1, BOOTSTRAP_ACK0)
+
+        # Read main src from stdin
+        main_src = zlib.decompress(os.fdopen(0, 'rb').read(main_z_len))
+
+        # Write both copies of main src
+        for w in [w0, w1]:
+            fp = os.fdopen(w, 'wb', 0)
+            fp.write(main_src)
+            fp.close()
+
+        # Write second ack
+        os.write(1, BOOTSTRAP_ACK1)
+
+        sys.exit(0)
+
+
+#
+
+
+def bootstrap_payload(context_name: str, main_z_len: int) -> str:
+    bs_src = textwrap.dedent(inspect.getsource(_bootstrap_main))
+
+    for gl in [
+        '_BOOTSTRAP_COMM_FD',
+        '_BOOTSTRAP_SRC_FD',
+
+        '_BOOTSTRAP_CHILD_PID_VAR',
+        '_BOOTSTRAP_ARGV0_VAR',
+
+        'BOOTSTRAP_ACK0',
+        'BOOTSTRAP_ACK1',
+
+        '_BOOTSTRAP_PROC_TITLE_FMT',
+    ]:
+        bs_src = bs_src.replace(gl, repr(globals()[gl]))
+
+    bs_src = '\n'.join(
+        cl
+        for l in bs_src.splitlines()
+        if (cl := (l.split('#')[0]).rstrip())
+        if cl.strip()
+    )
+
+    bs_z = zlib.compress(bs_src.encode('utf-8'))
+    bs_z64 = base64.encodebytes(bs_z).replace(b'\n', b'')
+
+    stmts = [
+        f'import {", ".join(_BOOTSTRAP_IMPORTS)}',
+        f'exec(zlib.decompress(base64.decodebytes({bs_z64!r})))',
+        f'_bootstrap_main({context_name!r}, {main_z_len})',
+    ]
+
+    cmd = '; '.join(stmts)
+    return cmd
+
+
+#
+
+
+class PostBoostrap(ta.NamedTuple):
+    input: ta.BinaryIO
+    main_src: str
+
+
+def post_boostrap() -> PostBoostrap:
+    # Restore original argv0
+    sys.executable = os.environ.pop(_BOOTSTRAP_ARGV0_VAR)
+
+    # Reap boostrap child
+    os.waitpid(int(os.environ.pop(_BOOTSTRAP_CHILD_PID_VAR)), 0)
+
+    # Read second copy of main src
+    r1 = os.fdopen(_BOOTSTRAP_SRC_FD, 'rb', 0)
+    main_src = r1.read().decode('utf-8')
+    r1.close()
+
+    return PostBoostrap(
+        input=os.fdopen(_BOOTSTRAP_COMM_FD, 'rb', 0),
+        main_src=main_src,
+    )
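
The bootstrap above covers only the remote half of the handshake. As a rough sketch of the local half (the driver flow, the 'example' context name, and remote_main.py below are illustrative assumptions, not the package's actual driver), a caller hands the one-liner from bootstrap_payload() to a python interpreter, streams the zlib-compressed program over its stdin, and waits for the two acks:

    # Hypothetical local-side driver -- a sketch only, not shipped in this package.
    import subprocess
    import zlib

    from ominfra.pyremote.bootstrap import BOOTSTRAP_ACK0
    from ominfra.pyremote.bootstrap import BOOTSTRAP_ACK1
    from ominfra.pyremote.bootstrap import bootstrap_payload

    main_src = open('remote_main.py').read()  # hypothetical program to run remotely
    main_z = zlib.compress(main_src.encode('utf-8'))

    # In practice the argv would be wrapped in an ssh invocation; a plain local
    # python behaves the same way for illustration.
    proc = subprocess.Popen(
        ['python3', '-c', bootstrap_payload('example', len(main_z))],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )

    # The bootstrap child acks, reads len(main_z) bytes of compressed source off
    # stdin, feeds it to the re-exec'd interpreter, then acks again (see
    # _bootstrap_main above). After that, bytes written here reach the remote
    # program via _BOOTSTRAP_COMM_FD / post_boostrap().input.
    assert proc.stdout.read(len(BOOTSTRAP_ACK0)) == BOOTSTRAP_ACK0
    proc.stdin.write(main_z)
    proc.stdin.flush()
    assert proc.stdout.read(len(BOOTSTRAP_ACK1)) == BOOTSTRAP_ACK1

After the second ack the remote program is expected to call post_boostrap() as its first act, as runcommands.py below does.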
ominfra/pyremote/runcommands.py
ADDED
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+# @omdev-amalg ./_runcommands.py
+# ruff: noqa: UP006 UP007
+import dataclasses as dc
+import io
+import json
+import subprocess
+import sys
+import typing as ta
+
+from omlish.lite.json import json_dumps_compact
+from omlish.lite.marshal import marshal_obj
+from omlish.lite.marshal import unmarshal_obj
+from omlish.lite.subprocesses import subprocess_maybe_shell_wrap_exec
+
+from .bootstrap import post_boostrap
+
+
+@dc.dataclass(frozen=True)
+class CommandRequest:
+    cmd: ta.Sequence[str]
+    in_: ta.Optional[bytes] = None
+
+
+@dc.dataclass(frozen=True)
+class CommandResponse:
+    req: CommandRequest
+    rc: int
+    out: bytes
+    err: bytes
+
+
+def _run_commands_loop(input: ta.BinaryIO, output: ta.BinaryIO = sys.stdout.buffer) -> None:  # noqa
+    while (l := input.readline().decode('utf-8').strip()):
+        req: CommandRequest = unmarshal_obj(json.loads(l), CommandRequest)
+        proc = subprocess.Popen(  # type: ignore
+            subprocess_maybe_shell_wrap_exec(*req.cmd),
+            **(dict(stdin=io.BytesIO(req.in_)) if req.in_ is not None else {}),
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        out, err = proc.communicate()
+        resp = CommandResponse(
+            req=req,
+            rc=proc.returncode,
+            out=out,  # noqa
+            err=err,  # noqa
+        )
+        output.write(json_dumps_compact(marshal_obj(resp)).encode('utf-8'))
+        output.write(b'\n')
+        output.flush()
+
+
+def run_commands_main() -> None:
+    bs = post_boostrap()
+    _run_commands_loop(bs.input)
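
The wire format of the loop above is one compact-JSON-marshaled CommandRequest per input line and one CommandResponse per output line. A minimal in-process sketch (assuming, as the loop itself does, that omlish's marshal_obj/unmarshal_obj round-trip these dataclasses):

    import io
    import json

    from omlish.lite.json import json_dumps_compact
    from omlish.lite.marshal import marshal_obj
    from omlish.lite.marshal import unmarshal_obj

    from ominfra.pyremote.runcommands import CommandRequest
    from ominfra.pyremote.runcommands import CommandResponse
    from ominfra.pyremote.runcommands import _run_commands_loop

    # One request per line in; normally these arrive over the pyremote comm channel.
    req = CommandRequest(cmd=['echo', 'hi'])
    in_buf = io.BytesIO(json_dumps_compact(marshal_obj(req)).encode('utf-8') + b'\n')
    out_buf = io.BytesIO()

    _run_commands_loop(in_buf, out_buf)  # runs each request as a subprocess

    # One response per line out.
    resp = unmarshal_obj(json.loads(out_buf.getvalue().decode('utf-8')), CommandResponse)
    print(resp.rc, resp.out)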
ominfra/ssh.py
ADDED
@@ -0,0 +1,191 @@
+"""
+TODO:
+ - sessionized
+ - streamed
+ - actual timeout
+
+bcrypt
+fido2
+gssapi
+libnacl
+pkcs11
+pyOpenSSL
+
+asyncssh[bcrypt,fido2,gssapi,libnacl,pkcs11,pyOpenSSL]
+"""
+import asyncio
+import contextlib
+import shlex
+import typing as ta
+
+from omlish import check
+from omlish import dataclasses as dc
+from omlish import lang
+from omserv.secrets import load_secrets
+
+from .cmds import CommandRunner
+from .cmds import LocalCommandRunner
+
+
+if ta.TYPE_CHECKING:
+    import asyncssh
+    import paramiko
+else:
+    asyncssh = lang.proxy_import('asyncssh')
+    paramiko = lang.proxy_import('paramiko')
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class SshConfig:
+    host: str | None = None
+    port: int | None = None
+
+    username: str | None = None
+    password: str | None = None
+
+    key_file_path: str | None = None
+
+
+class SshSubprocessCommandRunner(CommandRunner):
+    def __init__(
+            self,
+            cfg: SshConfig,
+            lcr: LocalCommandRunner | None = None,
+    ) -> None:
+        super().__init__()
+        self._cfg = check.isinstance(cfg, SshConfig)
+        self._lcr = check.isinstance(lcr, LocalCommandRunner) if lcr is not None else LocalCommandRunner()
+
+    async def run_command(self, cmd: CommandRunner.Command) -> CommandRunner.Result:
+        args = ['ssh']
+
+        if self._cfg.key_file_path is not None:
+            args.extend(['-i', self._cfg.key_file_path])
+
+        if self._cfg.port is not None:
+            args.extend(['-p', str(self._cfg.port)])
+
+        dst = check.non_empty_str(self._cfg.host)
+        if self._cfg.username is not None:
+            dst = f'{self._cfg.username}@{dst}'
+        if self._cfg.password is not None:
+            raise NotImplementedError
+
+        args.append(dst)
+        args.extend(cmd.args)
+        lcmd = CommandRunner.Command(
+            args=args,
+            in_=cmd.in_,
+        )
+        return await self._lcr.run_command(lcmd)
+
+
+class AsyncsshSshCommandRunner(CommandRunner):
+    def __init__(
+            self,
+            cfg: SshConfig,
+    ) -> None:
+        super().__init__()
+        self._cfg = check.isinstance(cfg, SshConfig)
+
+    async def run_command(self, cmd: CommandRunner.Command) -> CommandRunner.Result:
+        arg = ' '.join(map(shlex.quote, cmd.args))
+
+        async with asyncssh.connect(
+                self._cfg.host,
+                encoding=None,
+                **(dict(port=int(self._cfg.port)) if self._cfg.port is not None else {}),
+                **(dict(username=self._cfg.username) if self._cfg.username is not None else {}),
+                **(dict(password=self._cfg.password) if self._cfg.password is not None else {}),
+                **(dict(client_keys=[self._cfg.key_file_path]) if self._cfg.key_file_path is not None else {}),
+                known_hosts=None,
+        ) as conn:
+            proc: asyncssh.SSHClientProcess
+            async with await conn.create_process(arg) as proc:
+                checkw = False
+                timeout = None
+                res = await proc.wait(checkw, timeout)
+
+        return CommandRunner.Result(
+            rc=check.not_none(res.returncode),
+            out=check.isinstance(res.stdout, bytes),
+            err=check.isinstance(res.stderr, bytes),
+        )
+
+
+class ParamikoSshCommandRunner(CommandRunner):
+    def __init__(
+            self,
+            cfg: SshConfig,
+    ) -> None:
+        super().__init__()
+        self._cfg = check.isinstance(cfg, SshConfig)
+
+    def _run_command(self, cmd: CommandRunner.Command) -> CommandRunner.Result:
+        arg = ' '.join(map(shlex.quote, cmd.args))
+
+        kw: dict[str, ta.Any] = {}
+        if self._cfg.port is not None:
+            kw.update(port=int(self._cfg.port))
+        if self._cfg.username is not None:
+            kw.update(username=self._cfg.username)
+        if self._cfg.password is not None:
+            kw.update(password=self._cfg.password)
+        if self._cfg.key_file_path is not None:
+            kw.update(key_filename=self._cfg.key_file_path)
+
+        client: paramiko.client.SSHClient
+        with contextlib.closing(paramiko.client.SSHClient()) as client:
+            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+
+            client.connect(
+                check.not_none(self._cfg.host),
+                **kw,
+            )
+
+            si, so, se = client.exec_command(arg)
+            # https://stackoverflow.com/questions/60037299/attributeerror-nonetype-object-has-no-attribute-time-paramiko
+            si.close()
+
+            rc = so.channel.recv_exit_status()
+            out = so.read()
+            err = se.read()
+
+        return CommandRunner.Result(
+            rc=rc,
+            out=out,
+            err=err,
+        )
+
+    async def run_command(self, cmd: CommandRunner.Command) -> CommandRunner.Result:
+        return await asyncio.to_thread(self._run_command, cmd)
+
+
+async def _a_main() -> None:
+    cmd = CommandRunner.Command(
+        ['ls', '-al'],
+    )
+
+    cfg = load_secrets()
+
+    sc = SshConfig(
+        host=cfg['ec2_ssh_host'],
+        username=cfg['ec2_ssh_user'],
+        key_file_path=cfg['ec2_ssh_key_file'],
+    )
+
+    for scr in [
+        SshSubprocessCommandRunner(sc),
+        AsyncsshSshCommandRunner(sc),
+        ParamikoSshCommandRunner(sc),
+    ]:
+        rc = await scr.run_command(cmd)
+        check.equal(rc.rc, 0)
+        print(rc.out.decode())
+
+
+if __name__ == '__main__':
+    asyncio.run(_a_main())
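
For reference, SshSubprocessCommandRunner just shells out to the system ssh client and delegates to LocalCommandRunner; with a config like the made-up one below it would build this argv for a ['ls', '-al'] command:

    # Illustrative values only.
    sc = SshConfig(host='203.0.113.7', port=2222, username='deploy', key_file_path='~/.ssh/id_ed25519')
    # -> ['ssh', '-i', '~/.ssh/id_ed25519', '-p', '2222', 'deploy@203.0.113.7', 'ls', '-al']

The asyncssh and paramiko runners speak the protocol in-process instead, via the optional 'ssh' extra declared in METADATA below.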
File without changes

ominfra/tools/listresources.py
ADDED
@@ -0,0 +1,256 @@
+"""
+TODO:
+ - async
+ - 'things' not just servers ('resources'?) - s3 buckets, db servers, etc
+ - ssh
+ - keys
+
+unique server ids:
+ aws:{region_name}:{instance_id}
+ runpod:{id}
+ lambda_labs:{id}
+"""
+import json
+import pprint
+import typing as ta
+
+import urllib3
+
+from omlish import check
+from omlish import dataclasses as dc
+from omlish import lang
+from omserv.secrets import load_secrets
+
+
+##
+
+
+@lang.cached_function
+def _get_secrets() -> dict[str, ta.Any]:
+    return load_secrets()
+
+
+##
+
+
+class Resource(lang.Abstract):
+    pass
+
+
+@dc.dataclass(frozen=True)
+class Server(Resource):
+    host: str
+
+
+class ObjectStorage(Resource):
+    pass
+
+
+@dc.dataclass(frozen=True)
+class DbInstance(Resource):
+    host: str
+    port: int
+
+
+##
+
+
+class AwsResource(Resource):
+    pass
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class Ec2Server(Server, AwsResource):
+    id: str
+    region: str
+
+
+def get_ec2_servers() -> list[Ec2Server]:
+    cfg = _get_secrets()
+    import boto3
+    session = boto3.Session(
+        aws_access_key_id=cfg['aws_access_key_id'],
+        aws_secret_access_key=cfg['aws_secret_access_key'],
+        region_name=cfg['aws_region'],
+    )
+    ec2 = session.client('ec2')
+    resp = ec2.describe_instances()
+    out: list[Ec2Server] = []
+    for res in resp.get('Reservations', []):
+        for inst in res.get('Instances', []):
+            out.append(Ec2Server(
+                host=inst['PublicIpAddress'],
+                id=inst['InstanceId'],
+                region=ec2.meta.region_name,
+            ))
+    return out
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class RdsInstance(DbInstance, AwsResource):
+    id: str
+    region: str
+    engine: str
+
+
+def get_rds_instances() -> list[RdsInstance]:
+    cfg = _get_secrets()
+    import boto3
+    session = boto3.Session(
+        aws_access_key_id=cfg['aws_access_key_id'],
+        aws_secret_access_key=cfg['aws_secret_access_key'],
+        region_name=cfg['aws_region'],
+    )
+    rds = session.client('rds')
+    resp = rds.describe_db_instances()
+    out: list[RdsInstance] = []
+    for inst in resp.get('DBInstances', []):
+        out.append(RdsInstance(
+            host=inst['Endpoint']['Address'],
+            port=inst['Endpoint']['Port'],
+            id=inst['DBInstanceIdentifier'],
+            region=rds.meta.region_name,
+            engine=inst['Engine'],
+        ))
+    return out
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class GcpServer(Server):
+    id: str
+    zone: str
+
+
+def get_gcp_servers() -> list[GcpServer]:
+    cfg = _get_secrets()
+    from google.oauth2 import service_account
+    credentials = service_account.Credentials.from_service_account_info(cfg['gcp_oauth2'])
+    from google.cloud import compute_v1
+    instance_client = compute_v1.InstancesClient(credentials=credentials)
+    request = compute_v1.AggregatedListInstancesRequest()
+    request.project = cfg['gcp_project_id']
+    request.max_results = 50
+    out: list[GcpServer] = []
+    for zone, response in instance_client.aggregated_list(request=request):
+        for instance in (response.instances or []):
+            ip = check.single([ac.nat_i_p for ni in instance.network_interfaces for ac in ni.access_configs if ac.nat_i_p])  # noqa
+            out.append(GcpServer(
+                host=ip,
+                id=instance.name,
+                zone=zone,
+            ))
+    return out
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class RunpodServer(Server):
+    id: str
+
+
+def get_runpod_servers() -> list[RunpodServer]:
+    api_key = _get_secrets()['runpod_api_key']
+    query = 'query Pods { myself { pods { id runtime { ports { ip isIpPublic privatePort publicPort type } } } } }'
+    resp = urllib3.request(
+        'POST',
+        f'https://api.runpod.io/graphql?api_key={api_key}',
+        body=('{"query": "' + query + '"}').encode('utf-8'),
+        headers={
+            'content-type': 'application/json',
+        },
+    )
+    dct = json.loads(resp.data.decode('utf-8')).get('data', {}).get('myself', {})
+    out = []
+    for pod in dct.get('pods', []):
+        ssh = check.single([p for p in pod['runtime']['ports'] if p['isIpPublic'] and p['privatePort'] == 22])
+        out.append(RunpodServer(
+            host=f'{ssh["ip"]}:{ssh["publicPort"]}',
+            id=pod['id'],
+        ))
+    return out
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class LambdaLabsServer(Server):
+    id: str
+
+
+def get_lambda_labs_servers() -> list[LambdaLabsServer]:
+    api_key = _get_secrets()['lambda_labs_api_key']
+    resp = urllib3.request(
+        'GET',
+        'https://cloud.lambdalabs.com/api/v1/instances',
+        headers=urllib3.make_headers(
+            basic_auth=f'{api_key}:',
+        ),
+    )
+    insts = json.loads(resp.data.decode('utf-8')).get('data', {})
+    out: list[LambdaLabsServer] = []
+    for inst in insts:
+        out.append(LambdaLabsServer(
+            host=inst['ip'],
+            id=inst['id'],
+        ))
+    return out
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class DigitalOceanServer(Server):
+    id: str
+
+
+def get_digital_ocean_servers() -> list[DigitalOceanServer]:
+    api_key = _get_secrets()['digital_ocean_api_key']
+    resp = urllib3.request(
+        'GET',
+        'https://api.digitalocean.com/v2/droplets',
+        headers={
+            'Authorization': f'Bearer {api_key}',
+        },
+    )
+    droplets = json.loads(resp.data.decode('utf-8')).get('droplets', [])
+    out: list[DigitalOceanServer] = []
+    for droplet in droplets:
+        net = check.single([n for n in droplet['networks']['v4'] if n['type'] == 'public'])
+        out.append(DigitalOceanServer(
+            host=net['ip_address'],
+            id=droplet['id'],
+        ))
+    return out
+
+
+##
+
+
+def _main() -> None:
+    rsrcs: list[Resource] = [
+        *get_ec2_servers(),
+        *get_rds_instances(),
+        *get_gcp_servers(),
+        *get_runpod_servers(),
+        *get_lambda_labs_servers(),
+        *get_digital_ocean_servers(),
+    ]
+
+    pprint.pprint(rsrcs)
+
+
+if __name__ == '__main__':
+    _main()
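
The docstring's unique-id scheme (aws:{region_name}:{instance_id}, runpod:{id}, lambda_labs:{id}) is described but not implemented in this file; a hypothetical helper over the dataclasses above might look like:

    # Sketch only -- resource_id() is not part of the package.
    def resource_id(r: Resource) -> str:
        if isinstance(r, Ec2Server):
            return f'aws:{r.region}:{r.id}'
        if isinstance(r, RunpodServer):
            return f'runpod:{r.id}'
        if isinstance(r, LambdaLabsServer):
            return f'lambda_labs:{r.id}'
        raise TypeError(r)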
ominfra-0.0.0.dev7.dist-info/LICENSE
ADDED
@@ -0,0 +1,21 @@
+Copyright 2023- wrmsr
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
+derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
ominfra-0.0.0.dev7.dist-info/METADATA
ADDED
@@ -0,0 +1,19 @@
+Metadata-Version: 2.1
+Name: ominfra
+Version: 0.0.0.dev7
+Summary: ominfra
+Author: wrmsr
+License: BSD-3-Clause
+Project-URL: source, https://github.com/wrmsr/omlish
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Development Status :: 2 - Pre-Alpha
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Operating System :: POSIX
+Requires-Python: >=3.12
+License-File: LICENSE
+Requires-Dist: omlish ==0.0.0.dev7
+Provides-Extra: ssh
+Requires-Dist: paramiko >=3.4 ; extra == 'ssh'
+Requires-Dist: asyncssh >=2.16 ; (python_version < "3.13") and extra == 'ssh'
+