plain.dev 0.1.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plain/dev/README.md +96 -0
- plain/dev/__init__.py +5 -0
- plain/dev/cli.py +166 -0
- plain/dev/config.py +20 -0
- plain/dev/contribute/__init__.py +3 -0
- plain/dev/contribute/cli.py +114 -0
- plain/dev/db/__init__.py +3 -0
- plain/dev/db/cli.py +113 -0
- plain/dev/db/container.py +151 -0
- plain/dev/debug.py +12 -0
- plain/dev/default_settings.py +5 -0
- plain/dev/pid.py +20 -0
- plain/dev/precommit/__init__.py +3 -0
- plain/dev/precommit/cli.py +123 -0
- plain/dev/requests.py +224 -0
- plain/dev/services.py +80 -0
- plain/dev/templates/dev/requests.html +134 -0
- plain/dev/urls.py +9 -0
- plain/dev/utils.py +14 -0
- plain/dev/views.py +37 -0
- plain_dev-0.1.0.dist-info/LICENSE +28 -0
- plain_dev-0.1.0.dist-info/METADATA +126 -0
- plain_dev-0.1.0.dist-info/RECORD +25 -0
- plain_dev-0.1.0.dist-info/WHEEL +4 -0
- plain_dev-0.1.0.dist-info/entry_points.txt +5 -0
plain/dev/README.md
ADDED
@@ -0,0 +1,96 @@
# plain.dev

A single command that runs everything you need for local development.

The `plain.dev` package can be [installed from PyPI](https://pypi.org/project/plain.dev/), and does *not* need to be added to `INSTALLED_PACKAGES`.

- [`plain dev`](#plain-dev)
- [`plain dev services`](#plain-dev-services)
- [`plain pre-commit`](#plain-pre-commit)
- [`plain contrib`](#plain-contrib)
- [VS Code debugging](#vs-code-debugging)

## `plain dev`

The `plain dev` command does several things:

- Sets `PLAIN_CSRF_TRUSTED_ORIGINS` to localhost by default
- Runs `plain preflight` to check for any issues
- Executes any pending model migrations
- Starts `gunicorn` with `--reload`
- Runs `plain tailwind compile --watch`, if `plain.tailwind` is installed
- Runs any custom processes defined in `pyproject.toml` at `tool.plain.dev.run`
- Starts necessary services (ex. Postgres) defined in `pyproject.toml` at `tool.plain.dev.services`

### Services

Use services to define databases or other processes that your app *needs* to be functional. The services will be started automatically in `plain dev`, but also in `plain pre-commit` (so preflight checks and tests have a database).

Ultimately, how you run your development database is up to you, but a recommended starting point is to use Docker:

```toml
# pyproject.toml
[tool.plain.dev.services]
postgres = {cmd = "docker run --name app-postgres --rm -p 54321:5432 -v $(pwd)/.plain/dev/pgdata:/var/lib/postgresql/data -e POSTGRES_PASSWORD=postgres postgres:15 postgres"}
```

### Custom processes

Unlike [services](#services), custom processes are *only* run during `plain dev`. This is a good place to run something like [ngrok](https://ngrok.com/) or a [Plain worker](../../../plain-worker), which you might need in order to use your local site, but don't need running when executing tests, for example.

```toml
# pyproject.toml
[tool.plain.dev.run]
ngrok = {cmd = "ngrok http $PORT"}
```

## `plain dev services`

Starts your [services](#services) by themselves.

## `plain pre-commit`

A built-in pre-commit hook that can be installed with `plain pre-commit --install`.

Runs:

- Custom commands defined in `pyproject.toml` at `tool.plain.pre-commit.run` (see the sketch after this list)
- `plain code check`, if [`plain.code`](https://plainframework.com/docs/plain-code/plain/code/) is installed
- `poetry lock --check`, if using [Poetry](https://python-poetry.org/)
- `plain preflight --database default`
- `plain legacy migrate --check`
- `plain legacy makemigrations --dry-run --check`
- `plain compile`
- `plain test`
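
The layout of `tool.plain.pre-commit.run` isn't shown in this diff. Assuming it mirrors `tool.plain.dev.run` above (a table of entries with a `cmd` key), a minimal sketch of reading and running those custom commands could look like this; the `lint`/`ruff` entry is purely hypothetical:

```python
import subprocess
import tomllib  # Python 3.11+; plain.dev itself falls back to tomli

with open("pyproject.toml", "rb") as f:
    pyproject = tomllib.load(f)

# Hypothetical layout, by analogy with [tool.plain.dev.run]:
#
# [tool.plain.pre-commit.run]
# lint = {cmd = "ruff check ."}
commands = (
    pyproject.get("tool", {}).get("plain", {}).get("pre-commit", {}).get("run", {})
)

# Run each configured command, stopping on the first failure
for name, data in commands.items():
    print(f"Running {name}...")
    subprocess.run(data["cmd"], shell=True, check=True)
```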

## VS Code debugging

Since `plain dev` runs multiple processes at once, the regular [pdb](https://docs.python.org/3/library/pdb.html) debuggers don't quite work.

Instead, we include [microsoft/debugpy](https://github.com/microsoft/debugpy) and an `attach` function to make it even easier to use VS Code's debugger.

First, import and run the `debug.attach()` function:

```python
class HomeView(TemplateView):
    template_name = "home.html"

    def get_template_context(self):
        context = super().get_template_context()

        # Make sure the debugger is attached (will need to be if runserver reloads)
        from plain.dev import debug; debug.attach()

        # Add a breakpoint (or use the gutter in VS Code to add one)
        breakpoint()

        return context
```

When you load the page, you'll see "Waiting for debugger to attach...".

You can then run the VS Code debugger and attach to an existing Python process at localhost:5678.
plain/dev/__init__.py
ADDED
plain/dev/cli.py
ADDED
@@ -0,0 +1,166 @@
import json
import os
import subprocess
import sys
from importlib.util import find_spec
from pathlib import Path

import click
from honcho.manager import Manager as HonchoManager

from plain.runtime import APP_PATH

from .db import cli as db_cli
from .pid import Pid
from .services import Services
from .utils import has_pyproject_toml, plainpackage_installed

try:
    import tomllib
except ModuleNotFoundError:
    import tomli as tomllib


@click.group(invoke_without_command=True)
@click.pass_context
@click.option(
    "--port",
    "-p",
    default=8000,
    type=int,
    help="Port to run the web server on",
    envvar="PORT",
)
def cli(ctx, port):
    """Start local development"""

    if ctx.invoked_subcommand:
        return

    returncode = Dev(port=port).run()
    if returncode:
        sys.exit(returncode)


@cli.command()
def services():
    """Start additional services defined in pyproject.toml"""
    Services().run()


class Dev:
    def __init__(self, *, port):
        self.manager = HonchoManager()
        self.port = port
        self.plain_env = {
            **os.environ,
            "PYTHONUNBUFFERED": "true",
        }
        self.custom_process_env = {
            **self.plain_env,
            "PORT": str(self.port),
            "PYTHONPATH": os.path.join(APP_PATH.parent, "app"),
        }

    def run(self):
        pid = Pid()
        pid.write()

        try:
            self.add_csrf_trusted_origins()
            self.run_preflight()
            self.add_gunicorn()
            self.add_tailwind()
            self.add_pyproject_run()
            self.add_services()

            self.manager.loop()

            return self.manager.returncode
        finally:
            pid.rm()

    def add_csrf_trusted_origins(self):
        if "PLAIN_CSRF_TRUSTED_ORIGINS" in os.environ:
            return

        csrf_trusted_origins = json.dumps(
            [f"http://localhost:{self.port}", f"http://127.0.0.1:{self.port}"]
        )

        click.secho(
            f"Automatically set PLAIN_CSRF_TRUSTED_ORIGINS={click.style(csrf_trusted_origins, underline=True)}",
            bold=True,
        )

        # Set PLAIN_CSRF_TRUSTED_ORIGINS for plain and custom processes
        self.plain_env["PLAIN_CSRF_TRUSTED_ORIGINS"] = csrf_trusted_origins
        self.custom_process_env["PLAIN_CSRF_TRUSTED_ORIGINS"] = csrf_trusted_origins

    def run_preflight(self):
        if subprocess.run(["plain", "preflight"], env=self.plain_env).returncode:
            click.secho("Preflight check failed!", fg="red")
            sys.exit(1)

    def add_gunicorn(self):
        plain_db_installed = find_spec("plain.models") is not None

        # TODO not necessarily watching the right .env...
        # could return path from env.load?
        extra_watch_files = []
        for f in os.listdir(APP_PATH.parent):
            if f.startswith(".env"):
                # Will include some extra, but good enough for now
                extra_watch_files.append(f)

        reload_extra = " ".join(f"--reload-extra-file {f}" for f in extra_watch_files)
        gunicorn = f"gunicorn --bind 127.0.0.1:{self.port} --reload plain.wsgi:app --timeout 60 --access-logfile - --error-logfile - {reload_extra} --access-logformat '\"%(r)s\" status=%(s)s length=%(b)s dur=%(M)sms'"

        if plain_db_installed:
            runserver_cmd = (
                f"plain models db-wait && plain legacy migrate && {gunicorn}"
            )
        else:
            runserver_cmd = gunicorn

        if "WEB_CONCURRENCY" not in self.plain_env:
            # Default to two workers so request log etc are less
            # likely to get locked up
            self.plain_env["WEB_CONCURRENCY"] = "2"

        self.manager.add_process("plain", runserver_cmd, env=self.plain_env)

    def add_tailwind(self):
        if not plainpackage_installed("tailwind"):
            return

        self.manager.add_process("tailwind", "plain tailwind compile --watch")

    def add_pyproject_run(self):
        if not has_pyproject_toml(APP_PATH.parent):
            return

        with open(Path(APP_PATH.parent, "pyproject.toml"), "rb") as f:
            pyproject = tomllib.load(f)

        for name, data in (
            pyproject.get("tool", {}).get("plain", {}).get("dev", {}).get("run", {})
        ).items():
            env = {
                **self.custom_process_env,
                **data.get("env", {}),
            }
            self.manager.add_process(name, data["cmd"], env=env)

    def add_services(self):
        services = Services.get_services(APP_PATH.parent)
        for name, data in services.items():
            env = {
                **os.environ,
                "PYTHONUNBUFFERED": "true",
                **data.get("env", {}),
            }
            self.manager.add_process(name, data["cmd"], env=env)


cli.add_command(db_cli)
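
Note: the `Dev` class above is a thin orchestration layer over honcho's process manager. As a point of reference, a minimal standalone sketch of the same pattern (named processes, shell commands, per-process environments, then a blocking loop) might look like the following; the two commands here are placeholders, not part of the package:

```python
import os
import sys

from honcho.manager import Manager

manager = Manager()

# Each process gets a name, a shell command, and its own environment,
# the same add_process() shape Dev.add_gunicorn()/add_services() use.
env = {**os.environ, "PYTHONUNBUFFERED": "true"}
manager.add_process("web", "python -m http.server 8001", env=env)
manager.add_process("clock", "sh -c 'while true; do date; sleep 5; done'", env=env)

# loop() multiplexes output and stops everything when one process exits,
# which is the loop that `plain dev` blocks on.
manager.loop()
sys.exit(manager.returncode)
```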
plain/dev/config.py
ADDED
@@ -0,0 +1,20 @@
import importlib
from pathlib import Path

from plain.packages import PackageConfig
from plain.runtime import settings


class Config(PackageConfig):
    name = "plain.dev"

    def ready(self):
        # Symlink the plain package into .plain so we can look at it easily
        plain_path = Path(
            importlib.util.find_spec("plain.runtime").origin
        ).parent.parent
        if not settings.PLAIN_TEMP_PATH.exists():
            settings.PLAIN_TEMP_PATH.mkdir()
        src_path = settings.PLAIN_TEMP_PATH / "src"
        if plain_path.exists() and not src_path.exists():
            src_path.symlink_to(plain_path)
plain/dev/contribute/__init__.py
ADDED
plain/dev/contribute/cli.py
ADDED
@@ -0,0 +1,114 @@
import subprocess
import sys
from pathlib import Path

import click
import tomllib


@click.command("contribute")
@click.option("--repo", default="../plain", help="Path to the plain repo")
@click.argument("package")
def cli(package, repo):
    """Contribute to plain by linking a package locally."""

    if package == "reset":
        click.secho("Undoing any changes to pyproject.toml and poetry.lock", bold=True)
        result = subprocess.run(["git", "checkout", "pyproject.toml", "poetry.lock"])
        if result.returncode:
            click.secho("Failed to checkout pyproject.toml and poetry.lock", fg="red")
            sys.exit(result.returncode)

        click.secho("Removing current .venv", bold=True)
        result = subprocess.run(["rm", "-rf", ".venv"])
        if result.returncode:
            click.secho("Failed to remove .venv", fg="red")
            sys.exit(result.returncode)

        click.secho("Running poetry install", bold=True)
        result = subprocess.run(["poetry", "install"])
        if result.returncode:
            click.secho("Failed to install", fg="red")
            sys.exit(result.returncode)

        return

    repo = Path(repo)
    if not repo.exists():
        click.secho(f"Repo not found at {repo}", fg="red")
        return

    repo_branch = (
        subprocess.check_output(
            [
                "git",
                "rev-parse",
                "--abbrev-ref",
                "HEAD",
            ],
            cwd=repo,
        )
        .decode()
        .strip()
    )
    click.secho(f"Using repo at {repo} ({repo_branch} branch)", bold=True)

    pyproject = Path("pyproject.toml")
    if not pyproject.exists():
        click.secho("pyproject.toml not found", fg="red")
        return

    poetry_group = "main"

    with pyproject.open("rb") as f:
        pyproject_data = tomllib.load(f)
    poetry_dependencies = (
        pyproject_data.get("tool", {}).get("poetry", {}).get("dependencies", {})
    )

    for group_name, group_data in (
        pyproject_data.get("tool", {}).get("poetry", {}).get("group", {}).items()
    ):
        if package in group_data.get("dependencies", {}).keys():
            poetry_group = group_name
            break

    if not poetry_group and package not in poetry_dependencies.keys():
        click.secho(
            f"{package} not found in pyproject.toml (only poetry is supported)",
            fg="red",
        )
        return

    click.secho(f"Linking {package} to {repo}", bold=True)
    if package == "plain" or package.startswith("plain-"):
        result = subprocess.run(
            [
                "poetry",
                "add",
                "--editable",
                "--group",
                poetry_group,
                str(repo / package),  # Link a subdirectory
            ]
        )
        if result.returncode:
            click.secho("Failed to link the package", fg="red")
            sys.exit(result.returncode)
    elif package.startswith("plainx-"):
        result = subprocess.run(
            [
                "poetry",
                "add",
                "--editable",
                "--group",
                poetry_group,
                str(repo),
            ]
        )
        if result.returncode:
            click.secho("Failed to link the package", fg="red")
            sys.exit(result.returncode)
    else:
        click.secho(f"Unknown package {package}", fg="red")
        sys.exit(2)
plain/dev/db/__init__.py
ADDED
plain/dev/db/cli.py
ADDED
@@ -0,0 +1,113 @@
import os
import sys

import click

from ..services import Services
from .container import DBContainer


@click.group("db")
def cli():
    """Start, stop, and manage the local Postgres database"""
    pass


# @cli.command()
# def reset():
#     DBContainer().reset(create=True)
#     click.secho("Local development database reset", fg="green")


@cli.command()
@click.argument("export_path", default="")
def export(export_path):
    """Export the local database to a file"""
    if not export_path:
        current_dir_name = os.path.basename(os.getcwd())
        export_path = f"{current_dir_name}-dev-db.sql"
    with Services():
        export_successful = DBContainer().export(export_path)

    if export_successful:
        click.secho(f"Local development database exported to {export_path}", fg="green")
    else:
        click.secho("Export failed", fg="red")
        sys.exit(1)


@cli.command("import")
@click.argument("sql_file")
def import_db(sql_file):
    """Import a database file into the local database"""

    print(f"Importing {sql_file} ({os.path.getsize(sql_file) / 1024 / 1024:.2f} MB)")

    with Services():
        successful = DBContainer().import_sql(sql_file)

    if successful:
        click.secho(f"Local development database imported from {sql_file}", fg="green")
    else:
        click.secho("Import failed", fg="red")
        sys.exit(1)


@cli.group()
def snapshot():
    """Manage local database snapshots"""
    pass


@snapshot.command("create")
@click.argument("name")
@click.pass_context
def snapshot_create(ctx, name):
    """Create a snapshot of the main database"""
    created = DBContainer().create_snapshot(name)
    if not created:
        click.secho(f'Snapshot "{name}" already exists', fg="red")
        sys.exit(1)

    click.secho(f'Snapshot "{name}" created', fg="green")
    print()
    ctx.invoke(snapshot_list)


@snapshot.command("list")
def snapshot_list():
    """List all snapshots"""
    DBContainer().list_snapshots()


@snapshot.command("restore")
@click.argument("name")
@click.option("--yes", "-y", is_flag=True)
def snapshot_restore(name, yes):
    """Restore a snapshot to the main database"""
    if not yes:
        click.confirm(
            f'Are you sure you want to restore snapshot "{name}" to the main database?',
            abort=True,
        )

    DBContainer().restore_snapshot(name)
    click.secho(f'Snapshot "{name}" restored', fg="green")


@snapshot.command("delete")
@click.argument("name")
@click.pass_context
def snapshot_delete(ctx, name):
    """Delete a snapshot"""
    deleted = DBContainer().delete_snapshot(name)
    if not deleted:
        click.secho(f'Snapshot "{name}" does not exist', fg="red")
        sys.exit(1)
    click.secho(f'Snapshot "{name}" deleted', fg="green")
    print()
    ctx.invoke(snapshot_list)


if __name__ == "__main__":
    cli()
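
Note: because `db` is a regular click group (attached to `plain dev` via `cli.add_command(db_cli)` above), it can also be exercised programmatically, for example in a test. A small sketch using click's test runner; the command still shells out to Docker, so it assumes the dev database container exists and is running:

```python
from click.testing import CliRunner

from plain.dev.db.cli import cli

runner = CliRunner()

# Roughly equivalent to running `plain dev db snapshot list` from a shell
result = runner.invoke(cli, ["snapshot", "list"])
print(result.exit_code, result.output)
```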
plain/dev/db/container.py
ADDED
@@ -0,0 +1,151 @@
import os
import shlex
import subprocess

from plain.runtime import APP_PATH, settings

SNAPSHOT_DB_PREFIX = "plaindb_snapshot_"


class DBContainer:
    def __init__(self):
        project_root = APP_PATH.parent
        tmp_dir = settings.PLAIN_TEMP_PATH

        name = os.path.basename(project_root) + "-postgres-1"

        if "DATABASE_URL" in os.environ:
            from plain.models import database_url

            postgres_version = os.environ.get("POSTGRES_VERSION")
            parsed_db_url = database_url.parse(os.environ.get("DATABASE_URL"))

        self.name = name
        self.tmp_dir = os.path.abspath(tmp_dir)
        self.postgres_version = postgres_version or "13"
        self.postgres_port = parsed_db_url.get("PORT", "5432")
        self.postgres_db = parsed_db_url.get("NAME", "postgres")
        self.postgres_user = parsed_db_url.get("USER", "postgres")
        self.postgres_password = parsed_db_url.get("PASSWORD", "postgres")

    def execute(self, command, *args, **kwargs):
        docker_flags = kwargs.pop("docker_flags", "-it")
        return subprocess.run(
            [
                "docker",
                "exec",
                docker_flags,
                self.name,
                *shlex.split(command),
            ]
            + list(args),
            check=True,
            **kwargs,
        )

    def reset(self, create=False):
        try:
            self.execute(
                f"dropdb {self.postgres_db} --force -U {self.postgres_user}",
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
        except subprocess.CalledProcessError as e:
            if "does not exist" not in e.stdout.decode():
                print(e.stderr.decode())
                raise

        if create:
            self.execute(
                f"createdb {self.postgres_db} -U {self.postgres_user}",
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )

    def terminate_connections(self):
        self.execute(
            f"psql -U {self.postgres_user} {self.postgres_db} -c",
            f"SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = '{self.postgres_db}' AND pid <> pg_backend_pid();",
            stdout=subprocess.DEVNULL,
        )

    def create_snapshot(self, name):
        snapshot_name = f"{SNAPSHOT_DB_PREFIX}{name}"
        current_git_branch = (
            subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
            .decode()
            .strip()
        )
        description = f"branch={current_git_branch}"

        self.terminate_connections()
        try:
            self.execute(
                f"createdb {snapshot_name} '{description}' -U {self.postgres_user} -T {self.postgres_db}",
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
        except subprocess.CalledProcessError as e:
            if "already exists" in e.stdout.decode():
                return False
            else:
                raise

        return True

    def list_snapshots(self):
        self.execute(
            f"psql -U {self.postgres_user} {self.postgres_db} -c",
            f"SELECT REPLACE(datname, '{SNAPSHOT_DB_PREFIX}', '') as name, pg_size_pretty(pg_database_size(datname)) as size, pg_catalog.shobj_description(oid, 'pg_database') AS description, (pg_stat_file('base/'||oid ||'/PG_VERSION')).modification as created FROM pg_catalog.pg_database WHERE datname LIKE '{SNAPSHOT_DB_PREFIX}%' ORDER BY created;",
        )

    def delete_snapshot(self, name):
        snapshot_name = f"{SNAPSHOT_DB_PREFIX}{name}"
        try:
            self.execute(
                f"dropdb {snapshot_name} -U {self.postgres_user}",
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
        except subprocess.CalledProcessError as e:
            if "does not exist" in e.stdout.decode():
                return False
            else:
                raise

        return True

    def restore_snapshot(self, name):
        snapshot_name = f"{SNAPSHOT_DB_PREFIX}{name}"
        self.reset(create=False)
        self.execute(
            f"createdb {self.postgres_db} -U {self.postgres_user} -T {snapshot_name}",
        )

    def export(self, export_path):
        successful = (
            subprocess.run(
                [
                    "docker",
                    "exec",
                    self.name,
                    "/bin/bash",
                    "-c",
                    f"pg_dump -U {self.postgres_user} {self.postgres_db}",
                ],
                stdout=open(export_path, "w+"),
            ).returncode
            == 0
        )
        return successful

    def import_sql(self, sql_file):
        self.reset(create=True)
        successful = (
            subprocess.run(
                f"docker exec -i {self.name} psql -U {self.postgres_user} {self.postgres_db} < {shlex.quote(sql_file)}",
                shell=True,
            ).returncode
            == 0
        )
        return successful
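
Note: every `DBContainer` method shells out to `docker exec` against the project's Postgres container, so the container must already be running (for example via `plain dev` or `plain dev services`) and `DATABASE_URL` is expected to be set. A hypothetical snapshot workflow using the methods above, run from inside the app's git repo:

```python
from plain.dev.db.container import DBContainer

db = DBContainer()

# Copy the current database into plaindb_snapshot_pre_migration,
# tagged with the current git branch as its description
if db.create_snapshot("pre_migration"):
    print("Snapshot created")

db.list_snapshots()

# Drop the main database, recreate it from the snapshot, then clean up
db.restore_snapshot("pre_migration")
db.delete_snapshot("pre_migration")
```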
plain/dev/debug.py
ADDED
@@ -0,0 +1,12 @@
import debugpy


def attach(endpoint=("localhost", 5678)):
    if debugpy.is_client_connected():
        print("Debugger already attached")
        return

    debugpy.listen(endpoint)
    print("Waiting for debugger to attach...")
    debugpy.wait_for_client()
    print("Debugger attached!")
|