plain.dev 0.0.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,151 @@
+ import os
+ import shlex
+ import subprocess
+
+ from plain.runtime import APP_PATH, settings
+
+ SNAPSHOT_DB_PREFIX = "plaindb_snapshot_"
+
+
+ class DBContainer:
+     def __init__(self):
+         project_root = APP_PATH.parent
+         tmp_dir = settings.PLAIN_TEMP_PATH
+
+         name = os.path.basename(project_root) + "-postgres-1"
+
+         if "DATABASE_URL" in os.environ:
+             from plain.models import database_url
+
+             postgres_version = os.environ.get("POSTGRES_VERSION")
+             parsed_db_url = database_url.parse(os.environ.get("DATABASE_URL"))
+
+         self.name = name
+         self.tmp_dir = os.path.abspath(tmp_dir)
+         self.postgres_version = postgres_version or "13"
+         self.postgres_port = parsed_db_url.get("PORT", "5432")
+         self.postgres_db = parsed_db_url.get("NAME", "postgres")
+         self.postgres_user = parsed_db_url.get("USER", "postgres")
+         self.postgres_password = parsed_db_url.get("PASSWORD", "postgres")
+
+     def execute(self, command, *args, **kwargs):
+         docker_flags = kwargs.pop("docker_flags", "-it")
+         return subprocess.run(
+             [
+                 "docker",
+                 "exec",
+                 docker_flags,
+                 self.name,
+                 *shlex.split(command),
+             ]
+             + list(args),
+             check=True,
+             **kwargs,
+         )
+
+     def reset(self, create=False):
+         try:
+             self.execute(
+                 f"dropdb {self.postgres_db} --force -U {self.postgres_user}",
+                 stdout=subprocess.PIPE,
+                 stderr=subprocess.PIPE,
+             )
+         except subprocess.CalledProcessError as e:
+             if "does not exist" not in e.stdout.decode():
+                 print(e.stderr.decode())
+                 raise
+
+         if create:
+             self.execute(
+                 f"createdb {self.postgres_db} -U {self.postgres_user}",
+                 stdout=subprocess.PIPE,
+                 stderr=subprocess.PIPE,
+             )
+
+     def terminate_connections(self):
+         self.execute(
+             f"psql -U {self.postgres_user} {self.postgres_db} -c",
+             f"SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = '{self.postgres_db}' AND pid <> pg_backend_pid();",
+             stdout=subprocess.DEVNULL,
+         )
+
+     def create_snapshot(self, name):
+         snapshot_name = f"{SNAPSHOT_DB_PREFIX}{name}"
+         current_git_branch = (
+             subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
+             .decode()
+             .strip()
+         )
+         description = f"branch={current_git_branch}"
+
+         self.terminate_connections()
+         try:
+             self.execute(
+                 f"createdb {snapshot_name} '{description}' -U {self.postgres_user} -T {self.postgres_db}",
+                 stdout=subprocess.PIPE,
+                 stderr=subprocess.PIPE,
+             )
+         except subprocess.CalledProcessError as e:
+             if "already exists" in e.stdout.decode():
+                 return False
+             else:
+                 raise
+
+         return True
+
+     def list_snapshots(self):
+         self.execute(
+             f"psql -U {self.postgres_user} {self.postgres_db} -c",
+             f"SELECT REPLACE(datname, '{SNAPSHOT_DB_PREFIX}', '') as name, pg_size_pretty(pg_database_size(datname)) as size, pg_catalog.shobj_description(oid, 'pg_database') AS description, (pg_stat_file('base/'||oid ||'/PG_VERSION')).modification as created FROM pg_catalog.pg_database WHERE datname LIKE '{SNAPSHOT_DB_PREFIX}%' ORDER BY created;",
+         )
+
+     def delete_snapshot(self, name):
+         snapshot_name = f"{SNAPSHOT_DB_PREFIX}{name}"
+         try:
+             self.execute(
+                 f"dropdb {snapshot_name} -U {self.postgres_user}",
+                 stdout=subprocess.PIPE,
+                 stderr=subprocess.PIPE,
+             )
+         except subprocess.CalledProcessError as e:
+             if "does not exist" in e.stdout.decode():
+                 return False
+             else:
+                 raise
+
+         return True
+
+     def restore_snapshot(self, name):
+         snapshot_name = f"{SNAPSHOT_DB_PREFIX}{name}"
+         self.reset(create=False)
+         self.execute(
+             f"createdb {self.postgres_db} -U {self.postgres_user} -T {snapshot_name}",
+         )
+
+     def export(self, export_path):
+         successful = (
+             subprocess.run(
+                 [
+                     "docker",
+                     "exec",
+                     self.name,
+                     "/bin/bash",
+                     "-c",
+                     f"pg_dump -U {self.postgres_user} {self.postgres_db}",
+                 ],
+                 stdout=open(export_path, "w+"),
+             ).returncode
+             == 0
+         )
+         return successful
+
+     def import_sql(self, sql_file):
+         self.reset(create=True)
+         successful = (
+             subprocess.run(
+                 f"docker exec -i {self.name} psql -U {self.postgres_user} {self.postgres_db} < {shlex.quote(sql_file)}",
+                 shell=True,
+             ).returncode
+             == 0
+         )
+         return successful
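
For orientation, a minimal usage sketch of the container wrapper above. The import path is an assumption (this hunk does not show the file's location inside the wheel), the snapshot name is illustrative, and it assumes the project's Postgres container is already running.

    # Hedged sketch; the module path below is assumed, not confirmed by this diff.
    from plain.dev.db.container import DBContainer  # assumed location

    db = DBContainer()
    if db.create_snapshot("before-migration"):  # createdb -T copies the current db
        print("Snapshot created")
    db.restore_snapshot("before-migration")     # drop current db, recreate from snapshot
    db.export("dump.sql")                       # pg_dump the current database to a file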
plain/dev/debug.py ADDED
@@ -0,0 +1,12 @@
+ import debugpy
+
+
+ def attach(endpoint=("localhost", 5678)):
+     if debugpy.is_client_connected():
+         print("Debugger already attached")
+         return
+
+     debugpy.listen(endpoint)
+     print("Waiting for debugger to attach...")
+     debugpy.wait_for_client()
+     print("Debugger attached!")
@@ -0,0 +1,5 @@
+ DEV_REQUESTS_IGNORE_PATHS = [
+     "/favicon.ico",
+ ]
+
+ DEV_REQUESTS_MAX = 50
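
These two settings are consumed by `requests.py` below (`should_capture_request` and `RequestLog.delete_old_logs`). A hedged sketch of overriding them, assuming a Django-style project settings module that this diff does not show; the extra ignored path is illustrative only.

    # Hypothetical project-level override of the defaults above.
    DEV_REQUESTS_IGNORE_PATHS = [
        "/favicon.ico",
        "/health/",  # illustrative extra path to skip
    ]
    DEV_REQUESTS_MAX = 100  # keep more captured requests before pruning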
plain/dev/pid.py ADDED
@@ -0,0 +1,20 @@
+ import os
+
+ from plain.runtime import settings
+
+
+ class Pid:
+     def __init__(self):
+         self.pidfile = settings.PLAIN_TEMP_PATH / "dev.pid"
+
+     def write(self):
+         pid = os.getpid()
+         self.pidfile.parent.mkdir(parents=True, exist_ok=True)
+         with self.pidfile.open("w+") as f:
+             f.write(str(pid))
+
+     def rm(self):
+         self.pidfile.unlink()
+
+     def exists(self):
+         return self.pidfile.exists()
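
A minimal sketch of the intended pidfile lifecycle, based only on the methods above; the body of the `try` block is a stand-in for whatever long-running command owns the pidfile.

    from plain.dev.pid import Pid

    pid = Pid()
    pid.write()  # records os.getpid() at PLAIN_TEMP_PATH/dev.pid
    try:
        ...  # stand-in for the long-running dev process
    finally:
        pid.rm()  # remove the file so later Pid().exists() checks don't see a stale pid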
@@ -0,0 +1,3 @@
+ from .cli import cli
+
+ __all__ = ["cli"]
@@ -0,0 +1,123 @@
+ import os
+ import subprocess
+ import sys
+ from importlib.util import find_spec
+ from pathlib import Path
+
+ from plain.cli.print import print_event
+
+ from ..services import Services
+
+ try:
+     import tomllib
+ except ModuleNotFoundError:
+     import tomli as tomllib
+
+ import click
+
+
+ def install_git_hook():
+     hook_path = os.path.join(".git", "hooks", "pre-commit")
+     if os.path.exists(hook_path):
+         print("pre-commit hook already exists")
+     else:
+         with open(hook_path, "w") as f:
+             f.write(
+                 """#!/bin/sh
+ plain pre-commit"""
+             )
+         os.chmod(hook_path, 0o755)
+         print("pre-commit hook installed")
+
+
+ @click.command()
+ @click.option("--install", is_flag=True)
+ def cli(install):
+     """Git pre-commit checks"""
+     if install:
+         install_git_hook()
+         return
+
+     pyproject = Path("pyproject.toml")
+
+     with Services():
+         if pyproject.exists():
+             with open(pyproject, "rb") as f:
+                 pyproject = tomllib.load(f)
+             for name, data in (
+                 pyproject.get("tool", {})
+                 .get("plain", {})
+                 .get("pre-commit", {})
+                 .get("run", {})
+             ).items():
+                 cmd = data["cmd"]
+                 print_event(f"Custom: {name} -> {cmd}")
+                 result = subprocess.run(cmd, shell=True)
+                 if result.returncode != 0:
+                     sys.exit(result.returncode)
+
+         # Run this first since it's probably the most likely to fail
+         if find_spec("plain.code"):
+             check_short("Running plain code checks", "plain", "code", "check")
+
+         if Path("poetry.lock").exists():
+             check_short("Checking poetry.lock", "poetry", "lock", "--check")
+
+         if plain_db_connected():
+             check_short(
+                 "Running preflight checks",
+                 "plain",
+                 "preflight",
+                 "--database",
+                 "default",
+             )
+             check_short(
+                 "Checking Plain migrations", "plain", "legacy", "migrate", "--check"
+             )
+             check_short(
+                 "Checking for Plain models missing migrations",
+                 "plain",
+                 "legacy",
+                 "makemigrations",
+                 "--dry-run",
+                 "--check",
+             )
+         else:
+             check_short("Running Plain checks (without database)", "plain", "preflight")
+             click.secho("--> Skipping migration checks", bold=True, fg="yellow")
+
+         print_event("Running plain compile")
+         result = subprocess.run(["plain", "compile"])
+         if result.returncode != 0:
+             sys.exit(result.returncode)
+
+         if find_spec("plain.pytest"):
+             print_event("Running tests")
+             result = subprocess.run(["plain", "test"])
+             if result.returncode != 0:
+                 sys.exit(result.returncode)
+
+
+ def plain_db_connected():
+     result = subprocess.run(
+         [
+             "plain",
+             "legacy",
+             "showmigrations",
+             "--skip-checks",
+         ],
+         stdout=subprocess.DEVNULL,
+         stderr=subprocess.DEVNULL,
+     )
+     return result.returncode == 0
+
+
+ def check_short(message, *args):
+     print_event(message, newline=False)
+     result = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+     if result.returncode != 0:
+         click.secho("✘", fg="red")
+         click.secho(result.stdout.decode("utf-8"))
+         sys.exit(1)
+     else:
+         click.secho("✔", fg="green")
plain/dev/requests.py ADDED
@@ -0,0 +1,224 @@
+ import datetime
+ import json
+ import os
+ import sys
+ import traceback
+
+ import requests
+
+ from plain.runtime import settings
+ from plain.signals import got_request_exception
+
+
+ class RequestLog:
+     def __init__(self, *, request, response, exception=None):
+         self.request = request
+         self.response = response
+         self.exception = exception
+
+     @staticmethod
+     def storage_path():
+         return str(settings.PLAIN_TEMP_PATH / "requestlog")
+
+     @classmethod
+     def replay_request(cls, name):
+         path = os.path.join(cls.storage_path(), f"{name}.json")
+         with open(path) as f:
+             data = json.load(f)
+
+         method = data["request"]["method"]
+
+         if method == "GET":
+             # Params are in absolute uri
+             request_data = data["request"]["body"].encode("utf-8")
+         elif method in ("POST", "PUT", "PATCH"):
+             if data["request"]["querydict"]:
+                 request_data = data["request"]["querydict"]
+             else:
+                 request_data = data["request"]["body"].encode("utf-8")
+
+         # Cookies need to be passed as a dict, so that
+         # they are passed through redirects
+         data["request"]["headers"].pop("Cookie", None)
+
+         # TODO???
+         if data["request"]["headers"].get("X-Forwarded-Proto", "") == "https,https":
+             data["request"]["headers"]["X-Forwarded-Proto"] = "https"
+
+         response = requests.request(
+             method,
+             data["request"]["absolute_uri"],
+             headers=data["request"]["headers"],
+             cookies=data["request"]["cookies"],
+             data=request_data,
+             timeout=5,
+         )
+         print("Replayed request", response)
+
+     @staticmethod
+     def load_json_logs():
+         storage_path = RequestLog.storage_path()
+         if not os.path.exists(storage_path):
+             return []
+
+         logs = []
+         filenames = os.listdir(storage_path)
+         sorted_filenames = sorted(filenames, reverse=True)
+         for filename in sorted_filenames:
+             path = os.path.join(storage_path, filename)
+             with open(path) as f:
+                 log = json.load(f)
+                 log["name"] = os.path.splitext(filename)[0]
+                 # Convert timestamp back to datetime
+                 log["timestamp"] = datetime.datetime.fromtimestamp(log["timestamp"])
+                 try:
+                     log["request"]["body_json"] = json.dumps(
+                         json.loads(log["request"]["body"]), indent=2
+                     )
+                 except json.JSONDecodeError:
+                     pass
+                 logs.append(log)
+
+         return logs
+
+     @staticmethod
+     def delete_old_logs():
+         storage_path = RequestLog.storage_path()
+         if not os.path.exists(storage_path):
+             return
+
+         filenames = os.listdir(storage_path)
+         sorted_filenames = sorted(filenames, reverse=True)
+         for filename in sorted_filenames[settings.DEV_REQUESTS_MAX :]:
+             path = os.path.join(storage_path, filename)
+             try:
+                 os.remove(path)
+             except FileNotFoundError:
+                 pass
+
+     @staticmethod
+     def clear():
+         storage_path = RequestLog.storage_path()
+         if not os.path.exists(storage_path):
+             return
+
+         filenames = os.listdir(storage_path)
+         for filename in filenames:
+             path = os.path.join(storage_path, filename)
+             try:
+                 os.remove(path)
+             except FileNotFoundError:
+                 pass
+
+     def save(self):
+         storage_path = self.storage_path()
+         if not os.path.exists(storage_path):
+             os.makedirs(storage_path)
+
+         timestamp = datetime.datetime.now().timestamp()
+         filename = f"{timestamp}.json"
+         path = os.path.join(storage_path, filename)
+         with open(path, "w+") as f:
+             json.dump(self.as_dict(), f, indent=2)
+
+         self.delete_old_logs()
+
+     def as_dict(self):
+         return {
+             "timestamp": datetime.datetime.now().timestamp(),
+             "request": self.request_as_dict(self.request),
+             "response": self.response_as_dict(self.response),
+             "exception": self.exception_as_dict(self.exception),
+         }
+
+     @staticmethod
+     def request_as_dict(request):
+         return {
+             "method": request.method,
+             "path": request.path,
+             "full_path": request.get_full_path(),
+             "querydict": request.POST.dict()
+             if request.method == "POST"
+             else request.GET.dict(),
+             "cookies": request.COOKIES,
+             # files?
+             "absolute_uri": request.build_absolute_uri(),
+             "body": request.body.decode("utf-8"),
+             "headers": dict(request.headers),
+         }
+
+     @staticmethod
+     def response_as_dict(response):
+         try:
+             content = response.content.decode("utf-8")
+         except AttributeError:
+             content = "<streaming_content>"
+
+         return {
+             "status_code": response.status_code,
+             "headers": dict(response.headers),
+             "content": content,
+         }
+
+     @staticmethod
+     def exception_as_dict(exception):
+         if not exception:
+             return None
+
+         tb_string = "".join(traceback.format_tb(exception.__traceback__))
+
+         try:
+             args = json.dumps(exception.args)
+         except TypeError:
+             args = str(exception.args)
+
+         return {
+             "type": type(exception).__name__,
+             "str": str(exception),
+             "args": args,
+             "traceback": tb_string,
+         }
+
+
+ def should_capture_request(request):
+     if not settings.DEBUG:
+         return False
+
+     if request.resolver_match and request.resolver_match.default_namespace == "dev":
+         return False
+
+     if request.path in settings.DEV_REQUESTS_IGNORE_PATHS:
+         return False
+
+     # This could be an attribute set on request or response
+     # or something more dynamic
+     if "querystats" in request.GET:
+         return False
+
+     return True
+
+
+ class RequestsMiddleware:
+     def __init__(self, get_response):
+         self.get_response = get_response
+         self.exception = None  # If an exception occurs, we want to remember it
+
+         got_request_exception.connect(self.store_exception)
+
+     def __call__(self, request):
+         # Process it first, so we know the resolver_match
+         response = self.get_response(request)
+
+         if should_capture_request(request):
+             RequestLog(
+                 request=request, response=response, exception=self.exception
+             ).save()
+
+         return response
+
+     def store_exception(self, **kwargs):
+         """
+         The signal calls this at the right time,
+         so we can use sys.exception() to capture it.
+         """
+         self.exception = sys.exception()
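
A sketch of consuming the stored logs outside the middleware, for example from a shell. It uses only methods defined above; registering `RequestsMiddleware` itself happens through the project's middleware setting, which this diff does not include.

    from plain.dev.requests import RequestLog

    logs = RequestLog.load_json_logs()  # newest first, capped by DEV_REQUESTS_MAX
    for log in logs:
        print(log["timestamp"], log["request"]["method"], log["request"]["path"])

    if logs:
        RequestLog.replay_request(logs[0]["name"])  # re-sends it with `requests`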
plain/dev/services.py ADDED
@@ -0,0 +1,80 @@
+ import os
+ import subprocess
+ import time
+ from importlib.util import find_spec
+ from pathlib import Path
+
+ import click
+ from honcho.manager import Manager as HonchoManager
+
+ from plain.runtime import APP_PATH
+
+ from .pid import Pid
+ from .utils import has_pyproject_toml
+
+ try:
+     import tomllib
+ except ModuleNotFoundError:
+     import tomli as tomllib
+
+
+ class Services:
+     @staticmethod
+     def get_services(root):
+         if not has_pyproject_toml(root):
+             return {}
+
+         with open(Path(root, "pyproject.toml"), "rb") as f:
+             pyproject = tomllib.load(f)
+
+         return (
+             pyproject.get("tool", {})
+             .get("plain", {})
+             .get("dev", {})
+             .get("services", {})
+         )
+
+     def __init__(self):
+         self.manager = HonchoManager()
+
+     def run(self):
+         services = self.get_services(APP_PATH.parent)
+         for name, data in services.items():
+             env = {
+                 **os.environ,
+                 "PYTHONUNBUFFERED": "true",
+                 **data.get("env", {}),
+             }
+             self.manager.add_process(name, data["cmd"], env=env)
+
+         self.manager.loop()
+
+     def __enter__(self):
+         if not self.get_services(APP_PATH.parent):
+             # No-op if no services are defined
+             return
+
+         if Pid().exists():
+             click.secho("Services already running in `plain dev` command", fg="yellow")
+             return
+
+         print("Starting `plain dev services`")
+         self._subprocess = subprocess.Popen(
+             ["plain", "dev", "services"], cwd=APP_PATH.parent
+         )
+
+         if find_spec("plain.models"):
+             time.sleep(0.5)  # Give it a chance to hit on the first try
+             subprocess.check_call(["plain", "db", "wait"], env=os.environ)
+         else:
+             # A bit of a hack to wait for the services to start
+             time.sleep(3)
+
+     def __exit__(self, *args):
+         if not hasattr(self, "_subprocess"):
+             return
+
+         self._subprocess.terminate()
+
+         # Flush the buffer so the output doesn't spill over
+         self._subprocess.communicate()
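
`get_services()` reads a `[tool.plain.dev.services]` table that is not part of this diff. A hedged example of its expected shape, with the service name, command, and env values invented for illustration, followed by the dict `run()` would hand to honcho.

    # Assumed pyproject.toml shape (service name, cmd, and env are illustrative):
    #
    #   [tool.plain.dev.services.postgres]
    #   cmd = "docker compose up postgres"
    #   env = { POSTGRES_VERSION = "15" }
    #
    # get_services() would then return, and run() would iterate, a dict like:
    services = {
        "postgres": {
            "cmd": "docker compose up postgres",
            "env": {"POSTGRES_VERSION": "15"},
        },
    }
    for name, data in services.items():
        print(name, "->", data["cmd"])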