pg-r2-backup 1.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cli/__init__.py ADDED
File without changes
cli/cli.py ADDED
@@ -0,0 +1,172 @@
1
+ import argparse
2
+ import shutil
3
+ import os
4
+ import sys
5
+ import textwrap
6
+ import importlib.metadata
7
+
8
+ from main import run_backup
9
+
10
def get_version():
    """Return the installed distribution's version, or "dev" outside an install."""
    try:
        version = importlib.metadata.version("pg-r2-backup")
    except importlib.metadata.PackageNotFoundError:
        # Running from a source checkout rather than an installed package.
        version = "dev"
    return version
15
+
16
+
17
def mask(value, show=4):
    """Mask a secret for display, keeping at most the first *show* characters.

    Empty/None input yields ""; values no longer than *show* are fully masked.
    """
    if not value:
        return ""
    hidden = len(value) - show
    if hidden <= 0:
        return "*" * len(value)
    return f"{value[:show]}{'*' * hidden}"
23
+
24
def doctor():
    """Run environment diagnostics and print a PASS/FAIL report.

    Checks pg_dump availability, the required environment variables, and
    summarizes the effective backup configuration (URL mode, compression,
    local retention).
    """
    print("pg-r2-backup doctor\n")

    if shutil.which("pg_dump") is None:
        print("[FAIL] pg_dump not found in PATH")
    else:
        print("[OK] pg_dump found")

    use_public = os.environ.get("USE_PUBLIC_URL", "false").lower() == "true"

    # Match main.get_database_url(): when USE_PUBLIC_URL=true the tool reads
    # DATABASE_PUBLIC_URL, not DATABASE_URL, so require the one actually used.
    required_envs = [
        "DATABASE_PUBLIC_URL" if use_public else "DATABASE_URL",
        "R2_ACCESS_KEY",
        "R2_SECRET_KEY",
        "R2_BUCKET_NAME",
        "R2_ENDPOINT",
    ]

    missing = [e for e in required_envs if not os.environ.get(e)]

    if missing:
        print("\n[FAIL] Missing environment variables:")
        for m in missing:
            print(f" - {m}")
    else:
        print("\n[OK] Required environment variables set")

    print(f"\nDatabase URL mode : {'public' if use_public else 'private'}")

    # BACKUP_PASSWORD switches the backup pipeline from gzip to encrypted 7z.
    if os.environ.get("BACKUP_PASSWORD"):
        print("Compression : 7z (encrypted)")
    else:
        print("Compression : gzip")

    if os.environ.get("KEEP_LOCAL_BACKUP", "false").lower() == "true":
        print("Local backups : kept after upload")
    else:
        print("Local backups : deleted after upload")

    print("\nDoctor check complete.")
63
+
64
+
65
def config_show():
    """Print the effective configuration, masking secret credential values."""
    print("pg-r2-backup config\n")

    env = os.environ.get
    # Ordered (name, value) pairs; secrets go through mask() before display.
    settings = [
        ("USE_PUBLIC_URL", env("USE_PUBLIC_URL", "false")),
        ("DUMP_FORMAT", env("DUMP_FORMAT", "dump")),
        ("FILENAME_PREFIX", env("FILENAME_PREFIX", "backup")),
        ("MAX_BACKUPS", env("MAX_BACKUPS", "7")),
        ("KEEP_LOCAL_BACKUP", env("KEEP_LOCAL_BACKUP", "false")),
        ("BACKUP_TIME", env("BACKUP_TIME", "00:00")),
        ("R2_BUCKET_NAME", env("R2_BUCKET_NAME", "")),
        ("R2_ENDPOINT", env("R2_ENDPOINT", "")),
        ("R2_ACCESS_KEY", mask(env("R2_ACCESS_KEY"))),
        ("R2_SECRET_KEY", mask(env("R2_SECRET_KEY"))),
    ]

    for name, value in settings:
        print(f"{name:<16} : {value}")
83
+
84
+
85
def init_env():
    """Create a .env in the current directory by copying .env.example.

    Refuses to overwrite an existing .env and reports when the template
    file is missing.
    """
    target, template = ".env", ".env.example"

    if os.path.exists(target):
        print("[ERROR] .env already exists")
        return

    if not os.path.exists(template):
        print("[ERROR] .env.example not found")
        return

    shutil.copy(template, target)
    print("[SUCCESS] .env created from .env.example")
    print("Edit the file before running backups.")
98
+
99
+
100
def schedule_info():
    """Print scheduling examples for cron, Task Scheduler, and PaaS platforms.

    The CLI itself does not run a background scheduler; these are copy-paste
    recipes for external schedulers.
    """
    # dedent() strips the common leading indentation of the literal and
    # strip() removes the leading/trailing blank lines from the triple quote.
    print(textwrap.dedent("""
    pg-r2-backup scheduling

    Linux / macOS (cron):
    0 0 * * * pg-r2-backup run

    Windows (Task Scheduler):
    Program : pg-r2-backup
    Args : run
    Start in: folder containing .env (working directory)

    Railway / Docker:
    Use the platform scheduler
    """).strip())
115
+
116
def main():
    """CLI entry point: parse arguments and dispatch to the chosen subcommand.

    Exits with status 1 when no command (or an unknown one) is supplied,
    after printing the relevant help text.
    """
    parser = argparse.ArgumentParser(
        prog="pg-r2-backup",
        description="PostgreSQL backup tool for Cloudflare R2",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=textwrap.dedent("""
        Examples:
        pg-r2-backup doctor
        pg-r2-backup run
        pg-r2-backup config show
        pg-r2-backup init
        pg-r2-backup schedule
        """)
    )

    parser.add_argument(
        "--version",
        action="version",
        version=f"%(prog)s {get_version()}"
    )

    subparsers = parser.add_subparsers(dest="command")

    subparsers.add_parser("run", help="Run backup immediately")
    subparsers.add_parser("doctor", help="Check environment & dependencies")
    subparsers.add_parser("schedule", help="Show scheduling examples")

    config_parser = subparsers.add_parser("config", help="Show configuration")
    config_sub = config_parser.add_subparsers(dest="subcommand")
    config_sub.add_parser("show", help="Show current configuration")

    subparsers.add_parser("init", help="Create .env from .env.example")

    args = parser.parse_args()

    if args.command == "run":
        run_backup()

    elif args.command == "doctor":
        doctor()

    elif args.command == "config":
        if args.subcommand == "show":
            config_show()
        else:
            # Bare `config` used to fall through to the generic help/exit;
            # show the config-specific help instead so the user sees the
            # available subcommands.
            config_parser.print_help()
            sys.exit(1)

    elif args.command == "init":
        init_env()

    elif args.command == "schedule":
        schedule_info()

    else:
        parser.print_help()
        sys.exit(1)


if __name__ == "__main__":
    main()
main.py ADDED
@@ -0,0 +1,193 @@
1
+ import os
2
+ import subprocess
3
+ import boto3
4
+ from boto3.session import Config
5
+ from datetime import datetime, timezone
6
+ from boto3.s3.transfer import TransferConfig
7
+ from dotenv import load_dotenv, find_dotenv
8
+ import time
9
+ import schedule
10
+ import py7zr
11
+ import shutil
12
+ import gzip
13
+
14
# Load a .env from the current working directory (or a parent directory),
# letting it override already-set process environment variables.
load_dotenv(find_dotenv(usecwd=True), override=True)

## ENV

# Connection URLs: DATABASE_URL is the private/internal URL; the public URL
# is consulted only when USE_PUBLIC_URL=true (see get_database_url()).
DATABASE_URL = os.environ.get("DATABASE_URL")
DATABASE_PUBLIC_URL = os.environ.get("DATABASE_PUBLIC_URL")
# S3-compatible storage credentials/target (R2_* naming is historical; any
# S3-compatible endpoint works).
R2_ACCESS_KEY = os.environ.get("R2_ACCESS_KEY")
R2_SECRET_KEY = os.environ.get("R2_SECRET_KEY")
R2_BUCKET_NAME = os.environ.get("R2_BUCKET_NAME")
R2_ENDPOINT = os.environ.get("R2_ENDPOINT")
# Retention: number of most-recent remote backups to keep.
MAX_BACKUPS = int(os.environ.get("MAX_BACKUPS", 7))
# Whether to keep the local compressed backup after a successful upload.
KEEP_LOCAL_BACKUP = os.environ.get("KEEP_LOCAL_BACKUP", "false").lower() == "true"
# BACKUP_PREFIX is the object-key prefix inside the bucket; FILENAME_PREFIX
# names the local dump file.
BACKUP_PREFIX = os.environ.get("BACKUP_PREFIX", "")
FILENAME_PREFIX = os.environ.get("FILENAME_PREFIX", "backup")
# pg_dump output format selector: sql | plain | dump | custom | tar.
DUMP_FORMAT = os.environ.get("DUMP_FORMAT", "dump")
# When set, backups are written as password-protected 7z archives instead of gzip.
BACKUP_PASSWORD = os.environ.get("BACKUP_PASSWORD")
USE_PUBLIC_URL = os.environ.get("USE_PUBLIC_URL", "false").lower() == "true"
# Daily schedule time in UTC, "HH:MM" (validated below, falls back to 00:00).
BACKUP_TIME = os.environ.get("BACKUP_TIME", "00:00")
# Region hint for AWS S3; other providers (R2/MinIO) ignore it.
S3_REGION = os.environ.get("S3_REGION", "us-east-1")
33
+
34
def log(msg):
    """Print *msg* with an immediate flush so logs appear promptly in
    container/PaaS log streams that buffer stdout."""
    print(msg, flush=True)
36
+
37
## Validate BACKUP_TIME
# Accept only "HH:MM" with 0 <= HH <= 23 and 0 <= MM <= 59. A wrong number of
# ":"-separated parts makes the unpack raise ValueError, and a non-numeric
# part makes int() raise ValueError, so one handler covers every malformed
# input; fall back to midnight UTC with a warning.
try:
    hour, minute = BACKUP_TIME.split(":")
    if not (0 <= int(hour) <= 23 and 0 <= int(minute) <= 59):
        raise ValueError
except ValueError:
    log("[WARNING] Invalid BACKUP_TIME format. Using default: 00:00")
    BACKUP_TIME = "00:00"
45
+
46
def get_database_url():
    """Return the PostgreSQL URL selected by USE_PUBLIC_URL.

    Raises:
        ValueError: when the selected URL variable is unset.
    """
    if USE_PUBLIC_URL:
        if DATABASE_PUBLIC_URL:
            return DATABASE_PUBLIC_URL
        raise ValueError("[ERROR] DATABASE_PUBLIC_URL not set but USE_PUBLIC_URL=true!")

    if DATABASE_URL:
        return DATABASE_URL
    raise ValueError("[ERROR] DATABASE_URL not set!")
55
+
56
def gzip_compress(src):
    """Gzip *src* into "<src>.gz" via a streaming copy and return the new path.

    The source file is left in place; the caller decides when to delete it.
    """
    dst = f"{src}.gz"
    # Single with-statement keeps both handles open only for the copy.
    with open(src, "rb") as f_in, gzip.open(dst, "wb") as f_out:
        shutil.copyfileobj(f_in, f_out)
    return dst
62
+
63
def run_backup():
    """Dump the database, compress/encrypt it, upload it, and prune old backups.

    Pipeline: pg_dump -> gzip (or password-protected 7z when BACKUP_PASSWORD
    is set) -> upload to the configured S3-compatible bucket -> delete remote
    backups beyond MAX_BACKUPS. Returns early (after logging) when pg_dump is
    missing or the dump fails; temp files are cleaned in the finally blocks.
    """
    if shutil.which("pg_dump") is None:
        log("[ERROR] pg_dump not found. Install postgresql-client.")
        return

    database_url = get_database_url()
    log(f"[INFO] Using {'public' if USE_PUBLIC_URL else 'private'} database URL")

    # Map DUMP_FORMAT to (pg_dump -F flag, file extension); unknown values
    # fall back to the custom ("c") format.
    format_map = {
        "sql": ("p", "sql"),
        "plain": ("p", "sql"),
        "dump": ("c", "dump"),
        "custom": ("c", "dump"),
        "tar": ("t", "tar")
    }
    pg_format, ext = format_map.get(DUMP_FORMAT.lower(), ("c", "dump"))

    # UTC timestamp keeps object keys chronologically sortable by name.
    timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
    backup_file = f"{FILENAME_PREFIX}_{timestamp}.{ext}"

    # BACKUP_PASSWORD switches the archive format from .gz to encrypted .7z.
    compressed_file = (
        f"{backup_file}.7z" if BACKUP_PASSWORD else f"{backup_file}.gz"
    )

    # Full object key in the bucket; BACKUP_PREFIX acts as a "folder".
    compressed_file_r2 = f"{BACKUP_PREFIX}{compressed_file}"

    ## Create backup
    try:
        log(f"[INFO] Creating backup {backup_file}")

        # List-form argv (shell=False) avoids shell injection via the URL.
        dump_cmd = [
            "pg_dump",
            f"--dbname={database_url}",
            "-F", pg_format,
            "--no-owner",
            "--no-acl",
            "-f", backup_file
        ]

        subprocess.run(dump_cmd, check=True)

        if BACKUP_PASSWORD:
            log("[INFO] Encrypting backup with 7z...")
            with py7zr.SevenZipFile(compressed_file, "w", password=BACKUP_PASSWORD) as archive:
                archive.write(backup_file)
            log("[SUCCESS] Backup encrypted successfully")
        else:
            log("[INFO] Compressing backup with gzip...")
            gzip_compress(backup_file)
            log("[SUCCESS] Backup compressed successfully")

    except subprocess.CalledProcessError as e:
        log(f"[ERROR] Backup creation failed: {e}")
        return
    finally:
        # The raw dump is no longer needed once compressed — and must not
        # linger after a failed run either.
        if os.path.exists(backup_file):
            os.remove(backup_file)

    ## Upload to R2
    # Only reached when compression produced a file (dump + compress succeeded).
    if os.path.exists(compressed_file):
        size = os.path.getsize(compressed_file)
        log(f"[INFO] Final backup size: {size / 1024 / 1024:.2f} MB")

        try:
            # Path-style addressing for broad S3-compatibility (R2, MinIO, ...).
            client = boto3.client(
                "s3",
                endpoint_url=R2_ENDPOINT,
                aws_access_key_id=R2_ACCESS_KEY,
                aws_secret_access_key=R2_SECRET_KEY,
                region_name=S3_REGION,
                config=Config(
                    s3={"addressing_style": "path"}
                )
            )

            # Multipart upload kicks in above 8 MB, with up to 4 threads.
            config = TransferConfig(
                multipart_threshold=8 * 1024 * 1024,
                multipart_chunksize=8 * 1024 * 1024,
                max_concurrency=4,
                use_threads=True
            )

            client.upload_file(
                compressed_file,
                R2_BUCKET_NAME,
                compressed_file_r2,
                Config=config
            )

            log(f"[SUCCESS] Backup uploaded: {compressed_file_r2}")

            # Retention: list objects under the prefix, newest first, delete
            # everything beyond MAX_BACKUPS.
            # NOTE(review): list_objects_v2 returns at most 1000 keys per
            # call — pruning assumes fewer than 1000 backups under the prefix.
            objects = client.list_objects_v2(
                Bucket=R2_BUCKET_NAME,
                Prefix=BACKUP_PREFIX
            )

            if "Contents" in objects:
                backups = sorted(
                    objects["Contents"],
                    key=lambda x: x["LastModified"],
                    reverse=True
                )

                for obj in backups[MAX_BACKUPS:]:
                    client.delete_object(
                        Bucket=R2_BUCKET_NAME,
                        Key=obj["Key"]
                    )
                    log(f"[INFO] Deleted old backup: {obj['Key']}")

        except Exception as e:
            # Best-effort remote phase: a failed upload/prune is logged, not
            # raised, so the scheduler loop keeps running.
            log(f"[ERROR] R2 operation failed: {e}")
        finally:
            # The local compressed copy is removed unless explicitly kept
            # (KEEP_LOCAL_BACKUP=true, for VPS/NAS usage).
            if os.path.exists(compressed_file):
                if KEEP_LOCAL_BACKUP:
                    log("[INFO] Keeping local backup (KEEP_LOCAL_BACKUP=true)")
                else:
                    os.remove(compressed_file)
                    log("[INFO] Local backup deleted")
182
+
183
if __name__ == "__main__":
    # Long-running service mode (Docker/Railway): run one backup immediately,
    # then repeat daily at BACKUP_TIME (UTC) via the `schedule` library.
    # The CLI entry point (cli.cli) imports run_backup instead and never
    # reaches this loop.
    log("[INFO] Starting backup scheduler...")
    log(f"[INFO] Scheduled backup time: {BACKUP_TIME} UTC")

    schedule.every().day.at(BACKUP_TIME).do(run_backup)

    # Initial backup on startup so a fresh deploy is protected immediately.
    run_backup()

    # Poll once a minute; schedule only fires jobs whose time has arrived.
    while True:
        schedule.run_pending()
        time.sleep(60)
@@ -0,0 +1,325 @@
1
+ Metadata-Version: 2.4
2
+ Name: pg-r2-backup
3
+ Version: 1.0.6
4
+ Summary: PostgreSQL backup tool for Cloudflare R2 (S3 Compatible)
5
+ Author: Aman
6
+ License-Expression: MIT
7
+ Project-URL: Homepage, https://github.com/BigDaddyAman/pg-r2-backup
8
+ Project-URL: Repository, https://github.com/BigDaddyAman/pg-r2-backup
9
+ Project-URL: Issues, https://github.com/BigDaddyAman/pg-r2-backup/issues
10
+ Requires-Python: >=3.9
11
+ Description-Content-Type: text/markdown
12
+ Requires-Dist: boto3
13
+ Requires-Dist: python-dotenv
14
+ Requires-Dist: schedule
15
+ Requires-Dist: py7zr
16
+
17
+ ![License](https://img.shields.io/badge/license-MIT-blue.svg)
18
+ ![Python](https://img.shields.io/badge/python-3.12-blue)
19
+ ![Storage](https://img.shields.io/badge/storage-S3--compatible-orange)
20
+ ![Database](https://img.shields.io/badge/database-PostgreSQL-336791)
21
+ ![Deploy](https://img.shields.io/badge/deploy-Railway-purple)
22
+ ![Docker](https://img.shields.io/badge/docker-supported-blue)
23
+
24
+ # Postgres-to-R2 Backup (S3-Compatible)
25
+
26
+ A lightweight automation service that creates scheduled PostgreSQL backups and securely uploads them to **S3-compatible object storage**
27
+ such as **Cloudflare R2, AWS S3, Wasabi, Backblaze B2, or MinIO**.
28
+ Designed specifically as a **Railway deployment template**, with built-in support for Docker and cron scheduling.
29
+
30
+ ---
31
+
32
+ ## ✨ Features
33
+
34
+ - 📦 **Automated Backups** — scheduled daily or hourly PostgreSQL backups
35
+ - 🔐 **Optional Encryption** — gzip compression or 7z encryption with password
36
+ - ☁️ **Cloudflare R2 Integration** — seamless S3-compatible storage support
37
+ - 🧹 **Retention Policy** — automatically delete old backups
38
+ - 🔗 **Flexible Database URLs** — supports private and public PostgreSQL connection URLs
39
+ - ⚡ **Optimized Performance** — parallel pg_dump and multipart S3 uploads
40
+ - 🐳 **Docker Ready** — portable, lightweight container
41
+ - 🚀 **Railway Template First** — no fork required for normal usage
42
+ - 🪣 **S3-Compatible Storage** — works with R2, AWS S3, Wasabi, B2, MinIO
43
+ - 💾 **Optional Local Retention** — keep backups locally for CLI, VPS, or NAS usage
44
+
45
+ ---
46
+
47
+ ## 🚀 Deployment on Railway
48
+
49
+ 1. Click the **Deploy on Railway** button below
50
+ 2. Railway will create a new project using the latest version of this repository
51
+ 3. Add the required environment variables in the Railway dashboard
52
+ 4. (Optional) Configure a cron job for your desired backup schedule
53
+
54
+ > Railway uses ephemeral storage. Local backup files are deleted by default after upload.
55
+
56
+ [![Deploy on Railway](https://railway.com/button.svg)](https://railway.com/deploy/postgres-to-r2-backup?referralCode=nIQTyp&utm_medium=integration&utm_source=template&utm_campaign=generic)
57
+
58
+ ---
59
+
60
+ ## 🔧 Environment Variables (S3-Compatible)
61
+
62
+ ```env
63
+ DATABASE_URL= # PostgreSQL database URL (private)
64
+ DATABASE_PUBLIC_URL= # Public PostgreSQL URL (optional)
65
+ USE_PUBLIC_URL=false # Set true to use DATABASE_PUBLIC_URL
66
+
67
+ DUMP_FORMAT=dump # sql | plain | dump | custom | tar
68
+ FILENAME_PREFIX=backup # Backup filename prefix
69
+ MAX_BACKUPS=7 # Number of backups to retain
70
+ KEEP_LOCAL_BACKUP=false # Keep backup file locally after upload (not recommended on PaaS)
71
+
72
+ R2_ENDPOINT= # S3 endpoint URL
73
+ R2_BUCKET_NAME= # Bucket name
74
+ R2_ACCESS_KEY= # Access key
75
+ R2_SECRET_KEY= # Secret key
76
+ S3_REGION=us-east-1 # Required for AWS S3 (ignored by R2/MinIO)
77
+
78
+ BACKUP_PASSWORD= # Optional: enables 7z encryption
79
+ BACKUP_TIME=00:00 # Daily backup time (UTC, HH:MM)
80
+ ```
81
+
82
+ > Variable names use `R2_*` for historical reasons, but **any S3-compatible provider** can be used by changing the endpoint and credentials.
83
+ > For AWS S3 users: ensure `S3_REGION` matches your bucket’s region.
84
+
85
+ ---
86
+
87
+ ## ☁️ Supported S3-Compatible Providers
88
+
89
+ This project uses the **standard AWS S3 API via boto3**, and works with:
90
+
91
+ - Cloudflare R2 (recommended)
92
+ - AWS S3
93
+ - Wasabi
94
+ - Backblaze B2 (S3 API)
95
+ - MinIO (self-hosted)
96
+
97
+ ### Example Endpoints
98
+
99
+ | Provider | Endpoint Example |
100
+ |--------|------------------|
101
+ | Cloudflare R2 | `https://<accountid>.r2.cloudflarestorage.com` |
102
+ | AWS S3 | `https://s3.amazonaws.com` |
103
+ | Wasabi | `https://s3.wasabisys.com` |
104
+ | Backblaze B2 | `https://s3.us-west-004.backblazeb2.com` |
105
+ | MinIO | `http://localhost:9000` |
106
+
107
+ ---
108
+
109
+ ## ⏰ Railway Cron Jobs
110
+
111
+ You can configure the backup schedule using **Railway Cron Jobs**:
112
+
113
+ 1. Open your Railway project
114
+ 2. Go to **Deployments → Cron**
115
+ 3. Add a cron job targeting this service
116
+
117
+ ### Common Cron Expressions
118
+
119
+ | Schedule | Cron Expression | Description |
120
+ |--------|----------------|------------|
121
+ | Hourly | `0 * * * *` | Every hour |
122
+ | Daily | `0 0 * * *` | Once per day (UTC midnight) |
123
+ | Twice Daily | `0 */12 * * *` | Every 12 hours |
124
+ | Weekly | `0 0 * * 0` | Every Sunday |
125
+ | Monthly | `0 0 1 * *` | First day of the month |
126
+
127
+ **Tips**
128
+ - All cron times are **UTC**
129
+ - Use https://crontab.guru to validate expressions
130
+ - Adjust `MAX_BACKUPS` to match your schedule
131
+
132
+ > If you use Railway Cron Jobs, the service will start once per execution.
133
+ > In this case, the internal scheduler is ignored after startup.
134
+
135
+ ---
136
+
137
+ ## 🖥️ Running Locally or on Other Platforms
138
+
139
+ This tool can run on **any platform** that supports:
140
+ - Python 3.9+
141
+ - `pg_dump` (PostgreSQL client tools)
142
+ - Environment variables
143
+ - Long-running background processes or cron
144
+
145
+ > Docker images use **Python 3.12** by default.
146
+ > Local execution supports **Python 3.9+**.
147
+
148
+ ### Supported Environments
149
+
150
+ - Local machine (Linux / macOS / Windows*)
151
+ - VPS (Netcup, Hetzner, DigitalOcean, etc.)
152
+ - Docker containers
153
+ - Other PaaS providers (Heroku, Fly.io, Render, etc.)
154
+
155
+ > *Windows is supported when `pg_dump` is installed and available in PATH.*
156
+
157
+ ### Local Requirements
158
+
159
+ - Python 3.9+
160
+ - PostgreSQL client tools (`pg_dump`)
161
+ - pip
162
+
163
+ ### Run Manually (Local)
164
+
165
+ ```bash
166
+ pip install -r requirements.txt
167
+ python main.py
168
+ ```
169
+
170
+ ### Run with Docker (Optional)
171
+
172
+ Build and run the image locally:
173
+
174
+ ```bash
175
+ docker build -t postgres-to-r2-backup .
176
+ docker run --env-file .env postgres-to-r2-backup
177
+ ```
178
+
179
+ > Ensure the container is allowed to run continuously when not using an external cron scheduler.
180
+
181
+ > All scheduling uses **UTC** by default (e.g. Malaysia UTC+8 → set `BACKUP_TIME=16:00` for midnight).
182
+
183
+ ### Run from Prebuilt Docker Image
184
+
185
+ If you downloaded a prebuilt Docker image archive (`.tar` or `.tar.gz`), you can run it without building locally:
186
+
187
+ ```bash
188
+ # Extract the archive (if compressed)
189
+ tar -xzf postgres-to-r2-backup_v1.0.6.tar.gz
190
+
191
+ # Load the image into Docker
192
+ docker load -i postgres-to-r2-backup_v1.0.6.tar
193
+
194
+ # Run the container
195
+ docker run --env-file .env postgres-to-r2-backup:v1.0.6
196
+ ```
197
+
198
+ > Prebuilt images are architecture-specific (amd64 / arm64).
199
+
200
+ ---
201
+
202
+ ## 🧰 Using the CLI (Global Installation)
203
+
204
+ This project can also be used as a standalone CLI tool, installable via pip, in addition to running as a Railway or Docker service.
205
+
206
+ ### Install via pip
207
+
208
+ ```bash
209
+ pip install pg-r2-backup
210
+ ```
211
+
212
+ ### Requirements
213
+
214
+ - Python 3.9+
215
+ - PostgreSQL client tools (`pg_dump`) installed and available in PATH
216
+
217
+ ### Quick Start (CLI)
218
+
219
+ ```bash
220
+ mkdir backups
221
+ cd backups
222
+
223
+ pg-r2-backup init # creates .env from .env.example
224
+ pg-r2-backup doctor # checks environment and dependencies
225
+ pg-r2-backup run # runs a backup immediately
226
+ ```
227
+
228
+ ### CLI Commands
229
+
230
+ ```bash
231
+ pg-r2-backup run # Run backup immediately
232
+ pg-r2-backup doctor # Check environment & dependencies
233
+ pg-r2-backup config show # Show current configuration
234
+ pg-r2-backup init # Create .env from .env.example
235
+ pg-r2-backup schedule # Show scheduling examples
236
+ pg-r2-backup --version
237
+ ```
238
+
239
+ ### Environment Variable Resolution (CLI)
240
+
241
+ When running via the CLI, environment variables are resolved in the following order:
242
+
243
+ 1. A `.env` file in the current working directory (or parent directory)
244
+ 2. System environment variables
245
+
246
+ This allows different folders to maintain separate backup configurations.
247
+
248
+ ### Local Backup Behavior (CLI)
249
+
250
+ By default, pg-r2-backup deletes the local backup file after a successful upload.
251
+
252
+ To keep a local copy (recommended for local machines, VPS, or NAS):
253
+
254
+ KEEP_LOCAL_BACKUP=true
255
+
256
+ > Not recommended on PaaS platforms (Railway, Fly.io, Render, Heroku, etc.)
257
+ > due to ephemeral filesystems.
258
+
259
+ ### Scheduling Backups (CLI)
260
+
261
+ The CLI does not run a background scheduler. Use your operating system or platform scheduler instead.
262
+
263
+ **Linux / macOS (cron)**
264
+
265
+ ```bash
266
+ 0 0 * * * pg-r2-backup run
267
+ ```
268
+
269
+ **Windows (Task Scheduler)**
270
+
271
+ - Program: `pg-r2-backup`
272
+ - Arguments: `run`
273
+ - Start in: folder containing `.env` (working directory)
274
+
275
+ **Railway / Docker**
276
+
277
+ Use the platform's built-in scheduler (recommended).
278
+
279
+ 💡 **Tip**
280
+ Run `pg-r2-backup schedule` at any time to see scheduling examples.
281
+
282
+ ---
283
+
284
+ ## 🔐 Security
285
+
286
+ - **Do not expose PostgreSQL directly to the public internet.**
287
+ If your database is not on a private network, use a secure tunnel instead.
288
+
289
+ - **Recommended: Cloudflare Tunnel**
290
+ When using a public database URL, it is strongly recommended to connect via a secure tunnel such as **Cloudflare Tunnel** rather than opening database ports.
291
+
292
+ - **Protect credentials**
293
+ Store all secrets (database URLs, R2 keys, encryption passwords) using environment variables.
294
+ Never commit `.env` files to version control.
295
+
296
+ - **Encrypted backups (optional)**
297
+ Set `BACKUP_PASSWORD` to enable encrypted backups using 7z before uploading to S3-compatible storage.
298
+
299
+ - **Least privilege access**
300
+ Use a PostgreSQL user with read-only access where possible, and restrict R2 credentials to the required bucket only.
301
+
302
+ ---
303
+
304
+ ## 🛠 Development & Contributions
305
+
306
+ Fork this repository **only if you plan to**:
307
+
308
+ - Modify the backup logic
309
+ - Add features or integrations
310
+ - Submit pull requests
311
+ - Run locally for development
312
+
313
+ ---
314
+
315
+ ## ❓ FAQ
316
+
317
+ **Why only DATABASE_URL?**
318
+ This matches how most modern platforms expose PostgreSQL credentials.
319
+ Support for separate DB variables may be added if there is demand.
320
+
321
+ ## 📜 License
322
+
323
+ This project is open source under the **MIT License**.
324
+
325
+ You are free to use, modify, and distribute it with attribution.
@@ -0,0 +1,8 @@
1
+ main.py,sha256=k94YZEWRSfmFVDLnVlYaVfkT63fj3LIQcX3iCl_6A-U,6116
2
+ cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
+ cli/cli.py,sha256=p9mrc4mZX4Z6G-DozJpaKze-7ATPHjAa7EC8z-Gdhw0,4864
4
+ pg_r2_backup-1.0.6.dist-info/METADATA,sha256=UXZ3f2hLn2sEA12X6yc_4g7rwMHNGp_ujlTo56r7xb0,10422
5
+ pg_r2_backup-1.0.6.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
6
+ pg_r2_backup-1.0.6.dist-info/entry_points.txt,sha256=wmQ319oWNOZrGsNdHJXkAvdUs2fy8JbMez_121wVjWw,46
7
+ pg_r2_backup-1.0.6.dist-info/top_level.txt,sha256=Ram48mxeKPIx8LspvtoTGxibGqWT215h9M2iGFcJ7eI,9
8
+ pg_r2_backup-1.0.6.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.10.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ pg-r2-backup = cli.cli:main
@@ -0,0 +1,2 @@
1
+ cli
2
+ main