@mtldev514/retro-portfolio-engine 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +408 -0
- package/bin/cli.js +103 -0
- package/engine/admin/admin.css +720 -0
- package/engine/admin/admin.html +801 -0
- package/engine/admin/admin_api.py +230 -0
- package/engine/admin/scripts/backup.sh +116 -0
- package/engine/admin/scripts/config_loader.py +180 -0
- package/engine/admin/scripts/init.sh +141 -0
- package/engine/admin/scripts/manager.py +308 -0
- package/engine/admin/scripts/restore.sh +121 -0
- package/engine/admin/scripts/server.py +41 -0
- package/engine/admin/scripts/update.sh +321 -0
- package/engine/admin/scripts/validate_json.py +62 -0
- package/engine/fonts.css +37 -0
- package/engine/index.html +190 -0
- package/engine/js/config-loader.js +370 -0
- package/engine/js/config.js +173 -0
- package/engine/js/counter.js +17 -0
- package/engine/js/effects.js +97 -0
- package/engine/js/i18n.js +68 -0
- package/engine/js/init.js +107 -0
- package/engine/js/media.js +264 -0
- package/engine/js/render.js +282 -0
- package/engine/js/router.js +133 -0
- package/engine/js/sparkle.js +123 -0
- package/engine/js/themes.js +607 -0
- package/engine/style.css +2037 -0
- package/index.js +35 -0
- package/package.json +48 -0
- package/scripts/admin.js +67 -0
- package/scripts/build.js +142 -0
- package/scripts/init.js +237 -0
- package/scripts/post-install.js +16 -0
- package/scripts/serve.js +54 -0
- package/templates/user-portfolio/.github/workflows/deploy.yml +57 -0
- package/templates/user-portfolio/config/app.json +36 -0
- package/templates/user-portfolio/config/categories.json +241 -0
- package/templates/user-portfolio/config/languages.json +15 -0
- package/templates/user-portfolio/config/media-types.json +59 -0
- package/templates/user-portfolio/data/painting.json +3 -0
- package/templates/user-portfolio/data/projects.json +3 -0
- package/templates/user-portfolio/lang/en.json +114 -0
- package/templates/user-portfolio/lang/fr.json +114 -0
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
# Standard library
import argparse
import glob
import json
import mimetypes
import os
import re
import time
from datetime import datetime
from pathlib import Path

# Third-party
import cloudinary
import cloudinary.uploader
import requests
from dotenv import load_dotenv

# Local
from config_loader import config

# Load environment variables (Cloudinary / GitHub credentials) from .env.
load_dotenv()

# Load all JSON configuration (categories, GitHub settings, languages).
config.load_all()

# Cloudinary Configuration — credentials are read from the environment.
cloudinary.config(
    cloud_name=os.getenv("CLOUDINARY_CLOUD_NAME"),
    api_key=os.getenv("CLOUDINARY_API_KEY"),
    api_secret=os.getenv("CLOUDINARY_API_SECRET"),
    secure=True
)

# Map of category name -> JSON database path, loaded from configuration.
JSON_MAP = config.get_category_map()

# GitHub Releases Configuration (for audio/video that Cloudinary free plan rejects)
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_REPO = config.get_github_repo()  # Returns "username/repoName"
github_config = config.get_github_config()
RELEASE_TAG = github_config.get('mediaReleaseTag', 'media')
GITHUB_UPLOAD_CATEGORIES = set(github_config.get('uploadCategories', ['music']))

# Extension -> Content-Type header used when uploading assets to GitHub Releases.
MEDIA_CONTENT_TYPES = {
    ".mp3": "audio/mpeg",
    ".wav": "audio/wav",
    ".ogg": "audio/ogg",
    ".flac": "audio/flac",
    ".m4a": "audio/mp4",
    ".aac": "audio/aac",
    ".mp4": "video/mp4",
    ".webm": "video/webm",
}
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def get_or_create_release():
    """Get the existing media release, or create one for hosting audio/video assets.

    Looks up the release tagged RELEASE_TAG on GITHUB_REPO via the GitHub
    REST v3 API; creates it if the lookup does not return 200.

    Returns:
        dict: The release object as returned by the GitHub API.

    Raises:
        requests.HTTPError: If the release-creation request fails.
    """
    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    # Try to get the existing release by tag.
    # NOTE: requests has NO default timeout — without one a stalled
    # connection would hang this script forever.
    r = requests.get(
        f"https://api.github.com/repos/{GITHUB_REPO}/releases/tags/{RELEASE_TAG}",
        headers=headers,
        timeout=30,
    )
    if r.status_code == 200:
        return r.json()

    # Any non-200 (normally 404): create a new release.
    print(f"Creating GitHub Release '{RELEASE_TAG}'...")
    r = requests.post(
        f"https://api.github.com/repos/{GITHUB_REPO}/releases",
        headers=headers,
        json={
            "tag_name": RELEASE_TAG,
            "name": "Media Assets",
            "body": "Audio and video files for the portfolio.",
            "draft": False,
            "prerelease": False,
        },
        timeout=30,
    )
    r.raise_for_status()
    return r.json()
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def upload_to_github_release(file_path, filename):
    """Upload a file as an asset to the media GitHub Release.

    Args:
        file_path: Local path of the file to upload.
        filename: Display name used for the asset (extension drives the
            Content-Type header).

    Returns:
        str: The browser_download_url for the uploaded asset.

    Raises:
        requests.HTTPError: If the upload request fails.
    """
    release = get_or_create_release()
    # upload_url is an RFC 6570 URI template; strip the template suffix.
    upload_url = release["upload_url"].replace("{?name,label}", "")

    # Determine content type from the extension: our explicit table first,
    # then the stdlib guess, then a generic binary fallback.
    ext = os.path.splitext(filename)[1].lower()
    content_type = MEDIA_CONTENT_TYPES.get(ext)
    if not content_type:
        content_type, _ = mimetypes.guess_type(filename)
    if not content_type:
        content_type = "application/octet-stream"

    # Prepend a timestamp to avoid duplicate filename collisions.
    # BUG FIX: the original built the name without the actual filename,
    # so every asset lost its real name (and extension) on GitHub.
    unique_filename = f"{int(time.time())}_{filename}"

    headers = {
        "Authorization": f"token {GITHUB_TOKEN}",
        "Content-Type": content_type,
        "Accept": "application/vnd.github.v3+json",
    }

    print(f"Uploading asset '{unique_filename}' ({content_type})...")
    with open(file_path, "rb") as f:
        r = requests.post(
            f"{upload_url}?name={unique_filename}",
            headers=headers,
            data=f,
            # Generous timeout: media files can be large, and requests
            # would otherwise wait forever on a stalled connection.
            timeout=300,
        )
    r.raise_for_status()
    return r.json()["browser_download_url"]
|
|
116
|
+
|
|
117
|
+
def upload_single(file_path, category):
    """Upload one file to the appropriate backend and return its public URL.

    Categories configured for GitHub (when a token is available) go to
    GitHub Releases; everything else goes to Cloudinary.
    """
    use_github = category in GITHUB_UPLOAD_CATEGORIES and GITHUB_TOKEN
    if use_github:
        print(f"Uploading {file_path} to GitHub Releases...")
        asset_name = os.path.basename(file_path)
        url = upload_to_github_release(file_path, asset_name)
    else:
        print(f"Uploading {file_path} to Cloudinary...")
        # Cloudinary needs an explicit resource type for video; "auto"
        # covers images and everything else.
        kind = "video" if category == "video" else "auto"
        result = cloudinary.uploader.upload(
            file_path,
            folder=f"portfolio/{category}",
            resource_type=kind,
        )
        url = result.get("secure_url")
    print(f"Success! URL: {url}")
    return url
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def _collect_pile_images(directory):
    """Return the sorted, de-duplicated list of image files in *directory*.

    Raises:
        ValueError: If no image files are found.
    """
    IMAGE_EXTS = ("*.jpg", "*.jpeg", "*.png", "*.webp", "*.gif", "*.tiff", "*.bmp")
    files = []
    for ext in IMAGE_EXTS:
        files.extend(glob.glob(os.path.join(directory, ext)))
        files.extend(glob.glob(os.path.join(directory, ext.upper())))
    files = sorted(set(files))  # deduplicate and sort alphabetically
    if not files:
        raise ValueError(f"No image files found in '{directory}'")
    return files


def _load_entries(json_path):
    """Load the JSON list at *json_path*; tolerate missing/empty/corrupt files."""
    if not os.path.exists(json_path):
        return []
    try:
        with open(json_path, "r", encoding="utf-8") as f:
            content = f.read().strip()
        return json.loads(content) if content else []
    except json.JSONDecodeError:
        return []


def upload_and_save(file_path, title, category, medium=None, genre=None, description=None, created=None, pile=False):
    """Core logic to upload file(s) and update the JSON database.

    When pile=True and file_path is a directory, all images inside are uploaded
    as a single gallery item (first image = cover, rest = gallery array).

    Args:
        file_path: Media file path, or a directory when pile=True.
        title: Title of the work (stored as a multilingual object).
        category: Key into JSON_MAP selecting the target JSON database.
        medium, genre, description: Optional metadata (also multilingual).
        created: Optional creation date (YYYY-MM-DD); defaults to today.
        pile: Gallery mode for directories of images.

    Returns:
        dict: The entry appended to the category's JSON file.

    Raises:
        ValueError: For an invalid category or an empty pile directory.
    """
    print(f"--- Processing: {title} ({category}) ---")

    # Validate the category BEFORE uploading anything, so a typo'd category
    # doesn't waste (possibly many) remote uploads. The original validated
    # only after the upload had already happened.
    json_path = JSON_MAP.get(category)
    if not json_path:
        raise ValueError(f"Category '{category}' is invalid.")

    gallery_urls = []

    if pile and os.path.isdir(file_path):
        # Pile mode: upload all images in the directory.
        files = _collect_pile_images(file_path)
        print(f"Pile mode: found {len(files)} images")
        urls = [upload_single(f, category) for f in files]
        media_url = urls[0]       # first image is the cover
        gallery_urls = urls[1:]   # rest go into the gallery array
    else:
        # Single file upload.
        media_url = upload_single(file_path, category)

    data = _load_entries(json_path)

    def make_multilingual(value):
        """Wrap a single-language value as a multilingual object."""
        if not value:
            return None
        return config.create_multilingual_object(value)

    new_entry = {
        "id": f"{category}_{int(datetime.now().timestamp())}",
        "title": make_multilingual(title),
        "url": media_url,
        "date": datetime.now().strftime("%Y-%m-%d"),
        "created": created if created else datetime.now().strftime("%Y-%m-%d")
    }
    if gallery_urls:
        new_entry["gallery"] = gallery_urls
    if medium:
        new_entry["medium"] = make_multilingual(medium)
    if genre:
        new_entry["genre"] = make_multilingual(genre)
    if description:
        new_entry["description"] = make_multilingual(description)

    data.append(new_entry)

    # Save back to JSON.
    with open(json_path, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=4, ensure_ascii=False)
    print(f"Updated {json_path}")

    # Update "Last Updated" globally.
    update_site_timestamp()
    return new_entry
|
|
218
|
+
|
|
219
|
+
def update_site_timestamp():
    """Refresh the 'Last Updated' date in every index.html we can locate.

    Best-effort: pages without the marker span are skipped, and per-file
    read/write failures are reported without aborting.
    """
    stamp = datetime.now().strftime("%d %b %Y")

    # Candidate pages: the working directory and the configured content root.
    for candidate in ("index.html", config.content_root / "index.html"):
        page = Path(candidate)
        if not page.exists():
            continue
        try:
            html = page.read_text(encoding="utf-8")
            # Only rewrite pages that actually carry the timestamp span.
            if 'Last Updated:</span>' not in html:
                continue
            refreshed = re.sub(
                r'Last Updated:</span> \d{1,2} \w{3} \d{4}',
                f'Last Updated:</span> {stamp}',
                html,
            )
            page.write_text(refreshed, encoding="utf-8")
            print(f"Updated timestamp in {page}")
        except Exception as e:
            print(f"Failed to update timestamp in {page}: {e}")
|
|
245
|
+
|
|
246
|
+
def save_from_url(url, title, category, medium=None, genre=None, description=None, created=None):
    """Save a media entry using a direct URL (no Cloudinary upload).

    Used for audio files hosted on Internet Archive, GitHub Releases, etc.

    Args:
        url: Publicly reachable URL of the already-hosted media.
        title: Title of the work (stored as a multilingual object).
        category: Key into JSON_MAP selecting the target JSON database.
        medium, genre, description: Optional metadata (also multilingual).
        created: Optional creation date (YYYY-MM-DD); defaults to today.

    Returns:
        dict: The entry appended to the category's JSON file.

    Raises:
        ValueError: If the category is not in JSON_MAP.
    """
    print(f"--- Saving from URL: {title} ({category}) ---")

    json_path = JSON_MAP.get(category)
    if not json_path:
        raise ValueError(f"Category '{category}' is invalid.")

    # Load existing data (tolerate a missing, empty, or corrupt file).
    if os.path.exists(json_path):
        try:
            with open(json_path, "r", encoding="utf-8") as f:
                content = f.read().strip()
            data = json.loads(content) if content else []
        except json.JSONDecodeError:
            data = []
    else:
        data = []

    def make_multilingual(value):
        """Wrap a single-language value as a multilingual object."""
        if not value:
            return None
        # CONSISTENCY FIX: use the configured language set (as
        # upload_and_save does) instead of a hard-coded en/fr/mx/ht dict,
        # which silently broke portfolios configured with other languages.
        return config.create_multilingual_object(value)

    new_entry = {
        "id": f"{category}_{int(datetime.now().timestamp())}",
        "title": make_multilingual(title),
        "url": url,
        "date": datetime.now().strftime("%Y-%m-%d"),
        "created": created if created else datetime.now().strftime("%Y-%m-%d")
    }
    if medium:
        new_entry["medium"] = make_multilingual(medium)
    if genre:
        new_entry["genre"] = make_multilingual(genre)
    if description:
        new_entry["description"] = make_multilingual(description)

    data.append(new_entry)

    with open(json_path, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=4, ensure_ascii=False)
    print(f"Updated {json_path}")

    update_site_timestamp()
    return new_entry
|
|
293
|
+
|
|
294
|
+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Alex's Portfolio Content Manager")
    parser.add_argument("--file", required=True, help="Path to the media file or directory (with --pile)")
    parser.add_argument("--title", required=True, help="Title of the work")
    parser.add_argument("--cat", required=True, choices=list(JSON_MAP.keys()), help="Category")
    parser.add_argument("--medium", help="Medium (for art/sculpting)")
    parser.add_argument("--genre", help="Genre (for music/video)")
    parser.add_argument("--description", help="Description of the work")
    # upload_and_save already accepts an explicit creation date, but the CLI
    # never exposed it — so CLI entries were always dated "today".
    parser.add_argument("--created", help="Creation date (YYYY-MM-DD); defaults to today")
    parser.add_argument("--pile", action="store_true", help="Pile mode: upload all images in a directory as one gallery item")

    args = parser.parse_args()
    try:
        # Keyword arguments keep this call robust against signature reordering.
        upload_and_save(
            args.file,
            args.title,
            args.cat,
            medium=args.medium,
            genre=args.genre,
            description=args.description,
            created=args.created,
            pile=args.pile,
        )
    except Exception as e:
        print(f"Error: {e}")
        # Exit non-zero so shell callers / CI can detect the failure.
        raise SystemExit(1)
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
#!/bin/bash

# Restore Personal Data
# Restores a timestamped backup of your personal configuration and content
# Usage: ./restore.sh [timestamp|latest]   (prompts interactively if omitted)

set -e

echo ""
echo "═══════════════════════════════════════════════════════════"
echo " 📥 Restore Personal Data"
echo "═══════════════════════════════════════════════════════════"
echo ""

BACKUP_DIR=".backup-personal"

# List available backups (one timestamped directory per backup).
list_backups() {
    # Quote $BACKUP_DIR everywhere — unquoted expansion breaks on paths
    # containing spaces or glob characters.
    if [ ! -d "$BACKUP_DIR" ] || [ -z "$(ls -A "$BACKUP_DIR" 2>/dev/null)" ]; then
        echo "❌ No backups found in $BACKUP_DIR"
        exit 1
    fi

    echo "Available backups:"
    echo ""
    ls -lt "$BACKUP_DIR" | grep "^d" | awk '{print " " $9}' | nl
    echo ""
}

# Get backup to restore: from $1, or interactively.
if [ -z "$1" ]; then
    list_backups
    read -p "Enter backup timestamp (or 'latest' for most recent): " TIMESTAMP
else
    TIMESTAMP="$1"
fi

# Handle 'latest' option
if [ "$TIMESTAMP" == "latest" ]; then
    TIMESTAMP=$(ls -t "$BACKUP_DIR" | head -n 1)
    echo "Using latest backup: $TIMESTAMP"
fi

BACKUP_PATH="$BACKUP_DIR/$TIMESTAMP"

# Verify backup exists
if [ ! -d "$BACKUP_PATH" ]; then
    echo "❌ Backup not found: $BACKUP_PATH"
    echo ""
    list_backups
    exit 1
fi

# Show backup info
if [ -f "$BACKUP_PATH/BACKUP_INFO.txt" ]; then
    echo "Backup Information:"
    echo "─────────────────────────────────────────────────────────"
    cat "$BACKUP_PATH/BACKUP_INFO.txt"
    echo "─────────────────────────────────────────────────────────"
    echo ""
fi

# Confirm restoration
echo "⚠️  This will restore files from: $BACKUP_PATH"
echo ""
read -p "Continue with restoration? (y/N): " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Aborted."
    exit 0
fi

echo ""
echo "Restoring files..."
echo ""

# Restore one directory from the backup into the working tree.
# Replaces three copy-pasted stanzas (config/data/lang) with one helper.
restore_dir() {
    local name="$1"
    if [ -d "$BACKUP_PATH/$name" ]; then
        cp -r "$BACKUP_PATH/$name" ./
        echo " ✓ Restored $name/ ($(find "$name" -type f | wc -l | xargs) files)"
    else
        echo " ⚠️  $name/ not in backup"
    fi
}

restore_dir config
restore_dir data
restore_dir lang

# Restore .env
if [ -f "$BACKUP_PATH/.env" ]; then
    cp "$BACKUP_PATH/.env" ./
    echo " ✓ Restored .env"
else
    echo " ⚠️  .env not in backup"
fi


echo ""
echo "═══════════════════════════════════════════════════════════"
echo " ✅ Restoration Complete!"
echo "═══════════════════════════════════════════════════════════"
echo ""
echo "Your personal data has been restored from:"
echo " $BACKUP_PATH"
echo ""
echo "You can now:"
echo " 1. Verify your data: ls -la config/ data/ lang/"
echo " 2. Test locally: python3 -m http.server 8000"
echo " 3. Open in browser: open http://localhost:8000"
echo ""
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"""
|
|
2
|
+
SPA-aware dev server for the retro portfolio.
|
|
3
|
+
Serves static files normally, but falls back to index.html
|
|
4
|
+
for any .html route that doesn't exist on disk (SPA routing).
|
|
5
|
+
|
|
6
|
+
Usage: python3 server.py [port]
|
|
7
|
+
"""
|
|
8
|
+
import http.server
|
|
9
|
+
import os
|
|
10
|
+
import sys
|
|
11
|
+
|
|
12
|
+
PORT = int(sys.argv[1]) if len(sys.argv) > 1 else 8000
|
|
13
|
+
ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class SPAHandler(http.server.SimpleHTTPRequestHandler):
    """Static-file handler with SPA fallback.

    Existing files are served as-is; missing '.html' routes (and '/')
    fall back to index.html so client-side routing works on refresh.
    """

    def __init__(self, *args, **kwargs):
        # Serve files relative to the repo's web root, not the CWD.
        super().__init__(*args, directory=ROOT, **kwargs)

    def do_GET(self):
        from urllib.parse import unquote

        # Strip query string and fragment for the file lookup.
        # BUG FIX: also percent-decode the path — previously a request like
        # /my%20page.html never matched the file on disk, so an *existing*
        # page was wrongly replaced by the index.html fallback.
        path = unquote(self.path.split('?')[0].split('#')[0])

        # Build the filesystem path (relative to the web root).
        fs_path = os.path.join(ROOT, path.lstrip('/'))

        # If the file exists on disk, serve it normally.
        if os.path.isfile(fs_path):
            return super().do_GET()

        # For .html routes (or bare /) that don't exist, serve index.html.
        if path == '/' or path.endswith('.html'):
            self.path = '/index.html'
            return super().do_GET()

        # Everything else: default behaviour (will 404 if missing).
        return super().do_GET()
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
print(f'SPA dev server running on http://localhost:{PORT}')
# Blocks forever serving requests; stop with Ctrl-C.
http.server.HTTPServer(('', PORT), SPAHandler).serve_forever()
|