@fishawack/lab-env 5.4.0 → 5.5.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cli.js CHANGED
@@ -54,6 +54,7 @@ const args = hideBin(process.argv);
54
54
  "start",
55
55
  "setup",
56
56
  "test",
57
+ "scan",
57
58
  "production",
58
59
  "run",
59
60
  "connect",
@@ -1,9 +1,37 @@
1
1
  const execSync = require("child_process").execSync;
2
2
  const fs = require("fs-extra");
3
+ const path = require("path");
4
+ const { isEqual } = require("lodash");
3
5
  const utilities = require("./create/libs/utilities");
4
6
  const _ = require("../globals.js");
5
7
  const { getConfigurations, findRepository } = require("../hub.js");
6
8
  const glob = require("glob");
9
+ const { pullS3 } = require("./helpers/content-pull.js");
10
+ const { pullRequests } = require("./helpers/content-request.js");
11
+
12
+ const SNAPSHOT_PATH = ".tmp/content.json";
13
+
14
/**
 * Compare the current content configuration against the snapshot persisted
 * by saveConfigSnapshot() during the previous run.
 *
 * @param {*} currentContent - Content configuration from the core config.
 * @returns {*} The previous snapshot when one exists and differs from
 *   currentContent; otherwise null (no snapshot, or config unchanged).
 */
function detectConfigChange(currentContent) {
  let previous = null;

  try {
    const raw = fs.readFileSync(SNAPSHOT_PATH, { encoding: "utf8" });
    previous = JSON.parse(raw);
  } catch {
    // First run or unreadable snapshot — treat as "no previous config".
  }

  const changed = Boolean(previous) && !isEqual(previous, currentContent);
  return changed ? previous : null;
}
28
+
29
/**
 * Persist the content configuration to SNAPSHOT_PATH so the next run can
 * detect configuration changes via detectConfigChange().
 *
 * @param {*} content - Content configuration to serialise as JSON.
 */
function saveConfigSnapshot(content) {
  const dir = path.dirname(SNAPSHOT_PATH);
  fs.mkdirpSync(dir);

  const serialised = JSON.stringify(content);
  fs.writeFileSync(SNAPSHOT_PATH, serialised, { encoding: "utf8" });
}
7
35
 
8
36
  module.exports = [
9
37
  "content",
@@ -34,9 +62,95 @@ module.exports = [
34
62
  "title",
35
63
  ),
36
64
  );
37
- } else {
65
+ } else if (_.pkg?.scripts?.content) {
66
+ // Fallback: project has its own content script, delegate to core
67
+ console.log(
68
+ utilities.colorize(
69
+ "Project has a content script in package.json, falling back to core...",
70
+ "warning",
71
+ ),
72
+ );
38
73
  _.command("core", `npm run content`);
74
+ } else {
75
+ const contentConfig = _.coreConfig?.attributes?.content;
76
+ const srcBase = _.coreConfig?.attributes?.src || "_Build";
77
+
78
+ if (!contentConfig || contentConfig.length === 0) {
79
+ console.log(
80
+ utilities.colorize(
81
+ "No content config found. Skipping...",
82
+ "warning",
83
+ ),
84
+ );
85
+ } else {
86
+ // Detect config changes and clean stale content
87
+ const prevContent = detectConfigChange(contentConfig);
88
+ if (prevContent) {
89
+ console.log(
90
+ utilities.colorize(
91
+ "Content config has changed. Removing existing content...",
92
+ "warning",
93
+ ),
94
+ );
95
+ fs.removeSync(`${srcBase}/content`);
96
+ prevContent
97
+ .filter((d) => d.saveTo)
98
+ .forEach((d) => fs.removeSync(d.saveTo));
99
+ }
100
+
101
+ // content:pull — S3-based content
102
+ for (const [i, d] of contentConfig.entries()) {
103
+ if (d.location) {
104
+ if (!d["aws-s3"]) {
105
+ console.log(
106
+ utilities.colorize(
107
+ `Skipping "${d.location}" — only aws-s3 protocol is supported. FTP/SSH/LFTP have been removed.`,
108
+ "warning",
109
+ ),
110
+ );
111
+ continue;
112
+ }
113
+
114
+ const saveTo =
115
+ d.saveTo ||
116
+ `${srcBase}/content/${d.key || `content-${i}`}`;
117
+
118
+ console.log(
119
+ utilities.colorize(
120
+ `Pulling content from: ${d.location}`,
121
+ "title",
122
+ ),
123
+ );
124
+
125
+ await pullS3(d, saveTo);
126
+
127
+ console.log(
128
+ utilities.colorize(
129
+ `Content pulled from: ${d.location}`,
130
+ "success",
131
+ ),
132
+ );
133
+ }
134
+ }
135
+
136
+ // content:request — HTTP API-based content
137
+ const requestItems = contentConfig.filter((d) => d.url);
138
+ if (requestItems.length > 0) {
139
+ console.log(
140
+ utilities.colorize(
141
+ "Fetching content from API endpoints...",
142
+ "title",
143
+ ),
144
+ );
145
+ await pullRequests(contentConfig, srcBase);
146
+ }
147
+
148
+ saveConfigSnapshot(contentConfig);
149
+ }
150
+ }
39
151
 
152
+ // Hub sync runs for both fallback and native paths
153
+ if (!argv.init) {
40
154
  try {
41
155
  if (process.env.HUB_URL) {
42
156
  console.log(`Syncing branch configurations from Hub...`);
@@ -122,7 +122,7 @@ module.exports = [
122
122
  name: "content-security-policy",
123
123
  message: "content-security-policy header value:",
124
124
  default:
125
- "default-src 'self' https: data: 'unsafe-inline';",
125
+ "default-src 'self' https: data: 'unsafe-inline' blob:;",
126
126
  validate: (input) => !!input.length,
127
127
  },
128
128
  {
@@ -45,7 +45,7 @@ function handler(event) {
45
45
  value: "max-age=31536000; includeSubDomains",
46
46
  },
47
47
  "content-security-policy": {
48
- value: "default-src 'self' https: data: 'unsafe-inline';",
48
+ value: "default-src 'self' https: data: 'unsafe-inline' blob:;",
49
49
  },
50
50
  "x-content-type-options": { value: "nosniff" },
51
51
  "x-frame-options": { value: "sameorigin" },
@@ -18,7 +18,7 @@ function handler(event) {
18
18
  value: "max-age=31536000; includeSubDomains",
19
19
  };
20
20
  headers["content-security-policy"] = {
21
- value: "default-src 'self' https: data: 'unsafe-inline';",
21
+ value: "default-src 'self' https: data: 'unsafe-inline' blob:;",
22
22
  };
23
23
  headers["x-content-type-options"] = { value: "nosniff" };
24
24
  headers["x-frame-options"] = { value: "sameorigin" };
@@ -142,6 +142,10 @@ module.exports.templates = [
142
142
  name: "boilerplate-adonis",
143
143
  type: "boilerplate",
144
144
  },
145
+ {
146
+ name: "boilerplate-python",
147
+ type: "boilerplate",
148
+ },
145
149
  {
146
150
  name: "sprinkle-base",
147
151
  type: "framework",
@@ -195,7 +195,7 @@ module.exports.createCloudFrontFunction = async (name, fn, config) => {
195
195
  const client = new CloudFrontClient({});
196
196
 
197
197
  let FunctionConfig = {
198
- Comment: `lab-env provisioned cloudfront function for project ${name} using code snippet ${fn}.js`,
198
+ Comment: `${name} - ${fn}.js`,
199
199
  Runtime: `cloudfront-js-1.0`,
200
200
  };
201
201
 
@@ -1,5 +1,5 @@
1
1
  Header set X-Content-Type-Options "nosniff"
2
- Header set Content-Security-Policy "default-src 'self' https: data: 'unsafe-inline' 'unsafe-eval';"
2
+ Header set Content-Security-Policy "default-src 'self' https: data: 'unsafe-inline' 'unsafe-eval' blob:;"
3
3
  Header set X-Frame-Options 'sameorigin'
4
4
  Header set Strict-Transport-Security "max-age=31536000; includeSubDomains"
5
5
 
@@ -1,5 +1,5 @@
1
1
  Header set X-Content-Type-Options "nosniff"
2
- Header set Content-Security-Policy "default-src 'self' https: data: 'unsafe-inline';"
2
+ Header set Content-Security-Policy "default-src 'self' https: data: 'unsafe-inline' blob:;"
3
3
  Header set X-Frame-Options 'sameorigin'
4
4
  Header set Strict-Transport-Security "max-age=31536000; includeSubDomains"
5
5
 
@@ -1,4 +1,4 @@
1
1
  add_header X-Content-Type-Options "nosniff";
2
- add_header Content-Security-Policy "default-src 'self' https: data: 'unsafe-inline';";
2
+ add_header Content-Security-Policy "default-src 'self' https: data: 'unsafe-inline' blob:;";
3
3
  add_header X-Frame-Options 'sameorigin';
4
4
  add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
@@ -0,0 +1,209 @@
1
+ const {
2
+ S3Client,
3
+ ListObjectsV2Command,
4
+ GetObjectCommand,
5
+ PutObjectCommand,
6
+ } = require("@aws-sdk/client-s3");
7
+ const { fromIni } = require("@aws-sdk/credential-providers");
8
+ const fs = require("fs-extra");
9
+ const path = require("path");
10
+ const { colorize } = require("../create/libs/utilities");
11
+
12
/**
 * Build an S3 client authenticated via a named profile from the local AWS
 * credentials file (~/.aws/credentials).
 *
 * @param {string} profile - AWS credentials profile name.
 * @returns {S3Client} Configured client (region from AWS_REGION, default us-east-1).
 */
function createClient(profile) {
  const region = process.env.AWS_REGION || "us-east-1";
  const credentials = fromIni({ profile });
  return new S3Client({ region, credentials });
}
18
+
19
/**
 * Split a "bucket/key/prefix" location string into its S3 Bucket and Prefix
 * parts. A location with no "/" is a bare bucket name (empty prefix).
 *
 * @param {string} location - e.g. "my-bucket/content/site".
 * @returns {{ Bucket: string, Prefix: string }}
 */
function parseBucketAndPrefix(location) {
  const slash = location.indexOf("/");
  const Bucket = slash === -1 ? location : location.slice(0, slash);
  const Prefix = slash === -1 ? "" : location.slice(slash + 1);
  return { Bucket, Prefix };
}
29
+
30
/**
 * List every object under Bucket/Prefix, following ListObjectsV2 pagination
 * until the listing is exhausted.
 *
 * @param {S3Client} client - Client from createClient().
 * @param {string} Bucket
 * @param {string} Prefix
 * @returns {Promise<Array<{Key: string, LastModified: Date, Size: number}>>}
 */
async function listRemoteObjects(client, Bucket, Prefix) {
  const objects = [];
  let ContinuationToken = null;

  do {
    const command = new ListObjectsV2Command({
      Bucket,
      Prefix,
      ContinuationToken,
    });
    const res = await client.send(command);

    // Keep only the fields the sync logic needs.
    for (const { Key, LastModified, Size } of res.Contents || []) {
      objects.push({ Key, LastModified, Size });
    }

    ContinuationToken = res.NextContinuationToken;
  } while (ContinuationToken);

  return objects;
}
58
+
59
/**
 * Recursively collect every regular file under dir.
 *
 * @param {string} dir - Root directory; may not exist.
 * @returns {Array<{path: string, relativePath: string, mtime: Date, size: number}>}
 *   Empty array when dir does not exist. relativePath is relative to dir
 *   (OS-native separators).
 */
function getLocalFiles(dir) {
  if (!fs.existsSync(dir)) {
    return [];
  }

  const files = [];

  // Depth-first, in readdir order (descend into a directory as soon as it
  // is encountered).
  const collect = (currentDir) => {
    const entries = fs.readdirSync(currentDir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(currentDir, entry.name);

      if (entry.isDirectory()) {
        collect(fullPath);
        continue;
      }
      if (!entry.isFile()) {
        // Symlinks, sockets, etc. are ignored.
        continue;
      }

      const { mtime, size } = fs.statSync(fullPath);
      files.push({
        path: fullPath,
        relativePath: path.relative(dir, fullPath),
        mtime,
        size,
      });
    }
  };

  collect(dir);
  return files;
}
88
+
89
/**
 * Upload local files under saveTo to Bucket/Prefix, skipping files whose
 * remote copy is at least as new (mirrors `rclone copy --update`).
 *
 * @param {S3Client} client
 * @param {string} Bucket
 * @param {string} Prefix - May be "", "path" or "path/".
 * @param {string} saveTo - Local directory to mirror up.
 */
async function uploadLocalToS3(client, Bucket, Prefix, saveTo) {
  const localFiles = getLocalFiles(saveTo);

  if (localFiles.length === 0) {
    return;
  }

  // Path of an object relative to Prefix. Tolerates a trailing "/" on the
  // prefix — the previous fixed `Prefix.length + 1` offset truncated the
  // first character of every key when the prefix already ended in "/".
  const relativeKey = (key) => {
    if (!Prefix) return key;
    const rest = key.slice(Prefix.length);
    return rest.startsWith("/") ? rest.slice(1) : rest;
  };

  const remoteObjects = await listRemoteObjects(client, Bucket, Prefix);
  const remoteMap = new Map();
  for (const obj of remoteObjects) {
    remoteMap.set(relativeKey(obj.Key), obj);
  }

  let uploaded = 0;

  for (const local of localFiles) {
    // Normalise to forward slashes so S3 keys (and the skip-map lookups)
    // are correct on Windows, where path.relative() yields backslashes.
    const posixRel = local.relativePath.split(path.sep).join("/");
    const remoteKey = Prefix
      ? `${Prefix.replace(/\/$/, "")}/${posixRel}`
      : posixRel;
    const remote = remoteMap.get(posixRel);

    // rclone copy --update semantics: skip when remote is newer or equal.
    if (remote && remote.LastModified >= local.mtime) {
      continue;
    }

    await client.send(
      new PutObjectCommand({
        Bucket,
        Key: remoteKey,
        Body: fs.readFileSync(local.path),
      }),
    );

    uploaded++;
  }

  if (uploaded > 0) {
    console.log(
      colorize(` Uploaded ${uploaded} file(s) to S3`, "success"),
    );
  }
}
135
+
136
/**
 * Mirror Bucket/Prefix down into saveTo, skipping files whose local copy is
 * at least as new (mirrors `rclone copy --update`).
 *
 * @param {S3Client} client
 * @param {string} Bucket
 * @param {string} Prefix - May be "", "path" or "path/".
 * @param {string} saveTo - Local destination directory.
 */
async function downloadS3ToLocal(client, Bucket, Prefix, saveTo) {
  const remoteObjects = await listRemoteObjects(client, Bucket, Prefix);

  // Index local files by posix-style relative path so lookups match S3
  // keys on Windows too (path.relative() yields backslashes there).
  const localMap = new Map();
  for (const local of getLocalFiles(saveTo)) {
    localMap.set(local.relativePath.split(path.sep).join("/"), local);
  }

  // Path of an object relative to Prefix. Tolerates a trailing "/" on the
  // prefix — the previous fixed `Prefix.length + 1` offset truncated the
  // first character of every path when the prefix already ended in "/".
  const relativeKey = (key) => {
    if (!Prefix) return key;
    const rest = key.slice(Prefix.length);
    return rest.startsWith("/") ? rest.slice(1) : rest;
  };

  let downloaded = 0;

  for (const obj of remoteObjects) {
    // Skip zero-byte "directory" marker objects.
    if (obj.Key.endsWith("/")) {
      continue;
    }

    const relPath = relativeKey(obj.Key);
    if (!relPath) {
      continue;
    }

    const local = localMap.get(relPath);

    // rclone copy --update semantics: skip when local is newer or equal.
    if (local && local.mtime >= obj.LastModified) {
      continue;
    }

    const res = await client.send(
      new GetObjectCommand({ Bucket, Key: obj.Key }),
    );

    // res.Body is an async-iterable stream; buffer it fully before writing.
    const chunks = [];
    for await (const chunk of res.Body) {
      chunks.push(chunk);
    }

    const localPath = path.join(saveTo, relPath);
    fs.mkdirpSync(path.dirname(localPath));
    fs.writeFileSync(localPath, Buffer.concat(chunks));
    downloaded++;
  }

  if (downloaded > 0) {
    console.log(
      colorize(` Downloaded ${downloaded} file(s) from S3`, "success"),
    );
  }
}
187
+
188
/**
 * Synchronise one S3-backed content item into saveTo.
 *
 * When contentItem.sync is truthy, local files are pushed up first and the
 * remote is then mirrored down; otherwise this is download-only.
 *
 * @param {object} contentItem - Config entry; "aws-s3" names the AWS
 *   credentials profile, "location" is "bucket/prefix".
 * @param {string} saveTo - Local directory to sync into.
 */
async function pullS3(contentItem, saveTo) {
  const client = createClient(contentItem["aws-s3"]);
  const { Bucket, Prefix } = parseBucketAndPrefix(contentItem.location);

  if (contentItem.sync) {
    // Push before pull so local edits reach S3 before the mirror-down pass.
    fs.mkdirpSync(saveTo);
    await uploadLocalToS3(client, Bucket, Prefix, saveTo);
  }

  await downloadS3ToLocal(client, Bucket, Prefix, saveTo);
}
200
+
201
// Public API — pullS3 drives the full sync; the remaining helpers are
// exported for composition and unit testing.
module.exports = {
  pullS3,
  parseBucketAndPrefix,
  getLocalFiles,
  listRemoteObjects,
  uploadLocalToS3,
  downloadS3ToLocal,
  createClient,
};
@@ -0,0 +1,223 @@
1
+ const fs = require("fs-extra");
2
+ const path = require("path");
3
+ const glob = require("glob");
4
+ const { colorize } = require("../create/libs/utilities");
5
+
6
/**
 * Join path segments onto a base URL.
 *
 * @param {string} base - Absolute base URL, e.g. "https://example.com".
 * @param {...string} parts - Path segments appended to the base.
 * @returns {string} The resolved URL as a string.
 */
function urlJoin(base, ...parts) {
  // path.posix.join guarantees "/" separators — the generic path.join
  // would emit "\" on Windows and corrupt the URL. Rest parameters
  // replace the legacy `arguments` slicing.
  return new URL(path.posix.join(...parts), base).toString();
}
12
+
13
/**
 * Download one remote asset referenced by a content payload into the local
 * media directory. Failures are logged as warnings, never thrown.
 *
 * @param {string} src - Asset URL; may be protocol- or site-relative.
 * @param {object} options - { saveTo, find, path } pull options.
 */
async function image(src, options) {
  const strippedName = src.replace(new RegExp(options.find), "");
  const file = path.join(options.saveTo, "media", strippedName);

  let resolvedSrc = src;

  // Contentful serves protocol-relative URLs ("//images.ctf...") — prepend
  // a scheme so they parse.
  if (options.path.indexOf("contentful") > -1) {
    resolvedSrc = `https:${resolvedSrc}`;
  }

  // Site-relative paths are not valid URLs; resolve them against the base.
  try {
    new URL(resolvedSrc);
  } catch {
    resolvedSrc = urlJoin(options.path, resolvedSrc);
  }

  fs.mkdirpSync(path.dirname(file));

  try {
    const res = await fetch(resolvedSrc);
    console.log(
      colorize(` Downloaded: ${path.basename(file)}`, "success"),
    );
    const buffer = Buffer.from(await res.arrayBuffer());
    fs.writeFileSync(file, buffer);
  } catch (err) {
    console.log(
      colorize(
        ` Failed to download: ${resolvedSrc} - ${err.message}`,
        "warning",
      ),
    );
  }
}
49
+
50
/**
 * Scan pulled content files for asset URLs matching options.find and
 * download each unique asset (at most 5 concurrent downloads).
 *
 * @param {object} options - { saveTo, bundle, ext, find, path }.
 */
async function download(options) {
  try {
    const pLimit = (await import("p-limit")).default;
    const limit = pLimit(5);

    const pattern = path.join(options.saveTo, options.bundle, `*.${options.ext}`);
    const finder = new RegExp(options.find);
    const assets = new Set(); // Set dedupes while preserving first-seen order.

    for (const endpoint of glob.sync(pattern)) {
      const data = fs.readFileSync(endpoint, { encoding: "utf8" });

      // Every quoted span in the file; values matching the find pattern are
      // treated as downloadable asset URLs.
      const matches = data.match(/(["'])(?:(?=(\\?))\2.)*?\1/g) || [];
      for (const token of matches) {
        let value;
        try {
          // Single-quoted spans (e.g. apostrophes in prose) are not valid
          // JSON — previously one bad token aborted the whole asset pass;
          // now it is simply skipped.
          value = JSON.parse(token);
        } catch {
          continue;
        }

        if (finder.test(value)) {
          assets.add(value);
        }
      }
    }

    await Promise.all([...assets].map((d) => limit(() => image(d, options))));
  } catch (e) {
    console.log(
      colorize(` Error downloading assets: ${e.message}`, "error"),
    );
  }
}
85
+
86
/**
 * Rewrite asset URLs inside pulled content files to local media paths, and
 * unwrap Contentful responses to a single object.
 *
 * @param {object} options - { saveTo, bundle, ext, find, type }.
 */
async function rewrite(options) {
  console.log(colorize(` Rewriting json to use local paths`, "success"));

  const pattern = path.join(options.saveTo, options.bundle, `*.${options.ext}`);
  // No /g flag, so reusing this regex with .test() is stateless/safe.
  const finder = new RegExp(options.find);

  for (const endpoint of glob.sync(pattern)) {
    let data = fs.readFileSync(endpoint, { encoding: "utf8" });

    // Replace every quoted span whose value matches the find pattern with a
    // local path.
    // NOTE(review): the rewritten prefix is hard-coded to "media/content"
    // while download() saves under `${saveTo}/media` — confirm the consumer
    // maps these paths.
    data = data.replaceAll(/(["'])(?:(?=(\\?))\2.)*?\1/g, (token) => {
      let value;
      try {
        // Single-quoted spans are not valid JSON — previously one bad token
        // threw and rejected the whole rewrite; leave them untouched instead.
        value = JSON.parse(token);
      } catch {
        return token;
      }

      if (finder.test(value)) {
        return `"${path.join("media/content", value.replace(new RegExp(options.find), ""))}"`;
      }

      return token;
    });

    // Contentful pagination wraps results in an array; persist the first
    // entry only.
    if (options.type === "contentful") {
      data = JSON.stringify(JSON.parse(data)[0]);
    }

    fs.writeFileSync(endpoint, data, { encoding: "utf8" });
  }
}
112
+
113
/**
 * Fetch all pages of a single API endpoint (WordPress or Contentful) and
 * persist the combined payload as one JSON file under saveTo/bundle.
 *
 * @param {object} options - { path, api, endpoint, type, ext, saveTo, bundle }.
 */
async function load(options) {
  let data = [];
  let page = 0;
  let totalPages;

  do {
    page++;

    let uri;
    if (options.type === "contentful") {
      // Contentful paginates with a skip offset in batches of 100.
      uri = urlJoin(
        options.path,
        `${options.api}${options.endpoint}&skip=${(page - 1) * 100}`,
      );
    } else {
      // NOTE(review): per_page=1 fetches one item per request — confirm this
      // is intentional and not a typo for a larger page size.
      uri = urlJoin(
        options.path,
        options.api,
        `${options.endpoint}?per_page=1&page=${page}`,
      );
    }

    const res = await fetch(uri);
    const json = await res.json();
    data = data.concat(json);

    // Loop until the reported page count equals the page just fetched.
    if (options.type === "contentful") {
      totalPages = Math.ceil(json.total / 100);
    } else {
      totalPages = +res.headers.get("x-wp-totalpages");
    }
  } while (totalPages && totalPages !== page);

  console.log(colorize(` Downloaded: ${options.endpoint}`, "success"));

  const file = path.join(
    options.saveTo,
    options.bundle,
    `${options.endpoint}.${options.ext}`,
  );

  fs.mkdirpSync(path.dirname(file));
  fs.writeFileSync(file, JSON.stringify(data));
}
158
+
159
/**
 * Run the HTTP content pipeline for every URL-based content item:
 *   1) load every endpoint,
 *   2) download any assets the payloads reference,
 *   3) rewrite the saved json to point at the local asset copies.
 * Each stage runs with at most 5 concurrent tasks.
 *
 * @param {Array<object>} contentItems - Entries of the content config.
 * @param {string} srcBase - Project source root used for default save paths.
 */
async function pullRequests(contentItems, srcBase) {
  const pLimit = (await import("p-limit")).default;
  const limit = pLimit(5);

  const defaultSaveTo = (d, i) =>
    d.saveTo || `${srcBase}/content/${d.key || `content-${i}`}`;
  const DEFAULT_FIND = `^https.*/wp-content/uploads`;

  // Stage 1 — fetch every endpoint of every URL-based item.
  const loads = contentItems.flatMap((d, i) => {
    if (!d.url) return [];
    return d.endpoints.map((endpoint) =>
      limit(() =>
        load({
          path: d.url,
          api: d.api || "/wp-json/wp/v2/",
          endpoint,
          type: d.type || "wp",
          ext: d.ext || "json",
          saveTo: defaultSaveTo(d, i),
          bundle: d.bundle ? "media/" : "",
        }),
      ),
    );
  });
  await Promise.all(loads);

  // Stage 2 — pull down referenced assets. An explicit `find: null`
  // disables asset handling for an item.
  await Promise.all(
    contentItems.map((d, i) => {
      if (!d.url || d.find === null) return undefined;
      return limit(() =>
        download({
          path: d.url,
          ext: d.ext || "json",
          saveTo: defaultSaveTo(d, i),
          bundle: d.bundle ? "media/" : "",
          find: d.find || DEFAULT_FIND,
        }),
      );
    }),
  );

  // Stage 3 — point the saved json at the local copies of those assets.
  await Promise.all(
    contentItems.map((d, i) => {
      if (!d.url || d.find === null) return undefined;
      return limit(() =>
        rewrite({
          ext: d.ext || "json",
          type: d.type || "wp",
          saveTo: defaultSaveTo(d, i),
          bundle: d.bundle ? "media/" : "",
          find: d.find || DEFAULT_FIND,
        }),
      );
    }),
  );
}
222
+
223
// pullRequests is the pipeline entry point; the rest are exported for reuse and tests.
module.exports = { pullRequests, load, download, rewrite, image, urlJoin };