@awsless/awsless 0.0.303 → 0.0.304
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/app.json +1 -1
- package/dist/bin.js +137 -191
- package/dist/build-json-schema.js +7 -13
- package/dist/stack.json +1 -1
- package/package.json +11 -10
package/dist/bin.js (CHANGED)

@@ -129,8 +129,8 @@ var findRootDir = async (path, configFiles, level = 5) => {
 };
 var fileExist = async (file) => {
 try {
-const
-if (
+const stat4 = await lstat(file);
+if (stat4.isFile()) {
 return true;
 }
 } catch (error) {
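Note on the hunk above: the updated check resolves the path with lstat and returns true only for regular files. A minimal sketch of the resulting helper, assuming the error branch (outside this hunk) simply falls through to false:

    import { lstat } from "fs/promises";

    // Returns true only when the path exists and points to a regular file.
    const fileExist = async (file: string): Promise<boolean> => {
      try {
        const stat = await lstat(file);
        if (stat.isFile()) {
          return true;
        }
      } catch (error) {
        // Missing or inaccessible path: assumed to fall through to false.
      }
      return false;
    };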
@@ -383,6 +383,8 @@ import { z } from "zod";
 var ResourceIdSchema = z.string().min(3).max(24).regex(/^[a-z0-9\-]+$/i, "Invalid resource ID").transform((value) => paramCase(value));
 
 // src/feature/function/schema.ts
+import { days, minutes as minutes2, seconds } from "@awsless/duration";
+import { gibibytes, mebibytes } from "@awsless/size";
 import { z as z5 } from "zod";
 
 // src/config/schema/duration.ts
@@ -439,8 +441,6 @@ var sizeMax = (max) => {
 };
 
 // src/feature/function/schema.ts
-import { days, minutes as minutes2, seconds } from "@awsless/duration";
-import { gibibytes, mebibytes } from "@awsless/size";
 var MemorySizeSchema = SizeSchema.refine(sizeMin(mebibytes(128)), "Minimum memory size is 128 MB").refine(sizeMax(gibibytes(10)), "Maximum memory size is 10 GB").describe(
 "The amount of memory available to the function at runtime. Increasing the function memory also increases its CPU allocation. The value can be any multiple of 1 MB. You can specify a size value from 128 MB to 10 GB."
 );
@@ -451,9 +451,7 @@ var EphemeralStorageSizeSchema = SizeSchema.refine(
 sizeMin(mebibytes(512)),
 "Minimum ephemeral storage size is 512 MB"
 ).refine(sizeMax(gibibytes(10)), "Minimum ephemeral storage size is 10 GB").describe("The size of the function's /tmp directory. You can specify a size value from 512 MB to 10 GB.");
-var ReservedConcurrentExecutionsSchema = z5.number().int().min(0).describe(
-"The number of simultaneous executions to reserve for the function. You can specify a number from 0."
-);
+var ReservedConcurrentExecutionsSchema = z5.number().int().min(0).describe("The number of simultaneous executions to reserve for the function. You can specify a number from 0.");
 var EnvironmentSchema = z5.record(z5.string(), z5.string()).optional().describe("Environment variable key-value pairs.");
 var ArchitectureSchema = z5.enum(["x86_64", "arm64"]).describe("The instruction set architecture that the function supports.");
 var RetryAttemptsSchema = z5.number().int().min(0).max(2).describe(
@@ -472,9 +470,7 @@ var PermissionSchema = z5.object({
 resources: ResourcesSchema
 });
 var PermissionsSchema = z5.union([PermissionSchema.transform((v) => [v]), PermissionSchema.array()]).describe("Add IAM permissions to your function.");
-var WarmSchema = z5.number().int().min(0).max(10).describe(
-"Specify how many functions you want to warm up each 5 minutes. You can specify a number from 0 to 10."
-);
+var WarmSchema = z5.number().int().min(0).max(10).describe("Specify how many functions you want to warm up each 5 minutes. You can specify a number from 0 to 10.");
 var VPCSchema = z5.boolean().describe("Put the function inside your global VPC.");
 var MinifySchema = z5.boolean().describe("Minify the function code.");
 var HandlerSchema = z5.string().describe("The name of the exported method within your code that Lambda calls to run your function.");
@@ -483,12 +479,12 @@ var DescriptionSchema = z5.string().describe("A description of the function.");
 var LogRetentionSchema = DurationSchema.refine(
 durationMin(days(0)),
 "Minimum log retention is 0 day, which will disable logging."
-);
+).describe("The log retention duration.");
 var LogSchema = z5.union([
 z5.boolean().transform((enabled) => ({ retention: enabled ? days(7) : days(0) })),
 LogRetentionSchema.transform((retention) => ({ retention })),
 z5.object({
-retention: LogRetentionSchema.
+retention: LogRetentionSchema.optional(),
 format: z5.enum(["text", "json"]).describe(
 `The format in which Lambda sends your function's application and system logs to CloudWatch. Select between plain text and structured JSON.`
 ).optional(),
@@ -499,9 +495,7 @@ var LogSchema = z5.union([
 "Set this property to filter the application logs for your function that Lambda sends to CloudWatch. Lambda only sends application logs at the selected level of detail and lower, where TRACE is the highest level and FATAL is the lowest."
 ).optional()
 })
-]).describe(
-"Enable logging to a CloudWatch log group. Providing a duration value will set the log retention time."
-);
+]).describe("Enable logging to a CloudWatch log group. Providing a duration value will set the log retention time.");
 var FunctionSchema = z5.union([
 LocalFileSchema.transform((file) => ({
 file
@@ -1951,8 +1945,9 @@ var formatLocalResourceName = (appName, stackName, ns, id, seperator = "--") =>
 
 // src/feature/function/util.ts
 import { Asset, aws as aws2 } from "@awsless/formation";
+import { generateFileHash } from "@awsless/ts-file-cache";
 import deepmerge from "deepmerge";
-import { basename as
+import { basename as basename3, dirname as dirname6, extname as extname2 } from "path";
 import { exec } from "promisify-child-process";
 
 // src/build/index.ts
@@ -2007,7 +2002,7 @@ var writeCache = async (file, version, data) => {
 var getBuildPath = (type, name, file) => {
 return join6(directories.build, type, name, file);
 };
-var build = (type, name, builder) => {
+var build = (type, name, builder, props) => {
 return builder(async (version, callback) => {
 const cacheFile = getBuildPath(type, name, "cache.json");
 const cache = await readCache(cacheFile);
@@ -2030,7 +2025,7 @@ var build = (type, name, builder) => {
 ...data,
 cached: false
 };
-});
+}, props);
 };
 
 // src/util/byte-size.ts
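Read together, the two src/build/index.ts hunks add a fourth props argument to build() and forward it as the second argument of the registered builder callback. A rough TypeScript sketch of that plumbing (the type names are illustrative only; the shipped file is untyped, bundled JavaScript):

    // Illustrative types for the shape the diff implies.
    type BuildProps = { packageVersions: unknown };
    type BuildFn = (version: string, callback: () => Promise<void>) => Promise<unknown>;
    type Builder = (build: BuildFn, props: BuildProps) => unknown;

    // build() now threads `props` through to every registered builder,
    // so a builder can destructure { packageVersions } next to the build function.
    const build = (type: string, name: string, builder: Builder, props: BuildProps) => {
      return builder(async (version, callback) => {
        // Simplified: the real implementation consults cache.json for `version`
        // and only invokes the callback on a cache miss.
        await callback();
      }, props);
    };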
@@ -2125,63 +2120,6 @@ var bundleTypeScript = async ({ format: format2 = "esm", minify = true, file })
 };
 };
 
-// src/feature/function/build/typescript/fingerprint.ts
-import { createHash as createHash2 } from "crypto";
-import { readFile as readFile4, readdir, stat as stat3 } from "fs/promises";
-import { basename as basename3, dirname as dirname6, extname as extname2, join as join7 } from "path";
-import parseStaticImports from "parse-static-imports";
-var extensions = ["js", "mjs", "jsx", "ts", "mts", "tsx"];
-var generateFileHashes = async (file, hashes) => {
-if (hashes.has(file)) {
-return;
-}
-const code = await readModuleFile(file);
-const deps = await findDependencies(file, code);
-const hash = createHash2("sha1").update(code).digest();
-hashes.set(file, hash);
-for (const dep of deps) {
-if (dep.startsWith("/")) {
-await generateFileHashes(dep, hashes);
-}
-}
-};
-var fingerprintFromFile = async (file) => {
-const hashes = /* @__PURE__ */ new Map();
-await generateFileHashes(file, hashes);
-const merge2 = Buffer.concat(Array.from(hashes.values()).sort());
-return createHash2("sha1").update(merge2).digest("hex");
-};
-var readModuleFile = (file) => {
-if (file.endsWith(".js")) {
-return readFiles([file, file.substring(0, file.length - 3) + ".ts"]);
-}
-if (!basename3(file).includes(".")) {
-return readFiles([
-file,
-...extensions.map((exp) => `${file}.${exp}`),
-...extensions.map((exp) => join7(file, `/index.${exp}`))
-]);
-}
-return readFile4(file, "utf8");
-};
-var readFiles = async (files) => {
-for (const file of files) {
-try {
-const s = await stat3(file);
-if (s.isFile()) {
-return readFile4(file, "utf8");
-}
-} catch (_) {
-continue;
-}
-}
-throw new Error(`No such file: ${files.join(", ")}`);
-};
-var findDependencies = async (file, code) => {
-const imports = await parseStaticImports(code);
-return imports.map((entry) => entry.moduleName).filter(Boolean).map((value) => value?.startsWith(".") ? join7(dirname6(file), value) : value);
-};
-
 // src/feature/function/build/zip.ts
 import JSZip from "jszip";
 var zipFiles = (files) => {
@@ -2208,12 +2146,14 @@ var createLambdaFunction = (group, ctx, ns, id, local2) => {
 name = formatGlobalResourceName(ctx.appConfig.name, ns, id);
 }
 const props = deepmerge(ctx.appConfig.defaults.function, local2);
-const ext =
+const ext = extname2(props.file);
 let code;
 let sourceCodeHash;
 if ([".ts", ".js", ".tsx", ".sx"].includes(ext)) {
-ctx.registerBuild("function", name, async (build3) => {
-const version = await
+ctx.registerBuild("function", name, async (build3, { packageVersions }) => {
+const version = await generateFileHash(props.file, {
+packageVersions
+});
 return build3(version, async (write) => {
 const bundle = await bundleTypeScript({ file: props.file });
 const archive = await zipFiles(bundle.files);
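The hand-rolled import-walking fingerprint removed above is replaced by @awsless/ts-file-cache: the function builder now derives its cache version from generateFileHash. Based only on the calls visible in this diff, the per-function cache key looks roughly like this (packageVersions is supplied by the CLI, see the build-assets hunks further down; its exact type here is an assumption):

    import { generateFileHash } from "@awsless/ts-file-cache";

    // Cache key for a ts/js function bundle: a hash of the entry file plus the
    // project's locked dependency versions, presumably so a dependency bump also
    // invalidates the cached bundle (inferred from the names, not stated in the diff).
    const functionBuildVersion = (entryFile: string, packageVersions: Record<string, string>) =>
      generateFileHash(entryFile, { packageVersions });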
@@ -2232,9 +2172,9 @@ var createLambdaFunction = (group, ctx, ns, id, local2) => {
 key: `/lambda/${name}.zip`,
 body: Asset.fromFile(getBuildPath("function", name, "bundle.zip"))
 });
-} else if (
+} else if (basename3(props.file) === "dockerfile") {
 ctx.registerBuild("function", name, async (build3) => {
-const basePath2 =
+const basePath2 = dirname6(props.file);
 const version = await hashElement(basePath2, {
 files: {
 exclude: ["stack.json"]
@@ -2960,83 +2900,15 @@ var functionFeature = defineFeature({
 import { constantCase as constantCase4, paramCase as paramCase5 } from "change-case";
 import { generate } from "@awsless/graphql";
 import { mergeTypeDefs } from "@graphql-tools/merge";
-import { readFile as
+import { readFile as readFile4 } from "fs/promises";
 import { buildSchema, print } from "graphql";
 import { Asset as Asset2, aws as aws8, Node as Node7 } from "@awsless/formation";
-import { createHash as createHash5 } from "crypto";
-
-// src/build/fingerprint.ts
 import { createHash as createHash3 } from "crypto";
-import { readFile as readFile5, readdir as readdir2, stat as stat4 } from "fs/promises";
-import { basename as basename5, dirname as dirname8, extname as extname4, join as join8 } from "path";
-import parseStaticImports2 from "parse-static-imports";
-var extensions2 = ["js", "mjs", "jsx", "ts", "mts", "tsx"];
-var generateFileHashes2 = async (file, hashes) => {
-if (hashes.has(file)) {
-return;
-}
-const code = await readModuleFile2(file);
-const deps = await findDependencies2(file, code);
-const hash = createHash3("sha1").update(code).digest();
-hashes.set(file, hash);
-for (const dep of deps) {
-if (dep.startsWith("/")) {
-await generateFileHashes2(dep, hashes);
-}
-}
-};
-var fingerprintFromFile2 = async (file) => {
-const hashes = /* @__PURE__ */ new Map();
-await generateFileHashes2(file, hashes);
-const merge2 = Buffer.concat(Array.from(hashes.values()).sort());
-return createHash3("sha1").update(merge2).digest("hex");
-};
-var fingerprintFromDirectory = async (dir) => {
-const hashes = /* @__PURE__ */ new Map();
-const files = await readdir2(dir, { recursive: true });
-for (const file of files) {
-if (extensions2.includes(extname4(file).substring(1)) && file.at(0) !== "_") {
-await generateFileHashes2(join8(dir, file), hashes);
-}
-}
-const merge2 = Buffer.concat(Array.from(hashes.values()).sort());
-return createHash3("sha1").update(merge2).digest("hex");
-};
-var readModuleFile2 = (file) => {
-if (file.endsWith(".js")) {
-return readFiles2([file, file.substring(0, file.length - 3) + ".ts"]);
-}
-if (!basename5(file).includes(".")) {
-return readFiles2([
-file,
-...extensions2.map((exp) => `${file}.${exp}`),
-...extensions2.map((exp) => join8(file, `/index.${exp}`))
-]);
-}
-return readFile5(file, "utf8");
-};
-var readFiles2 = async (files) => {
-for (const file of files) {
-try {
-const s = await stat4(file);
-if (s.isFile()) {
-return readFile5(file, "utf8");
-}
-} catch (_) {
-continue;
-}
-}
-throw new Error(`No such file: ${files.join(", ")}`);
-};
-var findDependencies2 = async (file, code) => {
-const imports = await parseStaticImports2(code);
-return imports.map((entry) => entry.moduleName).filter(Boolean).map((value) => value?.startsWith(".") ? join8(dirname8(file), value) : value);
-};
 
 // src/util/id.ts
-import { createHash as
+import { createHash as createHash2 } from "crypto";
 var shortId = (ns) => {
-return
+return createHash2("md5").update(ns).digest("hex").substring(0, 10);
 };
 
 // src/feature/domain/util.ts
@@ -3061,7 +2933,7 @@ var formatFullDomainName = (config2, id, subDomain) => {
 import commonjs2 from "@rollup/plugin-commonjs";
 import json2 from "@rollup/plugin-json";
 import nodeResolve2 from "@rollup/plugin-node-resolve";
-import { dirname as
+import { dirname as dirname7 } from "path";
 import { rollup as rollup2 } from "rollup";
 import { minify as swcMinify2, swc as swc2 } from "rollup-plugin-swc3";
 var buildTypeScriptResolver = async (input, { minify = false } = {}) => {
@@ -3085,7 +2957,7 @@ var buildTypeScriptResolver = async (input, { minify = false } = {}) => {
 // minify,
 // module: true,
 jsc: {
-baseUrl:
+baseUrl: dirname7(input),
 minify: { sourceMap: true }
 },
 sourceMaps: true
@@ -3118,6 +2990,7 @@ var buildTypeScriptResolver = async (input, { minify = false } = {}) => {
 };
 
 // src/feature/graphql/index.ts
+import { generateFileHash as generateFileHash2 } from "@awsless/ts-file-cache";
 var defaultResolver = `
 export function request(ctx) {
 return {
@@ -3170,7 +3043,7 @@ var graphqlFeature = defineFeature({
 for (const [id, files] of apis) {
 const sources = await Promise.all(
 files.map((file) => {
-return
+return readFile4(file, "utf8");
 })
 );
 if (sources.length) {
@@ -3239,13 +3112,13 @@ var graphqlFeature = defineFeature({
 for (const stack of ctx.stackConfigs) {
 const file = stack.graphql?.[id]?.schema;
 if (file) {
-const source = await
-const finger2 =
+const source = await readFile4(file, "utf8");
+const finger2 = createHash3("sha1").update(source).digest("hex");
 sources.push(source);
 fingers.push(finger2);
 }
 }
-const finger =
+const finger = createHash3("sha1").update(sources.sort().join(" ")).digest("hex");
 return build3(finger, async (write) => {
 const defs = mergeTypeDefs([scalarSchema, baseSchema, ...sources]);
 const output = print(defs);
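The GraphQL schema cache key stays hand-rolled: each contributing schema source is hashed on its own, and the per-API fingerprint is a SHA-1 over the sorted, space-joined sources. A small standalone restatement of that derivation with Node's crypto (the function name is mine; the hashing steps are as shown above):

    import { createHash } from "crypto";

    // Order-insensitive fingerprint over all schema sources that feed one API.
    const schemaFingerprint = (sources: string[]): string =>
      createHash("sha1").update([...sources].sort().join(" ")).digest("hex");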
@@ -3260,9 +3133,11 @@ var graphqlFeature = defineFeature({
 definition: Asset2.fromFile(getBuildPath("graphql-schema", name, "schema.gql"))
 });
 if (props.resolver) {
-ctx.registerBuild("graphql-resolver", id, async (build3) => {
+ctx.registerBuild("graphql-resolver", id, async (build3, { packageVersions }) => {
 const resolver = props.resolver;
-const version = await
+const version = await generateFileHash2(resolver, {
+packageVersions
+});
 return build3(version, async (write) => {
 const file = await buildTypeScriptResolver(resolver);
 if (!file) {
@@ -3342,9 +3217,11 @@ var graphqlFeature = defineFeature({
 });
 let code = Asset2.fromString(defaultResolver);
 if ("resolver" in props2 && props2.resolver) {
-ctx.registerBuild("graphql-resolver", entryId, async (build3) => {
+ctx.registerBuild("graphql-resolver", entryId, async (build3, { packageVersions }) => {
 const resolver = props2.resolver;
-const version = await
+const version = await generateFileHash2(resolver, {
+packageVersions
+});
 return build3(version, async (write) => {
 const file = await buildTypeScriptResolver(resolver);
 if (!file) {
@@ -3360,7 +3237,7 @@ var graphqlFeature = defineFeature({
 } else if (defaultProps.resolver) {
 code = Asset2.fromFile(getBuildPath("graphql-resolver", id, "resolver.js"));
 }
-new aws8.appsync.Resolver(resolverGroup, "resolver
+new aws8.appsync.Resolver(resolverGroup, "resolver", {
 apiId,
 typeName,
 fieldName,
@@ -3551,7 +3428,7 @@ import { days as days3 } from "@awsless/duration";
 import { Asset as Asset3, aws as aws10, combine, Node as Node9, Output as Output2, unwrap } from "@awsless/formation";
 import { hashElement as hashElement2 } from "folder-hash";
 import { mkdir as mkdir3 } from "fs/promises";
-import { dirname as
+import { dirname as dirname8 } from "path";
 import { zip } from "zip-a-folder";
 var instanceFeature = defineFeature({
 name: "instance",
@@ -3622,7 +3499,7 @@ var instanceFeature = defineFeature({
 }
 });
 await build3(version.hash, async () => {
-await mkdir3(
+await mkdir3(dirname8(bundleFile), { recursive: true });
 await zip(props.code, bundleFile);
 });
 });
@@ -3731,6 +3608,7 @@ var onFailureFeature = defineFeature({
 
 // src/feature/pubsub/index.ts
 import { aws as aws12, Node as Node11 } from "@awsless/formation";
+import { constantCase as constantCase6 } from "change-case";
 var pubsubFeature = defineFeature({
 name: "pubsub",
 onApp(ctx) {
@@ -3740,8 +3618,9 @@ var pubsubFeature = defineFeature({
 const { lambda } = createLambdaFunction(group, ctx, "pubsub-authorizer", id, functionProps);
 lambda.addEnvironment("PUBSUB_POLICY", JSON.stringify(props.policy));
 lambda.addEnvironment("AWS_ACCOUNT_ID", ctx.accountId);
+const name = formatGlobalResourceName(ctx.app.name, "pubsub", id);
 const authorizer = new aws12.iot.Authorizer(group, "authorizer", {
-name
+name,
 functionArn: lambda.arn
 });
 new aws12.lambda.Permission(group, "permission", {
@@ -3750,6 +3629,7 @@ var pubsubFeature = defineFeature({
 sourceArn: authorizer.arn,
 action: "lambda:InvokeFunction"
 });
+ctx.addEnv(`PUBSUB_${constantCase6(id)}_AUTHORIZER`, name);
 }
 ctx.onPolicy((policy) => {
 policy.addStatement({
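In the pubsub hunks, the IoT authorizer's name (built with formatGlobalResourceName) is now also exported to the function environment as PUBSUB_<ID>_AUTHORIZER. A short sketch of what a consumer can rely on after this change (reading it back through process.env is my assumption of typical usage, not shown in the diff):

    import { constantCase } from "change-case";

    // The authorizer's name is exported under PUBSUB_<ID>_AUTHORIZER,
    // e.g. a pubsub id of "events" becomes PUBSUB_EVENTS_AUTHORIZER.
    const authorizerEnvKey = (id: string) => `PUBSUB_${constantCase(id)}_AUTHORIZER`;

    const authorizerName = process.env[authorizerEnvKey("events")];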
@@ -3782,7 +3662,7 @@ var pubsubFeature = defineFeature({
 
 // src/feature/queue/index.ts
 import { aws as aws13, Node as Node12 } from "@awsless/formation";
-import { camelCase as camelCase5, constantCase as
+import { camelCase as camelCase5, constantCase as constantCase7 } from "change-case";
 import deepmerge2 from "deepmerge";
 import { relative as relative3 } from "path";
 var typeGenCode3 = `
@@ -3855,7 +3735,7 @@ var queueFeature = defineFeature({
 actions: ["sqs:ReceiveMessage", "sqs:DeleteMessage", "sqs:GetQueueAttributes"],
 resources: [queue2.arn]
 });
-ctx.addEnv(`QUEUE_${
+ctx.addEnv(`QUEUE_${constantCase7(ctx.stack.name)}_${constantCase7(id)}_URL`, queue2.url);
 ctx.onPolicy((policy2) => {
 policy2.addStatement(queue2.permissions);
 });
@@ -3865,7 +3745,7 @@ var queueFeature = defineFeature({
 
 // src/feature/rest/index.ts
 import { aws as aws14, Node as Node13 } from "@awsless/formation";
-import { constantCase as
+import { constantCase as constantCase8 } from "change-case";
 var restFeature = defineFeature({
 name: "rest",
 onApp(ctx) {
@@ -3908,7 +3788,7 @@ var restFeature = defineFeature({
 }
 });
 record.dependsOn(domain, mapping);
-ctx.bind(`REST_${
+ctx.bind(`REST_${constantCase8(id)}_ENDPOINT`, domainName);
 } else {
 }
 }
@@ -3952,7 +3832,7 @@ var restFeature = defineFeature({
 
 // src/feature/search/index.ts
 import { aws as aws15, Node as Node14 } from "@awsless/formation";
-import { constantCase as
+import { constantCase as constantCase9 } from "change-case";
 var typeGenCode4 = `
 import { AnyStruct, Table } from '@awsless/open-search'
 
@@ -4008,7 +3888,7 @@ var searchFeature = defineFeature({
 ]
 });
 }
-ctx.addEnv(`SEARCH_${
+ctx.addEnv(`SEARCH_${constantCase9(ctx.stack.name)}_${constantCase9(id)}_DOMAIN`, openSearch.domainEndpoint);
 ctx.onPolicy((policy) => {
 policy.addStatement({
 actions: ["es:ESHttp*"],
@@ -4023,11 +3903,11 @@ var searchFeature = defineFeature({
 import { days as days4, seconds as seconds3 } from "@awsless/duration";
 import { Asset as Asset4, aws as aws16, Node as Node15 } from "@awsless/formation";
 import { glob as glob2 } from "glob";
-import { join as
+import { join as join7 } from "path";
 
 // src/feature/site/util.ts
 import { lookup, contentType } from "mime-types";
-import { extname as
+import { extname as extname3 } from "path";
 var getCacheControl = (file) => {
 switch (lookup(file)) {
 case false:
@@ -4042,7 +3922,7 @@ var getCacheControl = (file) => {
 }
 };
 var getContentType = (file) => {
-return contentType(
+return contentType(extname3(file)) || "text/html; charset=utf-8";
 };
 
 // src/feature/site/index.ts
@@ -4117,7 +3997,7 @@ var siteFeature = defineFeature({
 const object = new aws16.s3.BucketObject(group, file, {
 bucket: bucket.name,
 key: file,
-body: Asset4.fromFile(
+body: Asset4.fromFile(join7(props.static, file)),
 cacheControl: getCacheControl(file),
 contentType: getContentType(file)
 });
@@ -4325,7 +4205,7 @@ var storeFeature = defineFeature({
 
 // src/feature/stream/index.ts
 import { aws as aws18, Node as Node17 } from "@awsless/formation";
-import { constantCase as
+import { constantCase as constantCase10 } from "change-case";
 var streamFeature = defineFeature({
 name: "stream",
 onStack(ctx) {
@@ -4339,7 +4219,7 @@ var streamFeature = defineFeature({
 const streamKey = new aws18.ivs.StreamKey(group, "key", {
 channel: channel.arn
 });
-const prefix = `STREAM_${
+const prefix = `STREAM_${constantCase10(ctx.stack.name)}_${constantCase10(id)}`;
 ctx.bind(`${prefix}_ENDPOINT`, channel.playbackUrl);
 ctx.addEnv(`${prefix}_INGEST_ENDPOINT`, channel.ingestEndpoint);
 ctx.addEnv(`${prefix}_STREAM_KEY`, streamKey.value);
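Across the queue, search, and stream hunks the environment keys are built the same way: a constant-cased stack name plus a constant-cased resource id, while the rest endpoint (and the pubsub authorizer earlier) key off the id alone. A tiny sketch of the naming these template literals amount to (example values are made up):

    import { constantCase } from "change-case";

    // Stack-scoped key: <FEATURE>_<STACK>_<ID>_<FIELD>
    const stackScopedKey = (feature: string, stack: string, id: string, field: string) =>
      `${feature}_${constantCase(stack)}_${constantCase(id)}_${field}`;

    stackScopedKey("QUEUE", "orders", "email-jobs", "URL");     // "QUEUE_ORDERS_EMAIL_JOBS_URL"
    stackScopedKey("SEARCH", "orders", "products", "DOMAIN");   // "SEARCH_ORDERS_PRODUCTS_DOMAIN"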
@@ -4885,6 +4765,7 @@ var createApp = (props, filters = []) => {
 };
 
 // src/cli/ui/complex/build-assets.ts
+import { loadPackageDependencyVersions } from "@awsless/ts-file-cache";
 import chalk3 from "chalk";
 var buildAssets = async (builders, showResult = false) => {
 if (builders.length === 0) {
@@ -4892,8 +4773,11 @@ var buildAssets = async (builders, showResult = false) => {
 }
 const results = [];
 await task("Building assets", async (update) => {
+const packageVersions = await loadPackageDependencyVersions(".", "pnpm");
 for (const builder of builders) {
-const result = await build(builder.type, builder.name, builder.builder
+const result = await build(builder.type, builder.name, builder.builder, {
+packageVersions
+});
 results.push({ ...builder, result });
 }
 update("Done building assets.");
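These build-assets hunks close the loop opened by the earlier build() change: dependency versions are resolved once per CLI run from the pnpm lockfile and then handed to every registered builder. A condensed sketch of that wiring, with build and builders standing in for the surrounding bin.js code shown above:

    import { loadPackageDependencyVersions } from "@awsless/ts-file-cache";

    // Stand-ins for the surrounding bin.js code (declared here only to keep the sketch self-contained).
    declare const builders: { type: string; name: string; builder: unknown }[];
    declare function build(type: string, name: string, builder: unknown, props: { packageVersions: unknown }): Promise<unknown>;

    // Resolve the locked dependency versions once per run...
    const packageVersions = await loadPackageDependencyVersions(".", "pnpm");

    // ...and forward them through build()'s new props argument, where they end up
    // in each builder's { packageVersions } and feed generateFileHash / generateFileHash2.
    for (const builder of builders) {
      await build(builder.type, builder.name, builder.builder, { packageVersions });
    }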
@@ -5106,8 +4990,72 @@ var del2 = (program2) => {
 import { confirm as confirm4, isCancel as isCancel5 } from "@clack/prompts";
 
 // src/cli/ui/complex/run-tests.ts
-import {
-import
+import { log as log8 } from "@clack/prompts";
+import chalk6 from "chalk";
+import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile3 } from "fs/promises";
+import { join as join10 } from "path";
+
+// src/build/__fingerprint.ts
+import { createHash as createHash4 } from "crypto";
+import { readdir, readFile as readFile5, stat as stat3 } from "fs/promises";
+import { basename as basename4, dirname as dirname9, extname as extname4, join as join8 } from "path";
+import parseStaticImports from "parse-static-imports";
+var extensions = ["js", "mjs", "jsx", "ts", "mts", "tsx"];
+var generateFileHashes = async (file, hashes) => {
+if (hashes.has(file)) {
+return;
+}
+const code = await readModuleFile(file);
+const deps = await findDependencies(file, code);
+const hash = createHash4("sha1").update(code).digest();
+hashes.set(file, hash);
+for (const dep of deps) {
+if (dep.startsWith("/")) {
+await generateFileHashes(dep, hashes);
+}
+}
+};
+var fingerprintFromDirectory = async (dir) => {
+const hashes = /* @__PURE__ */ new Map();
+const files = await readdir(dir, { recursive: true });
+for (const file of files) {
+if (extensions.includes(extname4(file).substring(1)) && file.at(0) !== "_") {
+await generateFileHashes(join8(dir, file), hashes);
+}
+}
+const merge2 = Buffer.concat(Array.from(hashes.values()).sort());
+return createHash4("sha1").update(merge2).digest("hex");
+};
+var readModuleFile = (file) => {
+if (file.endsWith(".js")) {
+return readFiles([file, file.substring(0, file.length - 3) + ".ts"]);
+}
+if (!basename4(file).includes(".")) {
+return readFiles([
+file,
+...extensions.map((exp) => `${file}.${exp}`),
+...extensions.map((exp) => join8(file, `/index.${exp}`))
+]);
+}
+return readFile5(file, "utf8");
+};
+var readFiles = async (files) => {
+for (const file of files) {
+try {
+const s = await stat3(file);
+if (s.isFile()) {
+return readFile5(file, "utf8");
+}
+} catch (_) {
+continue;
+}
+}
+throw new Error(`No such file: ${files.join(", ")}`);
+};
+var findDependencies = async (file, code) => {
+const imports = await parseStaticImports(code);
+return imports.map((entry) => entry.moduleName).filter(Boolean).map((value) => value?.startsWith(".") ? join8(dirname9(file), value) : value);
+};
 
 // src/test/reporter.ts
 import { getSuites, getTests } from "@vitest/runner/utils";
@@ -5189,10 +5137,10 @@ import { startVitest } from "vitest/node";
 import commonjs3 from "@rollup/plugin-commonjs";
 import nodeResolve3 from "@rollup/plugin-node-resolve";
 import json3 from "@rollup/plugin-json";
-import { dirname as
+import { dirname as dirname10, join as join9 } from "path";
 import { fileURLToPath } from "url";
 var startTest = async (props) => {
-const __dirname =
+const __dirname = dirname10(fileURLToPath(import.meta.url));
 const result = await startVitest(
 "test",
 props.filters,
@@ -5206,7 +5154,7 @@ var startTest = async (props) => {
 exclude: ["**/_*", "**/_*/**", ...configDefaults.exclude],
 globals: true,
 reporters: props.reporter,
-globalSetup:
+globalSetup: join9(__dirname, "test-global-setup.js")
 // env: {
 // TZ: 'UTC',
 // },
@@ -5244,8 +5192,6 @@ var startTest = async (props) => {
 };
 
 // src/cli/ui/complex/run-tests.ts
-import { log as log8 } from "@clack/prompts";
-import chalk6 from "chalk";
 var formatResult = (props) => {
 const line = [`Test ${chalk6.magenta(props.stack)}`];
 if (props.cached) {
@@ -5302,10 +5248,10 @@ var logTestErrors = (event) => {
 var runTest = async (stack, dir, filters) => {
 await mkdir4(directories.test, { recursive: true });
 const fingerprint = await fingerprintFromDirectory(dir);
-const file =
+const file = join10(directories.test, `${stack}.json`);
 const exists = await fileExist(file);
 if (exists && !process.env.NO_CACHE) {
-const raw = await
+const raw = await readFile6(file, { encoding: "utf8" });
 const data = JSON.parse(raw);
 if (data.fingerprint === fingerprint) {
 log8.step(
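The directory fingerprint that the build pipeline dropped lives on in the test runner: each stack's last run is recorded in the test directory under <stack>.json together with the fingerprint of its test sources, and the run is skipped while that fingerprint is unchanged (unless NO_CACHE is set). A condensed sketch of that check, assuming the cache file holds a { fingerprint, ... } object as the hunk suggests:

    import { readFile } from "fs/promises";
    import { join } from "path";

    // True when the cached fingerprint for this stack still matches the current one.
    const canSkipTests = async (testDir: string, stack: string, fingerprint: string) => {
      if (process.env.NO_CACHE) return false;
      try {
        const raw = await readFile(join(testDir, `${stack}.json`), "utf8");
        return JSON.parse(raw).fingerprint === fingerprint;
      } catch {
        return false; // no cache entry yet
      }
    };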
@@ -5568,7 +5514,7 @@ import { log as log9 } from "@clack/prompts";
 
 // src/type-gen/generate.ts
 import { mkdir as mkdir5, writeFile as writeFile4 } from "fs/promises";
-import { dirname as
+import { dirname as dirname11, join as join11, relative as relative5 } from "path";
 var generateTypes = async (props) => {
 const files = [];
 await Promise.all(
@@ -5577,12 +5523,12 @@ var generateTypes = async (props) => {
 ...props,
 async write(file, data, include = false) {
 const code = data?.toString("utf8");
-const path =
+const path = join11(directories.types, file);
 if (code) {
 if (include) {
 files.push(relative5(directories.root, path));
 }
-await mkdir5(
+await mkdir5(dirname11(path), { recursive: true });
 await writeFile4(path, code);
 }
 }
@@ -5591,7 +5537,7 @@ var generateTypes = async (props) => {
 );
 if (files.length) {
 const code = files.map((file) => `/// <reference path='${file}' />`).join("\n");
-await writeFile4(
+await writeFile4(join11(directories.root, `awsless.d.ts`), code);
 }
 };
 };