@bifravst/aws-cdk-lambda-helpers 2.3.10 → 3.0.1
- package/dist/src/IoTActionRole.js +11 -10
- package/dist/src/LambdaLogGroup.js +5 -11
- package/dist/src/LambdaSource.d.ts +1 -1
- package/dist/src/LambdaSource.js +3 -3
- package/dist/src/PackedLambdaFn.d.ts +1 -1
- package/dist/src/PackedLambdaFn.js +8 -10
- package/dist/src/cdk.d.ts +4 -4
- package/dist/src/checkSumOfFiles.spec.js +10 -11
- package/dist/src/checksumOfFiles.js +15 -13
- package/dist/src/commonParent.js +3 -5
- package/dist/src/commonParent.spec.js +18 -16
- package/dist/src/findDependencies.js +34 -61
- package/dist/src/findDependencies.spec.js +9 -9
- package/dist/src/isTest.js +1 -1
- package/dist/src/isTest.spec.js +4 -4
- package/dist/src/lambda.d.ts +2 -2
- package/dist/src/layer.d.ts +1 -1
- package/dist/src/module-folder-named-like-handler-bug.spec.js +9 -9
- package/dist/src/packLambda.js +48 -39
- package/dist/src/packLambda.spec.d.ts +1 -0
- package/dist/src/packLambda.spec.js +28 -0
- package/dist/src/packLambdaFromPath.d.ts +1 -1
- package/dist/src/packLambdaFromPath.js +6 -7
- package/dist/src/packLayer.js +27 -28
- package/dist/src/test/getFileFromZip.d.ts +1 -0
- package/dist/src/test/getFileFromZip.js +20 -0
- package/dist/src/test-data/module-folder-named-like-handler-bug/different-level/acme/lib.js +1 -0
- package/dist/src/test-data/module-folder-named-like-handler-bug/different-level/lambda/acme.js +2 -0
- package/dist/src/test-data/module-folder-named-like-handler-bug/same-level/acme/lib.js +1 -0
- package/dist/src/test-data/module-folder-named-like-handler-bug/same-level/acme.js +2 -0
- package/dist/src/test-data/resolve-paths/foo/1.js +1 -1
- package/dist/src/test-data/resolve-paths/foo/2.js +1 -1
- package/dist/src/test-data/resolve-paths/lambda.js +4 -0
- package/dist/src/updateImports.d.ts +1 -0
- package/dist/src/updateImports.js +1 -0
- package/dist/src/updateImports.spec.d.ts +1 -0
- package/dist/src/updateImports.spec.js +10 -0
- package/dist/src/util.d.ts +4 -4
- package/package.json +8 -5
- package/dist/src/test-data/resolve-paths/foo/1.d.ts +0 -1
- package/dist/src/test-data/resolve-paths/foo/2.d.ts +0 -1
- package/dist/src/test-data/resolve-paths/foo/index.d.ts +0 -1
package/dist/src/IoTActionRole.js
CHANGED
@@ -2,11 +2,10 @@ import { aws_iam as IAM, Stack } from 'aws-cdk-lib';
  import { Construct } from 'constructs';
  /**
  * Base role for IoT Actions that allows to publish to the 'errors' topic
- */
- export class IoTActionRole extends Construct {
+ */ export class IoTActionRole extends Construct {
  role;
  roleArn;
- constructor(parent)
+ constructor(parent){
  super(parent, 'errorActionRole');
  this.role = new IAM.Role(this, 'iot-action-role', {
  assumedBy: new IAM.ServicePrincipal('iot.amazonaws.com'),
@@ -14,14 +13,16 @@ export class IoTActionRole extends Construct {
  rootPermissions: new IAM.PolicyDocument({
  statements: [
  new IAM.PolicyStatement({
- actions: [
-
- `arn:aws:iot:${Stack.of(this).region}:${Stack.of(this).account}:topic/errors`,
+ actions: [
+ 'iot:Publish'
  ],
-
-
-
-
+ resources: [
+ `arn:aws:iot:${Stack.of(this).region}:${Stack.of(this).account}:topic/errors`
+ ]
+ })
+ ]
+ })
+ }
  });
  this.roleArn = this.role.roleArn;
  }
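The role above is intended to back an IoT rule's error action, so that failed action invocations can be republished to the 'errors' topic it is allowed to publish to. A hedged sketch of that wiring (the CfnTopicRule part is plain aws-cdk-lib usage, not part of this package, and the '/cdk' import subpath is assumed from the cdk.d.ts entry point shown below):

    import { App, Stack, aws_iot as IoT } from 'aws-cdk-lib'
    import { IoTActionRole } from '@bifravst/aws-cdk-lambda-helpers/cdk'

    const app = new App()
    const stack = new Stack(app, 'example')
    // The construct only takes the parent; its construct id is fixed to 'errorActionRole'.
    const errorRole = new IoTActionRole(stack)

    new IoT.CfnTopicRule(stack, 'rule', {
      topicRulePayload: {
        sql: "SELECT * FROM 'devices/+/messages'",
        actions: [], // real rule actions go here
        // Republish failures to the 'errors' topic using the role created above
        errorAction: {
          republish: { roleArn: errorRole.roleArn, topic: 'errors' },
        },
      },
    })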
package/dist/src/LambdaLogGroup.js
CHANGED
@@ -3,20 +3,14 @@ import { Construct } from 'constructs';
  import { isTest } from './isTest.js';
  export class LambdaLogGroup extends Construct {
  logGroup;
- constructor(parent, id,
-
- retention) {
+ constructor(parent, id, // Defaults to 30 days for production, 1 day for test
+ retention){
  super(parent, id);
  this.logGroup = new Logs.LogGroup(this, 'logGroup', {
- retention: retention ??
- (isTest(this)
- ? Logs.RetentionDays.ONE_DAY
- : Logs.RetentionDays.ONE_MONTH),
+ retention: retention ?? (isTest(this) ? Logs.RetentionDays.ONE_DAY : Logs.RetentionDays.ONE_MONTH),
  logGroupName: `/${Stack.of(this).stackName}/fn/${id}-${Names.uniqueId(this)}`,
- logGroupClass: Logs.LogGroupClass.STANDARD,
- removalPolicy: this.node.getContext('isTest') === true
- ? RemovalPolicy.DESTROY
- : RemovalPolicy.RETAIN,
+ logGroupClass: Logs.LogGroupClass.STANDARD,
+ removalPolicy: this.node.getContext('isTest') === true ? RemovalPolicy.DESTROY : RemovalPolicy.RETAIN
  });
  }
  }
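The compiled constructor above reads as (parent, id, retention?): retention falls back to one day when the 'isTest' context is set and one month otherwise, and the removal policy follows the same flag. A minimal usage sketch (import subpath assumed):

    import { App, Stack, aws_logs as Logs } from 'aws-cdk-lib'
    import { LambdaLogGroup } from '@bifravst/aws-cdk-lambda-helpers/cdk'

    const stack = new Stack(new App(), 'example')
    // An explicit retention overrides the isTest-based default:
    const fnLogs = new LambdaLogGroup(stack, 'onMessage', Logs.RetentionDays.TWO_WEEKS)
    // fnLogs.logGroup is named `/<stackName>/fn/onMessage-<uniqueId>`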
package/dist/src/LambdaSource.d.ts
CHANGED
@@ -1,6 +1,6 @@
  import { aws_lambda as Lambda } from 'aws-cdk-lib';
  import { Construct } from 'constructs';
- import type { PackedLambda } from './packLambda.
+ import type { PackedLambda } from './packLambda.ts';
  export declare class LambdaSource extends Construct {
  readonly code: Lambda.S3Code;
  constructor(parent: Construct, packedLambda: Pick<PackedLambda, 'zipFilePath' | 'id' | 'hash'>);
package/dist/src/LambdaSource.js
CHANGED
@@ -1,13 +1,13 @@
- import { AssetHashType, aws_lambda as Lambda, aws_s3 as S3, aws_s3_assets as S3Assets
+ import { AssetHashType, aws_lambda as Lambda, aws_s3 as S3, aws_s3_assets as S3Assets } from 'aws-cdk-lib';
  import { Construct } from 'constructs';
  export class LambdaSource extends Construct {
  code;
- constructor(parent, packedLambda)
+ constructor(parent, packedLambda){
  super(parent, `${packedLambda.id}Source`);
  const asset = new S3Assets.Asset(this, 'asset', {
  path: packedLambda.zipFilePath,
  assetHash: packedLambda.hash,
- assetHashType: AssetHashType.CUSTOM
+ assetHashType: AssetHashType.CUSTOM
  });
  const sourceCodeBucket = S3.Bucket.fromBucketName(this, 'bucket', asset.s3BucketName);
  this.code = Lambda.Code.fromBucket(sourceCodeBucket, asset.s3ObjectKey);
package/dist/src/PackedLambdaFn.d.ts
CHANGED
@@ -1,6 +1,6 @@
  import { aws_lambda as Lambda, type aws_logs as Logs } from 'aws-cdk-lib';
  import { Construct } from 'constructs';
- import type { PackedLambda } from './packLambda.
+ import type { PackedLambda } from './packLambda.ts';
  /**
  * Creates a Lambda function with useful defaults:
  *
package/dist/src/PackedLambdaFn.js
CHANGED
@@ -1,5 +1,5 @@
  import { Permissions as SettingsPermissions } from '@bifravst/aws-ssm-settings-helpers/cdk';
- import { Duration, aws_lambda as Lambda, Stack
+ import { Duration, aws_lambda as Lambda, Stack } from 'aws-cdk-lib';
  import { Construct } from 'constructs';
  import { LambdaLogGroup } from './LambdaLogGroup.js';
  import { LambdaSource } from './LambdaSource.js';
@@ -18,15 +18,13 @@ import { LambdaSource } from './LambdaSource.js';
  * DISABLE_METRICS: set to '1' of 'isTest'===true in the context
  * - a LambdaLogGroup (if not provided)
  * - policies that allow to access all SSM parameters below the current stack name
- */
- export class PackedLambdaFn extends Construct {
+ */ export class PackedLambdaFn extends Construct {
  fn;
  logGroup;
- constructor(parent, id, source, props)
+ constructor(parent, id, source, props){
  super(parent, id);
  const { environment, initialPolicy, ...rest } = props;
- this.logGroup =
- props.logGroup ?? new LambdaLogGroup(this, 'fnLogs').logGroup;
+ this.logGroup = props.logGroup ?? new LambdaLogGroup(this, 'fnLogs').logGroup;
  this.fn = new Lambda.Function(this, 'fn', {
  architecture: Lambda.Architecture.ARM_64,
  runtime: props.runtime ?? Lambda.Runtime.NODEJS_22_X,
@@ -39,16 +37,16 @@ export class PackedLambdaFn extends Construct {
  DISABLE_METRICS: this.node.tryGetContext('isTest') === true ? '1' : '0',
  PACKED_LAMBDA_ID: source.id,
  PACKED_LAMBDA_HASH: source.hash,
- ...environment
+ ...environment
  },
  initialPolicy: [
- ...
- SettingsPermissions(Stack.of(this))
+ ...initialPolicy ?? [],
+ SettingsPermissions(Stack.of(this))
  ],
  ...rest,
  logGroup: this.logGroup,
  handler: source.handler,
- code: new LambdaSource(this, source).code
+ code: new LambdaSource(this, source).code
  });
  this.fn.node.addMetadata('packedLambda:id', source.id);
  this.fn.node.addMetadata('packedLambda:hash', source.hash);
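PackedLambdaFn consumes a packed lambda (id, hash, zipFilePath, handler) and applies the defaults listed in its doc block: ARM_64, Node.js 22, a LambdaLogGroup, metrics disabled under 'isTest', and SSM settings permissions for the stack. A hedged end-to-end sketch pairing it with packLambdaFromPath from this same package (import paths assumed):

    import { App, Stack } from 'aws-cdk-lib'
    import { packLambdaFromPath } from '@bifravst/aws-cdk-lambda-helpers'
    import { PackedLambdaFn } from '@bifravst/aws-cdk-lambda-helpers/cdk'

    // Pack the TypeScript handler into a ZIP; returns { id, zipFilePath, handler, hash }
    const packed = await packLambdaFromPath({
      id: 'onMessage',
      sourceFilePath: 'lambda/onMessage.ts',
    })

    const app = new App()
    const stack = new Stack(app, 'example')
    // props are required; environment and initialPolicy are merged with the defaults above
    new PackedLambdaFn(stack, 'onMessageFn', packed, {
      environment: { STAGE: 'dev' },
    })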
package/dist/src/cdk.d.ts
CHANGED
@@ -1,4 +1,4 @@
- export * from './IoTActionRole.
- export * from './LambdaLogGroup.
- export * from './LambdaSource.
- export * from './PackedLambdaFn.
+ export * from './IoTActionRole.ts';
+ export * from './LambdaLogGroup.ts';
+ export * from './LambdaSource.ts';
+ export * from './PackedLambdaFn.ts';
package/dist/src/checkSumOfFiles.spec.js
CHANGED
@@ -2,15 +2,14 @@ import assert from 'node:assert/strict';
  import path from 'node:path';
  import { describe, it } from 'node:test';
  import { checkSumOfFiles } from './checksumOfFiles.js';
- void describe('checkSumOfFiles()', ()
- void it('should calculate a checksum of files', async ()
-
-
-
-
-
-
-
-
- 'baa003a894945a0d2519b1f4340caa97c462058f'));
+ void describe('checkSumOfFiles()', ()=>{
+ void it('should calculate a checksum of files', async ()=>assert.equal(await checkSumOfFiles([
+ // sha1sum cdk/helpers/lambdas/test-data/1.txt
+ // 6ae3f2029d36e029175cc225c2c4cda51a5ac602 cdk/helpers/lambdas/test-data/1.txt
+ path.join(process.cwd(), 'src', 'test-data', '1.txt'),
+ // sha1sum cdk/helpers/lambdas/test-data/2.txt
+ // 6a9c3333d7a3f9ee9fa1ef70224766fafb208fe4 cdk/helpers/lambdas/test-data/2.txt
+ path.join(process.cwd(), 'src', 'test-data', '2.txt')
+ ]), // echo -n 6ae3f2029d36e029175cc225c2c4cda51a5ac6026a9c3333d7a3f9ee9fa1ef70224766fafb208fe4 | sha1sum
+ 'baa003a894945a0d2519b1f4340caa97c462058f'));
  });
package/dist/src/checksumOfFiles.js
CHANGED
@@ -2,26 +2,29 @@ import * as crypto from 'node:crypto';
  import * as fs from 'node:fs';
  /**
  * Computes the combined checksum of the given files
- */
- export const checkSumOfFiles = async (files) => {
+ */ export const checkSumOfFiles = async (files)=>{
  const fileChecksums = await checkSum(files);
- const checksum = checkSumOfStrings([
+ const checksum = checkSumOfStrings([
+ ...Object.entries(fileChecksums)
+ ].map(([, hash])=>hash));
  return checksum;
  };
- export const checkSumOfStrings = (strings)
+ export const checkSumOfStrings = (strings)=>{
  const hash = crypto.createHash('sha1');
  hash.update(strings.join(''));
  return hash.digest('hex');
  };
  const hashCache = {};
- const hashFile = async (filePath)
+ const hashFile = async (filePath)=>{
  if (hashCache[filePath] === undefined) {
- hashCache[filePath] = await new Promise((resolve)
+ hashCache[filePath] = await new Promise((resolve)=>{
  const hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  const fileStream = fs.createReadStream(filePath);
- fileStream.pipe(hash, {
-
+ fileStream.pipe(hash, {
+ end: false
+ });
+ fileStream.on('end', ()=>{
  hash.end();
  const h = hash.read().toString();
  resolve(h);
@@ -32,11 +35,10 @@ const hashFile = async (filePath) => {
  };
  /**
  * Computes the checksum for the given files
- */
- const checkSum = async (files) => {
+ */ const checkSum = async (files)=>{
  const hashes = {};
- await files.reduce(async (p, file)
-
-
+ await files.reduce(async (p, file)=>p.then(async ()=>{
+ hashes[file] = await hashFile(file);
+ }), Promise.resolve());
  return hashes;
  };
package/dist/src/commonParent.js
CHANGED
@@ -1,13 +1,11 @@
  import { parse, sep } from 'node:path';
  /**
  * Returns the common ancestor directory from a list of files
- */
-
- if (files.length === 1)
- return parse(files[0] ?? '').dir + sep;
+ */ export const commonParent = (files)=>{
+ if (files.length === 1) return parse(files[0] ?? '').dir + sep;
  let index = 0;
  let prefix = '/';
- while
+ while(files.filter((f)=>f.startsWith(prefix)).length === files.length){
  prefix = files[0]?.slice(0, index++) ?? '';
  }
  return prefix.slice(0, prefix.lastIndexOf('/') + 1);
package/dist/src/commonParent.spec.js
CHANGED
@@ -1,20 +1,22 @@
  import assert from 'node:assert/strict';
  import { describe, it } from 'node:test';
  import { commonParent } from './commonParent.js';
- void describe('commonParent()', ()
- void it('should return the common parent directory', ()
-
-
-
-
-
- void it('should return the entire parent tree for a single file', ()
-
- '/some/dir/lambda/
-
-
-
- '/
-
-
+ void describe('commonParent()', ()=>{
+ void it('should return the common parent directory', ()=>assert.equal(commonParent([
+ '/some/dir/lambda/onMessage.ts',
+ '/some/dir/lambda/notifyClients.ts',
+ '/some/dir/lambda/wirepasPublish.ts',
+ '/some/dir/wirepas-5g-mesh-gateway/protobuf/ts/data_message.ts'
+ ]), '/some/dir/'));
+ void it('should return the entire parent tree for a single file', ()=>assert.equal(commonParent([
+ '/some/dir/lambda/onMessage.ts'
+ ]), '/some/dir/lambda/'));
+ void it('should return "/" if files have no common directory', ()=>assert.equal(commonParent([
+ '/some/dir/lambda/onMessage.ts',
+ '/other/dir/lambda/onMessage.ts'
+ ]), '/'));
+ void it('should return the common ancestor only up until the directory level', ()=>assert.equal(commonParent([
+ '/some/dir/lambdas/cors.ts',
+ '/some/dir/lambdas/corsHeaders.ts'
+ ]), '/some/dir/lambdas/'));
  });
package/dist/src/findDependencies.js
CHANGED
@@ -1,45 +1,36 @@
  import { readFileSync, statSync } from 'node:fs';
  import path from 'node:path';
- import ts
+ import ts from 'typescript';
  /**
  * Resolve project-level dependencies for the given file using TypeScript compiler API
- */
- export const findDependencies = (args) => {
+ */ export const findDependencies = (args)=>{
  const sourceFilePath = args.sourceFilePath;
  const visited = args.visited ?? [];
  const dependencies = args.imports ?? [];
  const packages = args.packages ?? new Set();
  const importsSubpathPatterns = args.importsSubpathPatterns ?? {};
- if (visited.includes(sourceFilePath))
-
-
-
-
- };
+ if (visited.includes(sourceFilePath)) return {
+ dependencies,
+ importsSubpathPatterns,
+ packages
+ };
  const tsConfigFilePath = args.tsConfigFilePath;
- const tsConfig = tsConfigFilePath !== undefined
-
-
-
- /*setParentNodes */ true);
- const parseChild = (node) => {
- if (node.kind !== ts.SyntaxKind.ImportDeclaration &&
- node.kind !== ts.SyntaxKind.ExportDeclaration)
- return;
+ const tsConfig = tsConfigFilePath !== undefined ? JSON.parse(readFileSync(tsConfigFilePath, 'utf-8').toString()) : undefined;
+ const fileNode = ts.createSourceFile(sourceFilePath, readFileSync(sourceFilePath, 'utf-8').toString(), ts.ScriptTarget.ES2022, /*setParentNodes */ true);
+ const parseChild = (node)=>{
+ if (node.kind !== ts.SyntaxKind.ImportDeclaration && node.kind !== ts.SyntaxKind.ExportDeclaration) return;
  const moduleSpecifier = node.moduleSpecifier.text;
  const { resolvedPath: file } = resolve({
  moduleSpecifier,
  sourceFilePath,
  tsConfigFilePath,
  tsConfig,
- importsSubpathPatterns
+ importsSubpathPatterns
  });
  try {
  const s = statSync(file);
- if (!s.isDirectory())
-
- }
- catch {
+ if (!s.isDirectory()) dependencies.push(file);
+ } catch {
  // Module or file not found
  visited.push(file);
  packages.add(moduleSpecifier);
@@ -47,14 +38,14 @@ export const findDependencies = (args) => {
  };
  ts.forEachChild(fileNode, parseChild);
  visited.push(sourceFilePath);
- for (const file of dependencies)
+ for (const file of dependencies){
  findDependencies({
  sourceFilePath: file,
  imports: dependencies,
  visited,
  tsConfigFilePath,
  importsSubpathPatterns,
- packages
+ packages
  });
  }
  return {
@@ -62,71 +53,53 @@ export const findDependencies = (args) => {
  importsSubpathPatterns,
  packages: new Set([
  ...packages.difference(new Set([
- 'aws-lambda'
- ]))
- ]
- .filter((p) => !p.startsWith('node:'))
- .filter((p) => !p.startsWith('@aws-sdk/'))
- .map((d) => {
+ 'aws-lambda'
+ ]))
+ ].filter((p)=>!p.startsWith('node:')).filter((p)=>!p.startsWith('@aws-sdk/')).map((d)=>{
  if (d.startsWith('@')) {
  const [org, packageName] = d.split('/');
  return `${org}/${packageName}`;
  }
  return d.split('/')[0];
- }))
+ }))
  };
  };
- const resolve = ({ moduleSpecifier, sourceFilePath, tsConfigFilePath, tsConfig, importsSubpathPatterns
- if (moduleSpecifier.startsWith('.'))
-
-
-
-
- // See https://nodejs.org/api/esm.html#mandatory-file-extensions
- // So we need to replace the `.js` in the import specification to find the TypeScript source for the file.
- // Example: import { Network, notifyClients } from './notifyClients.js'
- // The source file for that is actually in './notifyClients.ts'
- .replace(/\.js$/, '.ts'),
- };
- if (tsConfigFilePath !== undefined &&
- tsConfig?.compilerOptions?.paths !== undefined) {
- for (const [key, value] of Object.entries(tsConfig.compilerOptions.paths)) {
+ const resolve = ({ moduleSpecifier, sourceFilePath, tsConfigFilePath, tsConfig, importsSubpathPatterns })=>{
+ if (moduleSpecifier.startsWith('.')) return {
+ resolvedPath: path.resolve(path.parse(sourceFilePath).dir, moduleSpecifier)
+ };
+ if (tsConfigFilePath !== undefined && tsConfig?.compilerOptions?.paths !== undefined) {
+ for (const [key, value] of Object.entries(tsConfig.compilerOptions.paths)){
  const [resolvedPath] = value;
- if (resolvedPath === undefined)
- continue;
+ if (resolvedPath === undefined) continue;
  // Exact match
  if (moduleSpecifier === key) {
  const fullResolvedPath = path.join(path.parse(tsConfigFilePath).dir, tsConfig.compilerOptions.baseUrl, resolvedPath);
  importsSubpathPatterns[key] = [
  tsConfig.compilerOptions.baseUrl,
  path.sep,
- resolvedPath
+ resolvedPath
  ].join('');
  return {
- resolvedPath: fullResolvedPath
+ resolvedPath: fullResolvedPath
  };
  }
  // Wildcard match
- if (!key.includes('*'))
- continue;
+ if (!key.includes('*')) continue;
  const rx = new RegExp(`^${key.replace('*', '(?<wildcard>.*)')}`);
  const maybeMatch = rx.exec(moduleSpecifier);
- if (maybeMatch?.groups?.wildcard === undefined)
- continue;
+ if (maybeMatch?.groups?.wildcard === undefined) continue;
  importsSubpathPatterns[key] = [
  tsConfig.compilerOptions.baseUrl,
  path.sep,
- resolvedPath
+ resolvedPath
  ].join('');
  return {
- resolvedPath: path
- .resolve(path.parse(tsConfigFilePath).dir, tsConfig.compilerOptions.baseUrl, resolvedPath.replace('*', maybeMatch.groups.wildcard))
- // Same as above, replace `.js` with `.ts`
- .replace(/\.js$/, '.ts'),
+ resolvedPath: path.resolve(path.parse(tsConfigFilePath).dir, tsConfig.compilerOptions.baseUrl, resolvedPath.replace('*', maybeMatch.groups.wildcard))
  };
  }
  }
  return {
- resolvedPath: moduleSpecifier
+ resolvedPath: moduleSpecifier
  };
  };
package/dist/src/findDependencies.spec.js
CHANGED
@@ -4,10 +4,10 @@ import { describe, it } from 'node:test';
  import { URL } from 'node:url';
  import { findDependencies } from './findDependencies.js';
  const __dirname = new URL('.', import.meta.url).pathname;
- void describe('findDependencies()', ()
- void it('should return a list of external dependencies', ()
+ void describe('findDependencies()', ()=>{
+ void it('should return a list of external dependencies', ()=>{
  const { packages } = findDependencies({
- sourceFilePath: path.join(__dirname, '..', 'cdk', 'lambda.ts')
+ sourceFilePath: path.join(__dirname, '..', 'cdk', 'lambda.ts')
  });
  assert.equal(packages.has('id128'), true, "Should include the 'id128' package");
  assert.equal(packages.has('aws-lambda'), false, "Should not include the type-only 'aws-lambda' package");
@@ -15,23 +15,23 @@ void describe('findDependencies()', () => {
  assert.equal(packages.has('fp-ts'), true, 'Should include the top-level package only');
  assert.equal(packages.has('@aws-sdk/client-dynamodb'), false, 'Should not include AWS SDK packages');
  });
- void it('should honor tsconfig.json paths', ()
+ void it('should honor tsconfig.json paths', ()=>{
  const { dependencies } = findDependencies({
  sourceFilePath: path.join(__dirname, 'test-data', 'resolve-paths', 'lambda.ts'),
- tsConfigFilePath: path.join(__dirname, 'test-data', 'resolve-paths', 'tsconfig.json')
+ tsConfigFilePath: path.join(__dirname, 'test-data', 'resolve-paths', 'tsconfig.json')
  });
  assert.equal(dependencies.includes(path.join(__dirname, 'test-data', 'resolve-paths', 'foo', 'index.ts')), true, 'Should include the index.ts file');
  assert.equal(dependencies.includes(path.join(__dirname, 'test-data', 'resolve-paths', 'foo', '1.ts')), true, 'Should include the module referenced in the index.ts file');
  assert.equal(dependencies.includes(path.join(__dirname, 'test-data', 'resolve-paths', 'foo', '2.ts')), true, 'Should include the module file');
  });
- void it('should return an import map', ()
+ void it('should return an import map', ()=>{
  const { importsSubpathPatterns } = findDependencies({
  sourceFilePath: path.join(__dirname, 'test-data', 'resolve-paths', 'lambda.ts'),
- tsConfigFilePath: path.join(__dirname, 'test-data', 'resolve-paths', 'tsconfig.json')
+ tsConfigFilePath: path.join(__dirname, 'test-data', 'resolve-paths', 'tsconfig.json')
  });
  assert.deepEqual(importsSubpathPatterns, {
- '#foo': './foo/index.
- '#foo/*': './foo/*'
+ '#foo': './foo/index.ts',
+ '#foo/*': './foo/*'
  });
  });
  });
package/dist/src/isTest.js
CHANGED
@@ -1 +1 @@
- export const isTest = (construct)
+ export const isTest = (construct)=>construct.node.tryGetContext('isTest') === true;
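isTest() only checks the 'isTest' context value on the construct tree, so tests can opt into the shorter log retention and DESTROY removal policy by setting that flag when creating the app, for example:

    import { App } from 'aws-cdk-lib'

    // Read by isTest(), LambdaLogGroup, and PackedLambdaFn (DISABLE_METRICS):
    const app = new App({ context: { isTest: true } })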
package/dist/src/isTest.spec.js
CHANGED
@@ -1,12 +1,12 @@
  import assert from 'node:assert';
  import { describe, it, mock } from 'node:test';
  import { isTest } from './isTest.js';
- void describe('isTest()', ()
- void it('should return true if the construct is a test', ()
+ void describe('isTest()', ()=>{
+ void it('should return true if the construct is a test', ()=>{
  const construct = {
  node: {
- tryGetContext: mock.fn(()
- }
+ tryGetContext: mock.fn(()=>true)
+ }
  };
  const result = isTest(construct);
  assert.equal(result, true);
package/dist/src/lambda.d.ts
CHANGED
@@ -1,2 +1,2 @@
- export * from './packLambda.
- export * from './packLambdaFromPath.
+ export * from './packLambda.ts';
+ export * from './packLambdaFromPath.ts';
package/dist/src/layer.d.ts
CHANGED
@@ -1 +1 @@
- export * from './packLayer.
+ export * from './packLayer.ts';
package/dist/src/module-folder-named-like-handler-bug.spec.js
CHANGED
@@ -6,14 +6,14 @@ import { describe, it } from 'node:test';
  import { fileURLToPath } from 'node:url';
  import { ImportFromFolderNameError, packLambda } from './packLambda.js';
  const tmpDir = os.tmpdir();
- void describe('packLambda()', ()
+ void describe('packLambda()', ()=>{
  // See https://github.com/aws/aws-lambda-nodejs-runtime-interface-client/issues/93#issuecomment-2042201321
- void it('should fail if it imports from a folder on the same level that has the same name as the handler module', async ()
-
-
-
- void it('should not fail if it a folder with the same name is on a different level', async ()
-
-
-
+ void it('should fail if it imports from a folder on the same level that has the same name as the handler module', async ()=>assert.rejects(async ()=>packLambda({
+ sourceFilePath: path.join(dirname(fileURLToPath(import.meta.url)), 'test-data', 'module-folder-named-like-handler-bug', 'same-level', 'acme.ts'),
+ zipFilePath: path.join(await fs.mkdtemp(`${tmpDir}${path.sep}`), 'acme.zip')
+ }), ImportFromFolderNameError));
+ void it('should not fail if it a folder with the same name is on a different level', async ()=>assert.doesNotReject(async ()=>packLambda({
+ sourceFilePath: path.join(dirname(fileURLToPath(import.meta.url)), 'test-data', 'module-folder-named-like-handler-bug', 'different-level', 'lambda', 'acme.ts'),
+ zipFilePath: path.join(await fs.mkdtemp(`${tmpDir}${path.sep}`), 'acme.zip')
+ })));
  });
package/dist/src/packLambda.js
CHANGED
@@ -7,30 +7,33 @@ import yazl from 'yazl';
  import { checkSumOfFiles } from './checksumOfFiles.js';
  import { commonParent } from './commonParent.js';
  import { findDependencies } from './findDependencies.js';
-
-
-
-
-
-
- .join('/')
- // Replace leading slash
+ import { updateImports } from './updateImports.js';
+ const removeCommonAncestor = (parentDir)=>(filePath)=>{
+ const p = parse(filePath);
+ const jsFileName = [
+ p.dir.replace(parentDir.slice(0, parentDir.length - 1), ''),
+ `${p.name}.js`
+ ].join('/')// Replace leading slash
  .replace(/^\//, '');
-
- };
+ return jsFileName;
+ };
  /**
  * In the bundle we only include code that's not in the layer.
- */
-
- const { dependencies: deps, importsSubpathPatterns, packages, } = findDependencies({
+ */ export const packLambda = async ({ sourceFilePath, zipFilePath, tsConfigFilePath, debug, progress })=>{
+ const { dependencies: deps, importsSubpathPatterns, packages } = findDependencies({
  sourceFilePath,
- tsConfigFilePath
+ tsConfigFilePath
  });
- debug?.(`${sourceFilePath}: dependencies`, [
-
+ debug?.(`${sourceFilePath}: dependencies`, [
+ ...packages
+ ].join(', '));
+ Object.entries(importsSubpathPatterns).forEach(([k, v])=>{
  debug?.(`${sourceFilePath}:importsSubpathPattern`, `${k} -> ${v}`);
  });
- const lambdaFiles = [
+ const lambdaFiles = [
+ sourceFilePath,
+ ...deps
+ ];
  const zipfile = new yazl.ZipFile();
  const stripCommon = removeCommonAncestor(commonParent(lambdaFiles));
  const handler = stripCommon(sourceFilePath);
@@ -38,21 +41,21 @@ export const packLambda = async ({ sourceFilePath, zipFilePath, tsConfigFilePath
  const handlerInfo = path.parse(handler);
  const handlerName = handlerInfo.name;
  const handlerDir = handlerInfo.dir;
- const handlerDepsFromSameDirectory = deps
-
- .filter((d) => handlerDir === '' ? true : d.startsWith(`${handlerDir}${path.sep}`));
- const handlerDepsFolderNames = new Set(handlerDepsFromSameDirectory.map((s) => s.split('/')[0]));
+ const handlerDepsFromSameDirectory = deps.map(stripCommon).filter((d)=>handlerDir === '' ? true : d.startsWith(`${handlerDir}${path.sep}`));
+ const handlerDepsFolderNames = new Set(handlerDepsFromSameDirectory.map((s)=>s.split('/')[0]));
  if (handlerDepsFolderNames.has(handlerName)) {
  throw new ImportFromFolderNameError(handlerName);
  }
  // Compile files
- for (const file of lambdaFiles)
-
+ for (const file of lambdaFiles){
+ let compiled = (await swc.transformFile(file, {
  jsc: {
- target: 'es2022'
- }
+ target: 'es2022'
+ }
  })).code;
  debug?.(`compiled`, compiled);
+ compiled = updateImports(compiled);
+ debug?.(`converted imports`, compiled);
  const jsFileName = stripCommon(file);
  zipfile.addBuffer(Buffer.from(compiled, 'utf-8'), jsFileName);
  progress?.(`added`, jsFileName);
@@ -60,34 +63,40 @@ export const packLambda = async ({ sourceFilePath, zipFilePath, tsConfigFilePath
  const hash = await checkSumOfFiles([
  ...lambdaFiles,
  // Include this script, so artefact is updated if the way it's built is changed
- fileURLToPath(import.meta.url)
+ fileURLToPath(import.meta.url)
  ]);
  // Mark it as ES module
  zipfile.addBuffer(Buffer.from(JSON.stringify({
  type: 'module',
- imports: importsSubpathPatterns,
-
-
-
-
+ imports: Object.fromEntries(Object.entries(importsSubpathPatterns).map(([k, v])=>[
+ k,
+ v.replace(/\.ts$/, '.js')
+ ])),
+ dependencies: Object.fromEntries([
+ ...packages.values()
+ ].sort((a, b)=>a.localeCompare(b)).map((pkg)=>[
+ pkg,
+ '*'
+ ]))
+ }, null, 2), 'utf-8'), 'package.json');
  progress?.(`added`, 'package.json');
- await new Promise((resolve)
- zipfile.outputStream
- .pipe(createWriteStream(zipFilePath))
- .on('close', () => {
+ await new Promise((resolve)=>{
+ zipfile.outputStream.pipe(createWriteStream(zipFilePath)).on('close', ()=>{
  resolve();
  });
  zipfile.end();
  });
  progress?.(`written`, zipFilePath);
- return {
+ return {
+ handler: stripCommon(sourceFilePath),
+ hash
+ };
  };
  /**
  * @see https://github.com/aws/aws-lambda-nodejs-runtime-interface-client/issues/93#issuecomment-2042201321
- */
- export class ImportFromFolderNameError extends Error {
+ */ export class ImportFromFolderNameError extends Error {
  folderName;
- constructor(folderName)
+ constructor(folderName){
  super(`Import from folder with same name as handler ("${folderName}") not allowed!`);
  this.name = 'ImportFromFolderNameError';
  this.folderName = folderName;
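In 3.0.1 packLambda() compiles the handler and its project-level dependencies with swc, rewrites '.ts' import specifiers to '.js' via the new updateImports() helper, and writes a package.json into the ZIP that carries "type": "module", the rewritten import map and the external packages. A hedged usage sketch based on the parameters visible above (import path assumed):

    import { packLambda } from '@bifravst/aws-cdk-lambda-helpers'

    const { handler, hash } = await packLambda({
      sourceFilePath: 'lambda/onMessage.ts',
      zipFilePath: 'dist/lambdas/onMessage.zip',
      tsConfigFilePath: 'tsconfig.json', // optional: enables compilerOptions.paths resolution
      debug: (label, info) => console.debug(label, info), // optional
      progress: (label, info) => console.log(label, info), // optional
    })
    // handler is the ZIP-relative entry file, e.g. 'onMessage.js'; hash covers all bundled sources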
package/dist/src/packLambda.spec.d.ts
@@ -0,0 +1 @@
+ export {};
package/dist/src/packLambda.spec.js
@@ -0,0 +1,28 @@
+ import assert from 'node:assert';
+ import fs from 'node:fs/promises';
+ import os from 'node:os';
+ import path, { dirname } from 'node:path';
+ import { describe, it } from 'node:test';
+ import { fileURLToPath } from 'node:url';
+ import { packLambda } from './packLambda.js';
+ import { getFileFromZip } from './test/getFileFromZip.js';
+ const tmpDir = os.tmpdir();
+ await describe('packLambda()', async ()=>{
+ await it('should convert .ts imports to .js', async ()=>{
+ const zipFilePath = path.join(await fs.mkdtemp(`${tmpDir}${path.sep}`), 'resolve-paths.zip');
+ await packLambda({
+ sourceFilePath: path.join(dirname(fileURLToPath(import.meta.url)), 'test-data', 'resolve-paths', 'lambda.ts'),
+ tsConfigFilePath: path.join(dirname(fileURLToPath(import.meta.url)), 'test-data', 'resolve-paths', 'tsconfig.json'),
+ zipFilePath
+ });
+ const handler = await getFileFromZip(zipFilePath, 'lambda.js');
+ assert.equal(handler.includes(`import { foo2 } from '#foo/2.js'`), true, 'The import should be converted to .js');
+ await assert.doesNotReject(getFileFromZip(zipFilePath, 'foo/2.js'), 'The ZIP file should contain the imported file');
+ const pJson = JSON.parse(await getFileFromZip(zipFilePath, 'package.json'));
+ assert.equal(pJson.type, 'module', 'The package.json should contain "type": "module"');
+ assert.deepEqual(pJson.imports, {
+ '#foo': './foo/index.js',
+ '#foo/*': './foo/*'
+ }, 'The package.json should contain the imports');
+ });
+ });
package/dist/src/packLambdaFromPath.d.ts
CHANGED
@@ -1,4 +1,4 @@
- import { type PackedLambda } from './packLambda.
+ import { type PackedLambda } from './packLambda.ts';
  export declare const packLambdaFromPath: ({ id, sourceFilePath, handlerFunction: handlerFunctionArg, baseDir: baseDirArg, distDir: distDirArg, tsConfigFilePath, debug, progress, }: {
  id: string;
  sourceFilePath: string;
package/dist/src/packLambdaFromPath.js
CHANGED
@@ -1,17 +1,16 @@
  import { mkdir } from 'node:fs/promises';
  import path from 'node:path';
  import { packLambda } from './packLambda.js';
- export const packLambdaFromPath = async ({ id, sourceFilePath, handlerFunction: handlerFunctionArg, baseDir: baseDirArg, distDir: distDirArg, tsConfigFilePath, debug, progress
+ export const packLambdaFromPath = async ({ id, sourceFilePath, handlerFunction: handlerFunctionArg, baseDir: baseDirArg, distDir: distDirArg, tsConfigFilePath, debug, progress })=>{
  const distDir = distDirArg ?? path.join(process.cwd(), 'dist', 'lambdas');
  const baseDir = baseDirArg ?? process.cwd();
  const handlerFunction = handlerFunctionArg ?? 'handler';
  try {
  await mkdir(distDir, {
- recursive: true
+ recursive: true
  });
- }
-
- // Directory exists
+ } catch {
+ // Directory exists
  }
  const zipFile = path.join(distDir, `${id}.zip`);
  const { handler, hash } = await packLambda({
@@ -19,12 +18,12 @@ export const packLambdaFromPath = async ({ id, sourceFilePath, handlerFunction:
  zipFilePath: zipFile,
  tsConfigFilePath,
  debug,
- progress
+ progress
  });
  return {
  id,
  zipFilePath: zipFile,
  handler: handler.replace('.js', `.${handlerFunction}`),
- hash
+ hash
  };
  };
package/dist/src/packLayer.js
CHANGED
@@ -6,7 +6,7 @@ import { fileURLToPath } from 'node:url';
  import path from 'path';
  import { ZipFile } from 'yazl';
  import { checkSumOfFiles, checkSumOfStrings } from './checksumOfFiles.js';
- export const packLayer = async ({ id, dependencies, baseDir, distDir, installCommand
+ export const packLayer = async ({ id, dependencies, baseDir, distDir, installCommand })=>{
  const base = baseDir ?? process.cwd();
  const dist = distDir ?? path.join(base, 'dist', 'layers');
  const packageJsonFile = path.join(base, 'package.json');
@@ -15,28 +15,30 @@ export const packLayer = async ({ id, dependencies, baseDir, distDir, installCom
  const layerDir = path.join(dist, id);
  const nodejsDir = path.join(layerDir, 'nodejs');
  try {
- await rm(layerDir, {
-
-
-
+ await rm(layerDir, {
+ recursive: true
+ });
+ } catch {
+ // Folder does not exist.
  }
- await mkdir(nodejsDir, {
-
+ await mkdir(nodejsDir, {
+ recursive: true
+ });
+ const depsToBeInstalled = dependencies.reduce((resolved, dep)=>{
  const resolvedDependency = deps[dep] ?? devDeps[dep];
- if (resolvedDependency === undefined)
- throw new Error(`Could not resolve dependency "${dep}" in ${packageJsonFile}!`);
+ if (resolvedDependency === undefined) throw new Error(`Could not resolve dependency "${dep}" in ${packageJsonFile}!`);
  return {
  ...resolved,
- [dep]: resolvedDependency
+ [dep]: resolvedDependency
  };
  }, {});
  const checkSumFiles = [
  // Include this script, so artefact is updated if the way it's built is changed
- fileURLToPath(import.meta.url)
+ fileURLToPath(import.meta.url)
  ];
  const packageJSON = path.join(nodejsDir, 'package.json');
  await writeFile(packageJSON, JSON.stringify({
- dependencies: depsToBeInstalled
+ dependencies: depsToBeInstalled
  }), 'utf-8');
  checkSumFiles.push(packageJSON);
  let hasLockFile = true;
@@ -46,26 +48,25 @@ export const packLayer = async ({ id, dependencies, baseDir, distDir, installCom
  const packageLock = path.join(nodejsDir, 'package-lock.json');
  await copyFile(packageLockJsonFile, packageLock);
  checkSumFiles.push(packageLock);
- }
- catch {
+ } catch {
  hasLockFile = false;
-
+ // pass
  }
- await new Promise((resolve, reject)
+ await new Promise((resolve, reject)=>{
  const [cmd, ...args] = installCommand?.({
  packageFilePath: packageJSON,
- packageLockFilePath: packageLockJsonFile
+ packageLockFilePath: packageLockJsonFile
  }) ?? [
  'npm',
  hasLockFile ? 'ci' : 'i',
  '--ignore-scripts',
  '--only=prod',
- '--no-audit'
+ '--no-audit'
  ];
  const p = spawn(cmd, args, {
- cwd: nodejsDir
+ cwd: nodejsDir
  });
- p.on('close', (code)
+ p.on('close', (code)=>{
  if (code !== 0) {
  const msg = `${cmd} ${args.join(' ')} in ${nodejsDir} exited with code ${code}.`;
  return reject(new Error(msg));
@@ -75,17 +76,15 @@ export const packLayer = async ({ id, dependencies, baseDir, distDir, installCom
  });
  const filesToAdd = await glob(`**`, {
  cwd: layerDir,
- nodir: true
+ nodir: true
  });
  const zipfile = new ZipFile();
- filesToAdd.forEach((f)
+ filesToAdd.forEach((f)=>{
  zipfile.addFile(path.join(layerDir, f), f);
  });
- const zipFileName = await new Promise((resolve)
+ const zipFileName = await new Promise((resolve)=>{
  const zipFileName = path.join(dist, `${id}.zip`);
- zipfile.outputStream
- .pipe(createWriteStream(zipFileName))
- .on('close', () => {
+ zipfile.outputStream.pipe(createWriteStream(zipFileName)).on('close', ()=>{
  resolve(zipFileName);
  });
  zipfile.end();
@@ -94,7 +93,7 @@ export const packLayer = async ({ id, dependencies, baseDir, distDir, installCom
  layerZipFilePath: zipFileName,
  hash: checkSumOfStrings([
  JSON.stringify(dependencies),
- await checkSumOfFiles(checkSumFiles)
- ])
+ await checkSumOfFiles(checkSumFiles)
+ ])
  };
  };
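packLayer() resolves each requested dependency against the project's package.json (dependencies or devDependencies), installs them into a nodejs/ folder with npm ci/i, and zips the result for use as a Lambda layer. A hedged sketch (the '/layer' import subpath is assumed from the layer.d.ts entry point above):

    import { packLayer } from '@bifravst/aws-cdk-lambda-helpers/layer'

    const { layerZipFilePath, hash } = await packLayer({
      id: 'baseLayer',
      dependencies: ['id128'], // must be resolvable in the project's package.json
    })
    // layerZipFilePath can then back a Lambda.LayerVersion, e.g. via Lambda.Code.fromAsset(layerZipFilePath)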
package/dist/src/test/getFileFromZip.d.ts
@@ -0,0 +1 @@
+ export declare const getFileFromZip: (zipFilePath: string, filename: string) => Promise<string>;
package/dist/src/test/getFileFromZip.js
@@ -0,0 +1,20 @@
+ import fs from 'node:fs';
+ import unzip from 'unzip-stream';
+ export const getFileFromZip = async (zipFilePath, filename)=>{
+ let found = false;
+ let content = '';
+ await new Promise((resolve, reject)=>{
+ fs.createReadStream(zipFilePath).pipe(unzip.Parse()).on('entry', (entry)=>{
+ if (entry.path === filename) {
+ found = true;
+ entry.on('data', (data)=>content += data);
+ } else {
+ entry.autodrain();
+ }
+ }).on('error', reject).on('end', resolve);
+ });
+ if (!found) {
+ throw new Error(`File not found in zip: ${filename}`);
+ }
+ return content;
+ };
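This new test helper streams a single entry out of a ZIP and rejects if the entry is missing; packLambda.spec.js above uses it to inspect the packed handler. A minimal usage sketch:

    import { getFileFromZip } from './test/getFileFromZip.js'

    // Returns the entry's content as a string, or throws if 'onMessage.js' is not in the archive
    const handlerSource = await getFileFromZip('dist/lambdas/onMessage.zip', 'onMessage.js')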
package/dist/src/test-data/module-folder-named-like-handler-bug/different-level/acme/lib.js
@@ -0,0 +1 @@
+ export const hello = ()=>'Hello World!';
package/dist/src/test-data/module-folder-named-like-handler-bug/same-level/acme/lib.js
@@ -0,0 +1 @@
+ export const hello = ()=>'Hello World!';
package/dist/src/test-data/resolve-paths/foo/1.js
CHANGED
@@ -1 +1 @@
- export const foo = ()
+ export const foo = ()=>42;
package/dist/src/test-data/resolve-paths/foo/2.js
CHANGED
@@ -1 +1 @@
- export const foo2 = ()
+ export const foo2 = ()=>17;
package/dist/src/updateImports.d.ts
@@ -0,0 +1 @@
+ export declare const updateImports: (source: string) => string;
package/dist/src/updateImports.js
@@ -0,0 +1 @@
+ export const updateImports = (source)=>source.replace(/from ['"](.+?)\.ts['"]/g, "from '$1.js'");
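updateImports() is the piece that rewrites the '.ts' specifiers left in swc's compiled output back to '.js' so the bundled ES modules resolve at runtime. An illustration of the regex (hedged, not taken from the package's own tests):

    import { updateImports } from './updateImports.js'

    updateImports(`import { foo2 } from '#foo/2.ts'`)
    // => "import { foo2 } from '#foo/2.js'"
    updateImports(`export { foo } from "./1.ts";`)
    // => "export { foo } from './1.js';" (the replacement always emits single quotes)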
package/dist/src/updateImports.spec.d.ts
@@ -0,0 +1 @@
+ export {};
package/dist/src/updateImports.spec.js
@@ -0,0 +1,10 @@
+ import assert from 'node:assert';
+ import { describe, it } from 'node:test';
+ import { updateImports } from './updateImports.js';
+ void describe('updateImports', ()=>{
+ void it('replaces .ts with .js in relative imports', ()=>{
+ const input = `import { foo } from './bar.js';`;
+ const expected = `import { foo } from './bar.js';`;
+ assert.equal(updateImports(input), expected);
+ });
+ });
package/dist/src/util.d.ts
CHANGED
@@ -1,4 +1,4 @@
- export * from './checksumOfFiles.
- export * from './commonParent.
- export * from './findDependencies.
- export * from './isTest.
+ export * from './checksumOfFiles.ts';
+ export * from './commonParent.ts';
+ export * from './findDependencies.ts';
+ export * from './isTest.ts';
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@bifravst/aws-cdk-lambda-helpers",
- "version": "
+ "version": "3.0.1",
  "description": "Helper functions which simplify working with TypeScript lambdas for AWS CDK.",
  "exports": {
  ".": {
@@ -30,9 +30,10 @@
  },
  "type": "module",
  "scripts": {
- "test": "
+ "test": "node --experimental-strip-types --no-warnings --test ./src/*.spec.ts",
+ "test:e2e": "node --experimental-strip-types --test e2e.spec.ts",
  "prepare": "husky",
- "prepublishOnly": "npx tsc
+ "prepublishOnly": "node --experimental-strip-types npm-compile.ts && npx tsc -P tsconfig.npm.json --outDir ./dist/src"
  },
  "repository": {
  "type": "git",
@@ -58,15 +59,17 @@
  "@bifravst/from-env": "3.0.2",
  "@bifravst/prettier-config": "1.1.4",
  "@commitlint/config-conventional": "19.6.0",
+ "@swc/cli": "0.5.2",
  "@types/aws-lambda": "8.10.146",
  "@types/node": "22.10.2",
+ "@types/unzip-stream": "0.3.4",
  "@types/yazl": "2.4.5",
  "cdk": "2.173.4",
  "commitlint": "19.6.1",
  "husky": "9.1.7",
  "id128": "1.6.6",
  "lint-staged": "15.3.0",
- "
+ "unzip-stream": "0.3.4"
  },
  "lint-staged": {
  "*.ts": [
@@ -78,7 +81,7 @@
  ]
  },
  "engines": {
- "node": ">=22",
+ "node": ">=22.7.0",
  "npm": ">=10"
  },
  "release": {
package/dist/src/test-data/resolve-paths/foo/1.d.ts
@@ -1 +0,0 @@
- export declare const foo: () => number;
package/dist/src/test-data/resolve-paths/foo/2.d.ts
@@ -1 +0,0 @@
- export declare const foo2: () => number;
package/dist/src/test-data/resolve-paths/foo/index.d.ts
@@ -1 +0,0 @@
- export { foo } from './1.js';