declapract-typescript-ehmpathy 0.40.0 → 0.41.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32):
  1. package/dist/practices/cicd-common/best-practice/.github/workflows/.test.yml +17 -11
  2. package/dist/practices/cicd-common/best-practice/.github/workflows/release.yml +2 -2
  3. package/dist/practices/cicd-common/best-practice/.github/workflows/review.yml +1 -1
  4. package/dist/practices/cicd-service/best-practice/.agent/repo=.this/skills/use.rds.capacity.sh +45 -0
  5. package/dist/practices/cicd-service/best-practice/.agent/repo=.this/skills/use.vpc.tunnel.ts +79 -0
  6. package/dist/practices/cicd-service/best-practice/.github/workflows/.deploy-sls.yml +8 -31
  7. package/dist/practices/cicd-service/best-practice/.github/workflows/.sql-schema-control.yml +10 -49
  8. package/dist/practices/cicd-service/best-practice/.github/workflows/.terraform.yml +5 -5
  9. package/dist/practices/cicd-service/best-practice/.github/workflows/provision.yml +0 -2
  10. package/dist/practices/cicd-service/best-practice/package.json +7 -0
  11. package/dist/practices/cicd-service/best-practice/package.json.declapract.ts +3 -0
  12. package/dist/practices/domain/best-practice/package.json +2 -2
  13. package/dist/practices/git/best-practice/.gitignore.declapract.ts +1 -0
  14. package/dist/practices/package-json-order/best-practice/package.json.declapract.ts +8 -6
  15. package/dist/practices/persist-with-dynamodb/best-practice/package.json +5 -4
  16. package/dist/practices/persist-with-rds/bad-practices/old-packagejson-script-names/package.json +9 -0
  17. package/dist/practices/persist-with-rds/bad-practices/old-packagejson-script-names/package.json.declapract.ts +22 -0
  18. package/dist/practices/persist-with-rds/best-practice/config/dev.json +21 -13
  19. package/dist/practices/persist-with-rds/best-practice/config/prod.json +21 -13
  20. package/dist/practices/persist-with-rds/best-practice/config/prod.json.declapract.ts +13 -1
  21. package/dist/practices/persist-with-rds/best-practice/config/test.json +18 -13
  22. package/dist/practices/persist-with-rds/best-practice/config/test.json.declapract.ts +13 -1
  23. package/dist/practices/persist-with-rds/best-practice/package.json +8 -6
  24. package/dist/practices/persist-with-rds/best-practice/provision/schema/connection.config.js +10 -11
  25. package/dist/practices/persist-with-rds/best-practice/provision/schema/deploy.database.sh +1 -1
  26. package/dist/practices/persist-with-rds/best-practice/src/utils/config/Config.ts +26 -0
  27. package/dist/practices/persist-with-rds/best-practice/src/utils/config/Config.ts.declapract.ts +39 -0
  28. package/dist/practices/persist-with-rds/best-practice/src/utils/database/getDatabaseConnection.ts +30 -9
  29. package/dist/practices/provision-github/best-practice/package.json +2 -2
  30. package/dist/practices/tests/best-practice/package.json +1 -1
  31. package/dist/practices/typescript/best-practice/tsconfig.build.json +7 -6
  32. package/package.json +2 -2
@@ -36,7 +36,7 @@ jobs:
36
36
  - name: set node-version
37
37
  uses: actions/setup-node@v3
38
38
  with:
39
- node-version-file: '.nvmrc'
39
+ node-version-file: ".nvmrc"
40
40
 
41
41
  - name: get node-modules from cache
42
42
  uses: actions/cache/restore@v4
@@ -57,7 +57,7 @@ jobs:
57
57
  - name: set node-version
58
58
  uses: actions/setup-node@v3
59
59
  with:
60
- node-version-file: '.nvmrc'
60
+ node-version-file: ".nvmrc"
61
61
 
62
62
  - name: get node-modules from cache
63
63
  uses: actions/cache/restore@v4
@@ -78,7 +78,7 @@ jobs:
78
78
  - name: set node version
79
79
  uses: actions/setup-node@v3
80
80
  with:
81
- node-version-file: '.nvmrc'
81
+ node-version-file: ".nvmrc"
82
82
 
83
83
  - name: set terraform version
84
84
  uses: hashicorp/setup-terraform@v3
@@ -102,7 +102,7 @@ jobs:
102
102
  - name: set node-version
103
103
  uses: actions/setup-node@v3
104
104
  with:
105
- node-version-file: '.nvmrc'
105
+ node-version-file: ".nvmrc"
106
106
 
107
107
  - name: get node-modules from cache
108
108
  uses: actions/cache/restore@v4
@@ -123,7 +123,7 @@ jobs:
123
123
  - name: set node-version
124
124
  uses: actions/setup-node@v3
125
125
  with:
126
- node-version-file: '.nvmrc'
126
+ node-version-file: ".nvmrc"
127
127
 
128
128
  - name: get node-modules from cache
129
129
  uses: actions/cache/restore@v4
@@ -144,7 +144,7 @@ jobs:
144
144
  - name: set node-version
145
145
  uses: actions/setup-node@v3
146
146
  with:
147
- node-version-file: '.nvmrc'
147
+ node-version-file: ".nvmrc"
148
148
 
149
149
  - name: get node-modules from cache
150
150
  uses: actions/cache/restore@v4
@@ -168,8 +168,11 @@ jobs:
168
168
  && echo 'wrong aws account' && exit 1 \
169
169
  || echo 'correct aws account';
170
170
 
171
- - name: provision:integration-test-db
172
- run: npm run provision:integration-test-db --if-present
171
+ - name: start:testdb
172
+ run: npm run start:testdb --if-present
173
+
174
+ - name: start:livedb:dev
175
+ run: npm run start:livedb:dev --if-present
173
176
 
174
177
  - name: test:integration
175
178
  run: THOROUGH=true npm run test:integration
@@ -184,7 +187,7 @@ jobs:
184
187
  - name: set node-version
185
188
  uses: actions/setup-node@v3
186
189
  with:
187
- node-version-file: '.nvmrc'
190
+ node-version-file: ".nvmrc"
188
191
 
189
192
  - name: get node-modules from cache
190
193
  uses: actions/cache/restore@v4
@@ -208,8 +211,11 @@ jobs:
208
211
  && echo 'wrong aws account' && exit 1 \
209
212
  || echo 'correct aws account';
210
213
 
211
- - name: provision:integration-test-db
212
- run: npm run provision:integration-test-db --if-present
214
+ - name: start:testdb
215
+ run: npm run start:testdb --if-present
216
+
217
+ - name: start:livedb:dev
218
+ run: npm run start:livedb:dev --if-present
213
219
 
214
220
  - name: test:acceptance:locally
215
221
  run: npm run test:acceptance:locally
@@ -9,9 +9,9 @@ jobs:
9
9
  release-please:
10
10
  runs-on: ubuntu-24.04
11
11
  steps:
12
- - uses: google-github-actions/release-please-action@v3
12
+ - uses: google-github-actions/release-please-action@v3.7.6 # https://github.com/googleapis/release-please-action/issues/840
13
13
  with:
14
14
  token: ${{ secrets.RELEASE_PLEASE_GITHUB_TOKEN }}
15
15
  release-type: node
16
- pull-request-title-pattern: 'chore(release): v${version} 🎉'
16
+ pull-request-title-pattern: "chore(release): v${version} 🎉"
17
17
  changelog-path: changelog.md
@@ -12,7 +12,7 @@ permissions:
12
12
 
13
13
  jobs:
14
14
  pullreq-title:
15
- runs-on: ubuntu-latest
15
+ runs-on: ubuntu-24.04
16
16
  steps:
17
17
  - name: test:pullreq:title
18
18
  uses: amannn/action-semantic-pull-request@v5
@@ -0,0 +1,45 @@
1
+ #!/usr/bin/env bash
2
+ #
3
+ # SKILL: use.rds.capacity
4
+ #
5
+ # Ensures the RDS database has capacity and is ready to accept connections.
6
+ #
7
+ # What it does:
8
+ # 1. Opens a VPC tunnel to the database cluster (via use.vpc.tunnel)
9
+ # 2. Extracts the database host and port from the tunnel configuration
10
+ # 3. Polls the database until it responds (waking serverless RDS if paused)
11
+ #
12
+ # When to use:
13
+ # - Before running tests or migrations that need database access
14
+ # - When a serverless RDS instance may be paused and needs to be awakened
15
+ # - Any time you need to ensure the database is ready before proceeding
16
+ #
17
+ # Usage:
18
+ # STAGE=dev ./.agent/repo=.this/skills/use.rds.capacity.sh
19
+ #
20
+ # Prerequisites:
21
+ # - STAGE environment variable must be set
22
+ # - AWS credentials configured with SSM access
23
+ # - sudo access (for /etc/hosts modification via vpc tunnel)
24
+ # - pg_isready command available (postgresql-client)
25
+ #
26
+ set -eo pipefail
27
+
28
+ # failfast if STAGE is not declared
29
+ [[ -z "${STAGE:-}" ]] && echo "STAGE is not set" && exit 1
30
+
31
+ set -u
32
+
33
+ # ensure the dev tunnel is awake
34
+ .agent/repo=.this/skills/use.vpc.tunnel.ts
35
+
36
+ # ping until available
37
+ npx declastruct plan --wish .agent/repo=.this/skills/use.vpc.tunnel.ts --into .temp/tunnel.plan.json
38
+ DB_HOST=$(jq -r '.changes[] | select(.forResource.class == "DeclaredUnixHostAlias") | .state.desired.from' .temp/tunnel.plan.json)
39
+ DB_PORT=$(jq -r '.changes[] | select(.forResource.class == "DeclaredAwsVpcTunnel") | .state.desired.from.port' .temp/tunnel.plan.json)
40
+
41
+ # await for the database to have capacity (awakens serverless rds if paused)
42
+ echo "Awaiting database capacity at $DB_HOST:$DB_PORT..."
43
+ timeout 180 bash -c "until pg_isready -h $DB_HOST -p $DB_PORT; do sleep 5; done"
44
+ echo "Database is ready"
45
+
@@ -0,0 +1,79 @@
1
+ #!/bin/bash
2
+ //bin/true && exec npx declastruct apply --plan yolo --wish "$0"
3
+ //
4
+ // SKILL: use.vpc.tunnel
5
+ //
6
+ // Opens a secure VPC tunnel to the ahbodedb database cluster via AWS SSM.
7
+ //
8
+ // What it does:
9
+ // 1. Creates an SSM tunnel through the vpc-main-bastion to the database cluster
10
+ // 2. Binds the tunnel to localhost:$port
11
+ // 3. Adds a /etc/hosts alias so the database can be reached via a friendly hostname
12
+ //
13
+ // When to use:
14
+ // - Before acceptance tests that need remote database access
15
+ // - When you need to connect to the database locally
16
+ // - Any time local code needs to reach an RDS cluster in the VPC
17
+ //
18
+ // Usage:
19
+ // Direct execution: ./.agent/repo=.this/skills/use.vpc.tunnel.ts
20
+ // Via declastruct: npx declastruct apply --plan yolo --wish .agent/repo=.this/skills/use.vpc.tunnel.ts
21
+ //
22
+ // Prerequisites:
23
+ // - AWS credentials configured with SSM access
24
+ // - sudo access (for /etc/hosts modification), if not already set
25
+ //
26
+ // Why via declastruct:
27
+ // Declastruct enables declarative instructions — you specify *what* you want
28
+ // (a tunnel, a host alias) rather than *how* to get it (spawn ssm-proxy, edit
29
+ // /etc/hosts, track PIDs, cleanup, etc...). The runtime diffs current vs desired state
30
+ // and applies only necessary changes. This makes skills more intuitive and
31
+ // maintainable, as well as idempotent and safe to run repeatedly.
32
+ //
33
+ import { DeclastructProvider } from 'declastruct';
34
+ import {
35
+ DeclaredAwsVpcTunnel,
36
+ getDeclastructAwsProvider,
37
+ } from 'declastruct-aws';
38
+ import {
39
+ DeclaredUnixHostAlias,
40
+ getDeclastructUnixNetworkProvider,
41
+ } from 'declastruct-unix-network';
42
+
43
+ import { getConfig } from '../../../src/utils/config/getConfig';
44
+
45
+ export const getProviders = async (): Promise<DeclastructProvider[]> => [
46
+ await getDeclastructAwsProvider({}, { log: console }),
47
+ await getDeclastructUnixNetworkProvider({}, { log: console }),
48
+ ];
49
+
50
+ export const getResources = async () => {
51
+ // grab the config
52
+ const config = await getConfig();
53
+
54
+ // open the tunnel
55
+ const tunnel = DeclaredAwsVpcTunnel.as({
56
+ via: {
57
+ mechanism: 'aws.ssm',
58
+ bastion: { exid: 'vpc-main-bastion' },
59
+ },
60
+ into: {
61
+ cluster: { name: 'ahbodedb' },
62
+ },
63
+ from: {
64
+ host: 'localhost',
65
+ port: config.database.tunnel.local.port,
66
+ },
67
+ status: 'OPEN',
68
+ });
69
+
70
+ // bind the host alias
71
+ const hostAlias = DeclaredUnixHostAlias.as({
72
+ via: '/etc/hosts',
73
+ from: config.database.tunnel.local.host,
74
+ into: '127.0.0.1',
75
+ });
76
+
77
+ // instruct to set each
78
+ return [tunnel, hostAlias];
79
+ };
@@ -5,11 +5,11 @@ on:
5
5
  inputs:
6
6
  stage:
7
7
  type: string
8
- description: 'the stage to deploy to'
8
+ description: "the stage to deploy to"
9
9
  required: true
10
10
  github-environment:
11
11
  type: string
12
- description: 'the github environment that the apply step will be executed in'
12
+ description: "the github environment that the apply step will be executed in"
13
13
  required: true
14
14
  aws-region:
15
15
  type: string
@@ -31,9 +31,6 @@ on:
31
31
  aws-secret-access-key:
32
32
  required: true
33
33
  description: required credentials to authenticate with aws provider and state persistance
34
- open-vpn-config:
35
- required: false
36
- description: complete openvpn config required to enter the vpn, if needed
37
34
  pagerduty-integration-key:
38
35
  required: false
39
36
  description: enables sending pagerduty alarms on failure
@@ -52,7 +49,7 @@ jobs:
52
49
  - name: set node-version
53
50
  uses: actions/setup-node@v3
54
51
  with:
55
- node-version-file: '.nvmrc'
52
+ node-version-file: ".nvmrc"
56
53
 
57
54
  - name: configure aws credentials
58
55
  uses: aws-actions/configure-aws-credentials@v1
@@ -92,7 +89,7 @@ jobs:
92
89
  - name: set node-version
93
90
  uses: actions/setup-node@v3
94
91
  with:
95
- node-version-file: '.nvmrc'
92
+ node-version-file: ".nvmrc"
96
93
 
97
94
  - name: configure aws credentials
98
95
  uses: aws-actions/configure-aws-credentials@v1
@@ -119,40 +116,20 @@ jobs:
119
116
  if: steps.cache.outputs.cache-hit != 'true'
120
117
  run: npm ci --ignore-scripts --prefer-offline --no-audit
121
118
 
122
- - name: vpn:prepare
119
+ - name: vpc:tunnel:open
123
120
  if: inputs.needs-vpn-for-acceptance
124
- run: |
125
- sudo apt update \
126
- && sudo apt-get install openvpn openvpn-systemd-resolved \
127
- && mkdir ~/.vpn \
128
- && echo "${{ secrets.open-vpn-config }}" | base64 -d > ~/.vpn/vpn.connection.ovpn
129
-
130
- - name: vpn:connect
131
- if: inputs.needs-vpn-for-acceptance
132
- run: |
133
- # create the log file, so that we have permissions to read it
134
- touch openvpn.log
135
-
136
- # start openvpn in the background
137
- sudo openvpn --config ~/.vpn/vpn.connection.ovpn --daemon --log openvpn.log
138
-
139
- # wait until we've confirmed that it successfully connected; https://superuser.com/a/900134/425694
140
- ( tail -f -n0 openvpn.log & ) | grep -q "Initialization Sequence Completed"
121
+ run: STAGE=${{ inputs.stage }} .agent/repo=.this/skills/use.vpc.tunnel.ts
141
122
 
142
123
  - name: test:acceptance
143
124
  run: STAGE=${{ inputs.stage }} npm run test:acceptance
144
125
 
145
- - name: vpn:disconnect
146
- if: inputs.needs-vpn-for-acceptance
147
- run: sudo killall openvpn
148
-
149
126
  - name: alarm on failure
150
127
  env:
151
128
  PAGERDUTY_INTEGRATION_KEY: ${{ secrets.pagerduty-integration-key }}
152
129
  if: failure() && env.PAGERDUTY_INTEGRATION_KEY
153
130
  uses: Entle/action-pagerduty-alert@0.2.0 # https://github.com/marketplace/actions/pagerduty-alert
154
131
  with:
155
- pagerduty-integration-key: '${{ secrets.pagerduty-integration-key }}'
132
+ pagerduty-integration-key: "${{ secrets.pagerduty-integration-key }}"
156
133
  pagerduty-dedup-key: github_workflow_failed
157
134
 
158
135
  prune:
@@ -165,7 +142,7 @@ jobs:
165
142
  - name: set node-version
166
143
  uses: actions/setup-node@v3
167
144
  with:
168
- node-version-file: '.nvmrc'
145
+ node-version-file: ".nvmrc"
169
146
 
170
147
  - name: configure aws credentials
171
148
  uses: aws-actions/configure-aws-credentials@v1
@@ -5,14 +5,14 @@ on:
5
5
  inputs:
6
6
  stage:
7
7
  type: string
8
- description: 'the stage to execute against'
8
+ description: "the stage to execute against"
9
9
  required: true
10
10
  github-environment:
11
11
  type: string
12
- description: 'the github environment that the apply step will be executed in'
12
+ description: "the github environment that the apply step will be executed in"
13
13
  allow-apply:
14
14
  type: boolean
15
- description: 'whether the apply step is enabled. defaults to true on main'
15
+ description: "whether the apply step is enabled. defaults to true on main"
16
16
  default: ${{ github.ref == 'refs/heads/main' }}
17
17
  aws-region:
18
18
  type: string
@@ -27,9 +27,6 @@ on:
27
27
  aws-secret-access-key:
28
28
  required: true
29
29
  description: required credentials to authenticate with aws provider for db credentials
30
- open-vpn-config:
31
- required: true
32
- description: complete openvpn config required to enter the vpn
33
30
 
34
31
  jobs:
35
32
  install:
@@ -47,7 +44,7 @@ jobs:
47
44
  - name: set node-version
48
45
  uses: actions/setup-node@v3
49
46
  with:
50
- node-version-file: '.nvmrc'
47
+ node-version-file: ".nvmrc"
51
48
 
52
49
  - name: node-modules cache get
53
50
  uses: actions/cache/restore@v4
@@ -74,23 +71,8 @@ jobs:
74
71
  && echo 'wrong aws account' && exit 1 \
75
72
  || echo 'correct aws account';
76
73
 
77
- - name: vpn:prepare
78
- run: |
79
- sudo apt update \
80
- && sudo apt-get install openvpn openvpn-systemd-resolved \
81
- && mkdir ~/.vpn \
82
- && echo "${{ secrets.open-vpn-config }}" | base64 -d > ~/.vpn/vpn.connection.ovpn
83
-
84
- - name: vpn:connect
85
- run: |
86
- # create the log file, so that we have permissions to read it
87
- touch openvpn.log
88
-
89
- # start openvpn in the background
90
- sudo openvpn --config ~/.vpn/vpn.connection.ovpn --daemon --log openvpn.log
91
-
92
- # wait until we've confirmed that it successfully connected; https://superuser.com/a/900134/425694
93
- ( tail -f -n0 openvpn.log & ) | grep -q "Initialization Sequence Completed"
74
+ - name: vpc:tunnel:open
75
+ run: STAGE=${{ inputs.stage }} .agent/repo=.this/skills/use.vpc.tunnel.ts
94
76
 
95
77
  - name: plan
96
78
  run: STAGE=${{ inputs.stage }} npm run provision:schema:plan | tee ./plan.log
@@ -101,7 +83,7 @@ jobs:
101
83
  # check that there was not a connection error
102
84
  if grep "connect ETIMEDOUT" ./plan.log
103
85
  then
104
- echo "🛑 connection timed out, could not execute plan. is vpn working?"
86
+ echo "🛑 connection timed out, could not execute plan. is vpc tunnel working?"
105
87
  exit 1
106
88
  fi
107
89
 
@@ -116,9 +98,6 @@ jobs:
116
98
  - name: has changes planned?
117
99
  run: echo "${{ steps.evaluate-plan.outputs.has-changes-planned }}"
118
100
 
119
- - name: vpn:disconnect
120
- run: sudo killall openvpn
121
-
122
101
  apply:
123
102
  runs-on: ubuntu-24.04
124
103
  environment: ${{ inputs.github-environment }}
@@ -131,7 +110,7 @@ jobs:
131
110
  - name: set node-version
132
111
  uses: actions/setup-node@v3
133
112
  with:
134
- node-version-file: '.nvmrc'
113
+ node-version-file: ".nvmrc"
135
114
 
136
115
  - name: node-modules cache get
137
116
  uses: actions/cache/restore@v4
@@ -158,26 +137,8 @@ jobs:
158
137
  && echo 'wrong aws account' && exit 1 \
159
138
  || echo 'correct aws account';
160
139
 
161
- - name: vpn:prepare
162
- run: |
163
- sudo apt update \
164
- && sudo apt-get install openvpn openvpn-systemd-resolved \
165
- && mkdir ~/.vpn \
166
- && echo "${{ secrets.open-vpn-config }}" | base64 -d > ~/.vpn/vpn.connection.ovpn
167
-
168
- - name: vpn:connect
169
- run: |
170
- # create the log file, so that we have permissions to read it
171
- touch openvpn.log
172
-
173
- # start openvpn in the background
174
- sudo openvpn --config ~/.vpn/vpn.connection.ovpn --daemon --log openvpn.log
175
-
176
- # wait until we've confirmed that it successfully connected; https://superuser.com/a/900134/425694
177
- ( tail -f -n0 openvpn.log & ) | grep -q "Initialization Sequence Completed"
140
+ - name: vpc:tunnel:open
141
+ run: STAGE=${{ inputs.stage }} .agent/repo=.this/skills/use.vpc.tunnel.ts
178
142
 
179
143
  - name: apply
180
144
  run: STAGE=${{ inputs.stage }} npm run provision:schema:apply
181
-
182
- - name: vpn:disconnect
183
- run: sudo killall openvpn
@@ -5,13 +5,13 @@ on:
5
5
  inputs:
6
6
  working-directory:
7
7
  type: string
8
- description: 'the directory from within which to execute terraform commands'
8
+ description: "the directory from within which to execute terraform commands"
9
9
  github-environment:
10
10
  type: string
11
- description: 'the github environment that the apply step will be executed in'
11
+ description: "the github environment that the apply step will be executed in"
12
12
  allow-apply:
13
13
  type: boolean
14
- description: 'whether the apply step is enabled. defaults to true on main'
14
+ description: "whether the apply step is enabled. defaults to true on main"
15
15
  default: ${{ github.ref == 'refs/heads/main' }}
16
16
  aws-region:
17
17
  type: string
@@ -32,7 +32,7 @@ on:
32
32
 
33
33
  jobs:
34
34
  plan:
35
- runs-on: ubuntu-latest
35
+ runs-on: ubuntu-24.04
36
36
  defaults:
37
37
  run:
38
38
  working-directory: ${{ inputs.working-directory }}
@@ -85,7 +85,7 @@ jobs:
85
85
  run: echo "${{ steps.evaluate-plan.outputs.has-changes-planned }}"
86
86
 
87
87
  apply:
88
- runs-on: ubuntu-latest
88
+ runs-on: ubuntu-24.04
89
89
  environment: ${{ inputs.github-environment }}
90
90
  needs: plan
91
91
  if: ${{ inputs.allow-apply == true && needs.plan.outputs.has-changes-planned == 'true' }}
@@ -67,7 +67,6 @@ jobs:
67
67
  secrets:
68
68
  aws-access-key-id: ${{ secrets.DEV_AWS_ACCESS_KEY_ID }}
69
69
  aws-secret-access-key: ${{ secrets.DEV_AWS_SECRET_ACCESS_KEY }}
70
- open-vpn-config: ${{ secrets.DEV_OPEN_VPN_CONFIG }}
71
70
 
72
71
  sql-schema-prod:
73
72
  uses: ./.github/workflows/.sql-schema-control.yml
@@ -80,4 +79,3 @@ jobs:
80
79
  secrets:
81
80
  aws-access-key-id: ${{ secrets.PROD_AWS_ACCESS_KEY_ID }}
82
81
  aws-secret-access-key: ${{ secrets.PROD_AWS_SECRET_ACCESS_KEY }}
83
- open-vpn-config: ${{ secrets.PROD_OPEN_VPN_CONFIG }}
@@ -0,0 +1,7 @@
1
+ {
2
+ "devDependencies": {
3
+ "declastruct": "@declapract{check.minVersion('1.3.0')}",
4
+ "declastruct-aws": "@declapract{check.minVersion('1.0.3')}",
5
+ "declastruct-unix-network": "@declapract{check.minVersion('1.0.0')}"
6
+ }
7
+ }
@@ -0,0 +1,3 @@
1
+ import { FileCheckType } from 'declapract';
2
+
3
+ export const check = FileCheckType.CONTAINS;
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "dependencies": {
3
- "domain-objects": "@declapract{check.minVersion('0.29.1')}",
3
+ "domain-objects": "@declapract{check.minVersion('0.31.0')}",
4
4
  "joi": "@declapract{check.minVersion('17.4.0')}",
5
- "type-fns": "@declapract{check.minVersion('1.17.0')}"
5
+ "type-fns": "@declapract{check.minVersion('1.21.0')}"
6
6
  }
7
7
  }
@@ -11,6 +11,7 @@ const expectedIgnores = [
11
11
  '.terraform',
12
12
  '.terraform.lock',
13
13
  '.yalc',
14
+ '.temp',
14
15
  'coverage',
15
16
  'dist',
16
17
  'node_modules',
@@ -38,15 +38,17 @@ export const desiredRelativeKeyOrder = {
38
38
  'build:clean',
39
39
  'build:compile',
40
40
  'build',
41
- 'provision:docker:clear',
42
- 'provision:docker:prepare',
43
- 'provision:docker:up',
44
- 'provision:docker:await',
45
- 'provision:docker:down',
46
41
  'provision:schema:plan',
47
42
  'provision:schema:apply',
48
43
  'provision:schema:sync',
49
- 'provision:integration-test-db',
44
+ 'provision:testdb:docker:clear',
45
+ 'provision:testdb:docker:prepare',
46
+ 'provision:testdb:docker:up',
47
+ 'provision:testdb:docker:await',
48
+ 'provision:testdb:docker:down',
49
+ 'provision:testdb',
50
+ 'start:testdb',
51
+ 'start:livedb:dev',
50
52
  'test:commits',
51
53
  'test:types',
52
54
  'test:format:prettier',
@@ -7,9 +7,10 @@
7
7
  },
8
8
  "scripts": {
9
9
  "generate:dao:dynamodb": "npx dynamodb-dao-generator generate && npm run fix:format",
10
- "provision:docker:up": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml up -d --force-recreate --build --renew-anon-volumes",
11
- "provision:docker:down": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml down",
12
- "provision:dynamodb:schema": "terraform -chdir=provision/aws/environments/test apply -auto-approve",
13
- "provision:integration-test-db": "npm run provision:docker:up && npm run provision:dynamodb:schema"
10
+ "provision:testdb:docker:up": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml up -d --force-recreate --build --renew-anon-volumes",
11
+ "provision:testdb:docker:down": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml down",
12
+ "provision:testdb:dynamodb:schema": "terraform -chdir=provision/aws/environments/test apply -auto-approve",
13
+ "provision:testdb": "npm run provision:testdb:docker:up && npm run provision:testdb:dynamodb:schema",
14
+ "start:testdb": "npm run provision:testdb"
14
15
  }
15
16
  }
@@ -0,0 +1,9 @@
1
+ {
2
+ "scripts": {
3
+ "provision:docker:clear": "docker rm -f $(docker ps -a -f 'publish=7821' -q) 2>/dev/null || true && echo 'ensured port is available 👍'",
4
+ "provision:docker:prepare": "cp provision/schema/sql/init/.extensions.sql provision/docker/integration-test-db/init/extensions.sql && cp provision/schema/sql/init/.schema.sql provision/docker/integration-test-db/init/schema.sql && cp provision/schema/sql/init/.user.cicd.sql provision/docker/integration-test-db/init/user.cicd.sql",
5
+ "provision:docker:up": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml up -d --force-recreate --build --renew-anon-volumes",
6
+ "provision:docker:await": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml exec -T postgres /root/wait-for-postgres.sh",
7
+ "provision:docker:down": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml down"
8
+ }
9
+ }
@@ -0,0 +1,22 @@
1
+ import { FileCheckType, FileFixFunction } from 'declapract';
2
+
3
+ export const check = FileCheckType.CONTAINS;
4
+
5
+ export const fix: FileFixFunction = (contents) => {
6
+ if (!contents) return { contents }; // do nothing if no contents
7
+ const packageJSON = JSON.parse(contents);
8
+ const updatedPackageJSON = {
9
+ ...packageJSON,
10
+ scripts: {
11
+ ...packageJSON.scripts,
12
+ 'provision:docker:clear': undefined,
13
+ 'provision:docker:prepare': undefined,
14
+ 'provision:docker:up': undefined,
15
+ 'provision:docker:await': undefined,
16
+ 'provision:docker:down': undefined,
17
+ },
18
+ };
19
+ return {
20
+ contents: JSON.stringify(updatedPackageJSON, null, 2),
21
+ };
22
+ };
@@ -1,20 +1,28 @@
1
1
  {
2
2
  "database": {
3
- "admin": {
4
- "host": "@declapract{variable.databaseClusterHost.dev}",
5
- "port": 5432,
3
+ "target": {
6
4
  "database": "@declapract{variable.databaseName}",
7
- "schema": "@declapract{variable.databaseName}",
8
- "username": "@declapract{variable.databaseUserName.cicdUser}",
9
- "password": "__CHANG3_ME__"
5
+ "schema": "@declapract{variable.databaseName}"
10
6
  },
11
- "service": {
12
- "host": "@declapract{variable.databaseClusterHost.dev}",
13
- "port": 5432,
14
- "database": "@declapract{variable.databaseName}",
15
- "schema": "@declapract{variable.databaseName}",
16
- "username": "@declapract{variable.databaseUserName.serviceUser}",
17
- "password": "__CHANG3_ME__"
7
+ "role": {
8
+ "cicd": {
9
+ "username": "@declapract{variable.databaseUserName.cicdUser}",
10
+ "password": "__CHANG3_ME__"
11
+ },
12
+ "crud": {
13
+ "username": "@declapract{variable.databaseUserName.serviceUser}",
14
+ "password": "__CHANG3_ME__"
15
+ }
16
+ },
17
+ "tunnel": {
18
+ "local": {
19
+ "host": "@declapract{variable.databaseTunnelHost.dev}",
20
+ "port": 15432
21
+ },
22
+ "lambda": {
23
+ "host": "@declapract{variable.databaseClusterHost.dev}",
24
+ "port": 5432
25
+ }
18
26
  }
19
27
  }
20
28
  }
@@ -1,20 +1,28 @@
1
1
  {
2
2
  "database": {
3
- "admin": {
4
- "host": "@declapract{variable.databaseClusterHost.prod}",
5
- "port": 5432,
3
+ "target": {
6
4
  "database": "@declapract{variable.databaseName}",
7
- "schema": "@declapract{variable.databaseName}",
8
- "username": "@declapract{variable.databaseUserName.cicdUser}",
9
- "password": "__PARAM__"
5
+ "schema": "@declapract{variable.databaseName}"
10
6
  },
11
- "service": {
12
- "host": "@declapract{variable.databaseClusterHost.prod}",
13
- "port": 5432,
14
- "database": "@declapract{variable.databaseName}",
15
- "schema": "@declapract{variable.databaseName}",
16
- "username": "@declapract{variable.databaseUserName.serviceUser}",
17
- "password": "__PARAM__"
7
+ "role": {
8
+ "cicd": {
9
+ "username": "@declapract{variable.databaseUserName.cicdUser}",
10
+ "password": "__PARAM__"
11
+ },
12
+ "crud": {
13
+ "username": "@declapract{variable.databaseUserName.serviceUser}",
14
+ "password": "__PARAM__"
15
+ }
16
+ },
17
+ "tunnel": {
18
+ "local": {
19
+ "host": "@declapract{variable.databaseTunnelHost.prod}",
20
+ "port": 15433
21
+ },
22
+ "lambda": {
23
+ "host": "@declapract{variable.databaseClusterHost.prod}",
24
+ "port": 5432
25
+ }
18
26
  }
19
27
  }
20
28
  }
@@ -1,3 +1,15 @@
1
- import { FileCheckType } from 'declapract';
1
+ import { FileCheckType, FileFixFunction } from 'declapract';
2
2
 
3
3
  export const check = FileCheckType.CONTAINS;
4
+
5
+ export const fix: FileFixFunction = (contents, context) => {
6
+ if (!contents) return { contents: context.declaredFileContents }; // init as declared if file dne
7
+
8
+ return {
9
+ contents: JSON.stringify(
10
+ { ...JSON.parse(contents), ...JSON.parse(context.declaredFileContents!) },
11
+ null,
12
+ 2,
13
+ ),
14
+ };
15
+ };
@@ -1,20 +1,25 @@
1
1
  {
2
2
  "database": {
3
- "admin": {
4
- "host": "localhost",
5
- "port": 7821,
3
+ "target": {
6
4
  "database": "@declapract{variable.databaseName}",
7
- "schema": "@declapract{variable.databaseName}",
8
- "username": "@declapract{variable.databaseUserName.cicdUser}",
9
- "password": "__CHANG3_ME__"
5
+ "schema": "@declapract{variable.databaseName}"
10
6
  },
11
- "service": {
12
- "host": "localhost",
13
- "port": 7821,
14
- "database": "@declapract{variable.databaseName}",
15
- "schema": "@declapract{variable.databaseName}",
16
- "username": "@declapract{variable.databaseUserName.serviceUser}",
17
- "password": "__CHANG3_ME__"
7
+ "role": {
8
+ "cicd": {
9
+ "username": "@declapract{variable.databaseUserName.cicdUser}",
10
+ "password": "__CHANG3_ME__"
11
+ },
12
+ "crud": {
13
+ "username": "@declapract{variable.databaseUserName.serviceUser}",
14
+ "password": "__CHANG3_ME__"
15
+ }
16
+ },
17
+ "tunnel": {
18
+ "local": {
19
+ "host": "localhost",
20
+ "port": 7821
21
+ },
22
+ "lambda": null
18
23
  }
19
24
  }
20
25
  }
@@ -1,3 +1,15 @@
1
- import { FileCheckType } from 'declapract';
1
+ import { FileCheckType, FileFixFunction } from 'declapract';
2
2
 
3
3
  export const check = FileCheckType.CONTAINS;
4
+
5
+ export const fix: FileFixFunction = (contents, context) => {
6
+ if (!contents) return { contents: context.declaredFileContents }; // init as declared if file dne
7
+
8
+ return {
9
+ contents: JSON.stringify(
10
+ { ...JSON.parse(contents), ...JSON.parse(context.declaredFileContents!) },
11
+ null,
12
+ 2,
13
+ ),
14
+ };
15
+ };
@@ -16,14 +16,16 @@
16
16
  "generate:dao:postgres": "npx sql-dao-generator generate && npm run fix:format",
17
17
  "generate:schema": "npx sql-schema-generator generate -c codegen.sql.schema.yml && npm run fix:format",
18
18
  "generate:types-from-sql": "npx sql-code-generator generate -c codegen.sql.types.yml && npm run fix:format",
19
- "provision:docker:clear": "docker rm -f $(docker ps -a -f 'publish=7821' -q) 2>/dev/null || true && echo 'ensured port is available 👍'",
20
- "provision:docker:prepare": "cp provision/schema/sql/init/.extensions.sql provision/docker/integration-test-db/init/extensions.sql && cp provision/schema/sql/init/.schema.sql provision/docker/integration-test-db/init/schema.sql && cp provision/schema/sql/init/.user.cicd.sql provision/docker/integration-test-db/init/user.cicd.sql",
21
- "provision:docker:up": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml up -d --force-recreate --build --renew-anon-volumes",
22
- "provision:docker:await": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml exec -T postgres /root/wait-for-postgres.sh",
23
- "provision:docker:down": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml down",
24
19
  "provision:schema:plan": "npx sql-schema-control plan -c provision/schema/control.yml",
25
20
  "provision:schema:apply": "npx sql-schema-control apply -c provision/schema/control.yml",
26
21
  "provision:schema:sync": "npx sql-schema-control sync -c provision/schema/control.yml",
27
- "provision:integration-test-db": "npm run provision:docker:clear && npm run provision:docker:prepare && npm run provision:docker:up && npm run provision:docker:await && npm run provision:schema:plan && npm run provision:schema:apply && npm run provision:schema:plan"
22
+ "provision:testdb:docker:clear": "docker rm -f $(docker ps -a -f 'publish=7821' -q) 2>/dev/null || true && echo 'ensured port is available 👍'",
23
+ "provision:testdb:docker:prepare": "cp provision/schema/sql/init/.extensions.sql provision/docker/integration-test-db/init/extensions.sql && cp provision/schema/sql/init/.schema.sql provision/docker/integration-test-db/init/schema.sql && cp provision/schema/sql/init/.user.cicd.sql provision/docker/integration-test-db/init/user.cicd.sql",
24
+ "provision:testdb:docker:up": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml up -d --force-recreate --build --renew-anon-volumes",
25
+ "provision:testdb:docker:await": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml exec -T postgres /root/wait-for-postgres.sh",
26
+ "provision:testdb:docker:down": "docker compose -f ./provision/docker/integration-test-db/docker-compose.yml down",
27
+ "provision:testdb": "npm run provision:testdb:docker:clear && npm run provision:testdb:docker:prepare && npm run provision:testdb:docker:up && npm run provision:testdb:docker:await && npm run provision:schema:plan && npm run provision:schema:apply && npm run provision:schema:plan",
28
+ "start:testdb": "npm run provision:testdb",
29
+ "start:livedb:dev": "echo 'will ping the database until assured its not asleep' && STAGE=dev .agent/repo=.this/skills/use.rds.capacity.sh"
28
30
  }
29
31
  }
@@ -4,20 +4,19 @@ const configInstance = new Config();
4
4
  const getConfig = async () =>
5
5
  configInstance.get(process.env.STAGE || undefined);
6
6
 
7
- const promiseSchemaControlConfig = async () => {
7
+ const promiseSchemaControlCredentials = async () => {
8
8
  const config = await getConfig();
9
- const dbConfig = config.database.admin; // NOTE: schema control must have DDL privileges
10
- const schemaControlConfig = {
11
- host: dbConfig.host,
12
- port: dbConfig.port,
13
- database: dbConfig.schema, // i.e., db = schema
14
- schema: dbConfig.schema,
15
- username: dbConfig.username,
16
- password: dbConfig.password,
9
+ const credentials = {
10
+ host: config.database.tunnel.local.host,
11
+ port: config.database.tunnel.local.port,
12
+ database: config.database.target.database, // i.e., db = schema
13
+ schema: config.database.target.schema,
14
+ username: config.database.role.cicd.username,
15
+ password: config.database.role.cicd.password,
17
16
  };
18
- return schemaControlConfig;
17
+ return credentials;
19
18
  };
20
19
 
21
20
  module.exports = {
22
- promiseConfig: promiseSchemaControlConfig,
21
+ promiseConfig: promiseSchemaControlCredentials,
23
22
  };
@@ -54,7 +54,7 @@ fi;
54
54
 
55
55
 
56
56
  # define the postgres connecition string
57
- CLUSTER_HOST=$([ "$ENVIRONMENT" = 'prod' ] && echo "@declapract{variable.databaseClusterHost.prod}" || echo "@declapract{variable.databaseClusterHost.dev}");
57
+ CLUSTER_HOST=$([ "$ENVIRONMENT" = 'prod' ] && echo "@declapract{variable.databaseTunnelHost.prod}" || echo "@declapract{variable.databaseTunnelHost.dev}");
58
58
  CLUSTER_CONNECTION_STRING=postgresql://postgres:$POSTGRES_ADMIN_PASSWORD@$CLUSTER_HOST:5432
59
59
  ROOT_DB_CONNECTION_STRING=$CLUSTER_CONNECTION_STRING/postgres
60
60
  SVC_DB_CONNECTION_STRING=$CLUSTER_CONNECTION_STRING/@declapract{variable.databaseName}
@@ -0,0 +1,26 @@
1
+ database: {
2
+ target: {
3
+ database: string;
4
+ schema: string;
5
+ };
6
+ role: {
7
+ cicd: {
8
+ username: string;
9
+ password: string;
10
+ };
11
+ crud: {
12
+ username: string;
13
+ password: string;
14
+ };
15
+ };
16
+ tunnel: {
17
+ local: {
18
+ host: string;
19
+ port: number;
20
+ };
21
+ lambda: {
22
+ host: string;
23
+ port: number;
24
+ } | null;
25
+ };
26
+ };
@@ -0,0 +1,39 @@
1
+ import { FileCheckType, FileFixFunction } from 'declapract';
2
+ import { UnexpectedCodePathError } from 'helpful-errors';
3
+
4
+ export const check = FileCheckType.CONTAINS; // practice must contain this
5
+
6
+ const variantsToReplace = [
7
+ `
8
+ database: {
9
+ service: {
10
+ host: string;
11
+ port: number;
12
+ database: string;
13
+ schema: string;
14
+ username: string;
15
+ password: string;
16
+ };
17
+ };
18
+ `.trim(),
19
+ ];
20
+
21
+ export const fix: FileFixFunction = (contents, context) => {
22
+ // if no contents yet, can't fix
23
+ if (!contents) return { contents };
24
+
25
+ // otherwise, try and fix with one of the variants we support
26
+ const desiredContents = variantsToReplace.reduce(
27
+ (contentsNow, thisVariant) =>
28
+ contentsNow?.replace(
29
+ thisVariant,
30
+ context.declaredFileContents?.trim() ??
31
+ UnexpectedCodePathError.throw(
32
+ 'expected to have declared best practice but found null',
33
+ { context },
34
+ ),
35
+ ),
36
+ contents,
37
+ );
38
+ return { contents: desiredContents };
39
+ };
@@ -1,6 +1,8 @@
1
+ import { HelpfulError, UnexpectedCodePathError } from 'helpful-errors';
1
2
  import pg, { Client, QueryResult, QueryResultRow } from 'pg';
2
3
 
3
4
  import { getConfig } from '../config/getConfig';
5
+ import { environment } from '../environment';
4
6
 
5
7
  // https://github.com/brianc/node-postgres/pull/353#issuecomment-283709264
6
8
  pg.types.setTypeParser(20, (value) => parseInt(value, 10)); // cast bigints to numbers; by default, pg returns bigints as strings, since max val of bigint is bigger than max safe value in js
@@ -14,7 +16,7 @@ export interface DatabaseConnection {
14
16
  end: () => Promise<void>;
15
17
  }
16
18
 
17
- export class DatabaseQueryError extends Error {
19
+ export class DatabaseQueryError extends HelpfulError {
18
20
  constructor({
19
21
  sql,
20
22
  values,
@@ -33,27 +35,46 @@ sql:
33
35
  values:
34
36
  ${JSON.stringify(values)}
35
37
  `.trim();
36
- super(message);
38
+ super(message, { sql, values, cause: caught });
37
39
  }
38
40
  }
39
41
 
40
42
  export const getDatabaseConnection = async (): Promise<DatabaseConnection> => {
41
43
  const config = await getConfig();
42
- const dbConfig = config.database.service;
44
+ const target = config.database.target;
45
+ const role = config.database.role.crud;
46
+
47
+ // determine which tunnel to use based on environment.server
48
+ const tunnel =
49
+ environment.server === 'AWS:LAMBDA'
50
+ ? config.database.tunnel.lambda
51
+ : config.database.tunnel.local;
52
+
53
+ // ensure tunnel is defined for the requested server
54
+ if (!tunnel) {
55
+ throw new UnexpectedCodePathError(
56
+ `Database tunnel not configured for environment.server + env.access`,
57
+ { environment },
58
+ );
59
+ }
60
+
61
+ // instantiate the client
43
62
  const client = new Client({
44
- host: dbConfig.host,
45
- user: dbConfig.username,
46
- password: dbConfig.password,
47
- database: dbConfig.schema,
48
- port: dbConfig.port,
63
+ host: tunnel.host,
64
+ port: tunnel.port,
65
+ user: role.username,
66
+ password: role.password,
67
+ database: target.database,
49
68
  });
50
69
  await client.connect();
51
- await client.query(`SET search_path TO ${dbConfig.schema}, public;`); // https://www.postgresql.org/docs/current/ddl-schemas.html#DDL-SCHEMAS-
70
+ await client.query(`SET search_path TO ${target.schema}, public;`); // https://www.postgresql.org/docs/current/ddl-schemas.html#DDL-SCHEMAS-
52
71
  const dbConnection = {
53
72
  query: ({ sql, values }: { sql: string; values?: (string | number)[] }) =>
54
73
  client.query(sql, values),
55
74
  end: () => client.end(),
56
75
  };
76
+
77
+ // declare our interface
57
78
  return {
58
79
  query: (args: { sql: string; values?: any[] }) =>
59
80
  dbConnection.query(args).catch((error) => {
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "devDependencies": {
3
- "declastruct": "@declapract{check.minVersion('1.1.7')}",
4
- "declastruct-github": "@declapract{check.minVersion('1.1.1')}"
3
+ "declastruct": "@declapract{check.minVersion('1.3.0')}",
4
+ "declastruct-github": "@declapract{check.minVersion('1.0.3')}"
5
5
  }
6
6
  }
@@ -4,7 +4,7 @@
4
4
  "jest": "@declapract{check.minVersion('29.3.1')}",
5
5
  "test-fns": "@declapract{check.minVersion('1.4.2')}",
6
6
  "ts-jest": "@declapract{check.minVersion('29.4.5')}",
7
- "ts-node": "@declapract{check.minVersion('10.9.2')}",
7
+ "tsx": "@declapract{check.minVersion('4.20.6')}",
8
8
  "core-js": "@declapract{check.minVersion('3.26.1')}",
9
9
  "@babel/core": "@declapract{check.minVersion('7.28.5')}",
10
10
  "@babel/preset-env": "@declapract{check.minVersion('7.28.5')}",
@@ -4,13 +4,14 @@
4
4
  "rootDir": "src"
5
5
  },
6
6
  "include": [
7
- "src/**/*.ts"
7
+ "src/**/*.ts",
8
+ "nontyped_modules/**/*.d.ts"
8
9
  ],
9
10
  "exclude": [
10
- "**/*.test.(ts|js)", // all explicitly .test files are dev only assets too
11
- "**/.test/**/*.(ts|js)",
12
- "src/**/.scratch/**/*.(ts|js)",
13
- "src/**/__test_utils__/**/*.(ts|js)",
14
- "src/**/__test_assets__/**/*.(ts|js)"
11
+ "**/*.test.ts", // all explicitly .test files are dev only assets too
12
+ "**/*.test.js",
13
+ "**/.test/**/*",
14
+ "**/.scratch/**/*",
15
+ "**/__test*__/**/*" // todo: deprecate this pattern in favor of .test
15
16
  ]
16
17
  }
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "declapract-typescript-ehmpathy",
3
3
  "author": "ehmpathy",
4
4
  "description": "declapract best practices declarations for typescript",
5
- "version": "0.40.0",
5
+ "version": "0.41.0",
6
6
  "license": "MIT",
7
7
  "main": "src/index.js",
8
8
  "repository": "ehmpathy/declapract-typescript-ehmpathy",
@@ -44,7 +44,7 @@
44
44
  "domain-objects": "0.29.2",
45
45
  "expect": "29.4.2",
46
46
  "flat": "5.0.2",
47
- "helpful-errors": "1.3.8",
47
+ "helpful-errors": "1.5.3",
48
48
  "simple-log-methods": "0.5.0"
49
49
  },
50
50
  "peerDependencies": {