minimonolith 0.25.4 → 0.25.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +209 -209
- package/api/getNewAPI/handler.js +53 -53
- package/api/getSyncedHandler/handler.js +40 -40
- package/api/postModule/handler.js +31 -31
- package/api/services.js +15 -15
- package/autotest/getMethodTest/handler.js +14 -14
- package/autotest/getTestServerResponse/index.js +14 -14
- package/autotest/services.js +1 -1
- package/control/get/handler.js +8 -8
- package/control/services.js +1 -1
- package/database/post/handler.js +24 -24
- package/database/postConnection/handler.js +27 -27
- package/database/services.js +1 -1
- package/environment/getDBEnvVars/handler.js +6 -6
- package/environment/getENVEnvVars/handler.js +7 -7
- package/environment/postEnvFile/handler.js +20 -20
- package/environment/postTestEnvironment/index.js +6 -6
- package/environment/services.js +5 -5
- package/error/index.js +4 -4
- package/error/postCompiletime/index.js +13 -13
- package/error/postRuntime/index.js +7 -7
- package/health/post/handler.js +14 -14
- package/health/services.js +1 -1
- package/index.js +23 -23
- package/log/index.js +5 -5
- package/log/post/index.js +3 -3
- package/log/postError/index.js +3 -3
- package/log/postQA/index.js +3 -3
- package/model/getSynced/handler.js +28 -28
- package/model/post/handler.js +17 -17
- package/model/services.js +1 -1
- package/package.json +33 -33
- package/server/getNodejsServerHandler/index.js +70 -70
- package/server/getServerFactory/handler.js +17 -17
- package/server/services.js +1 -1
- package/service/getParsedCode/handler.js +22 -22
- package/service/getParsedEvent/handler.js +12 -12
- package/service/getResponseCode/handler.js +11 -11
- package/service/getRouteCode/handler.js +15 -15
- package/service/getType/handler.js +11 -11
- package/service/postResponse/index.js +13 -13
- package/service/postRoute/handler.js +82 -82
- package/service/services.js +9 -9
- package/setupTesting.js +19 -0
- package/yarn-error.log +0 -4104
package/README.md
CHANGED
@@ -1,209 +1,209 @@

# minimonolith

[codecov](https://codecov.io/gh/DeepHackDev/minimonolith-lib)

`minimonolith` is a lightweight library designed to help you build serverless APIs using AWS Lambda, with a focus on simplicity and ease of use. The library provides a straightforward structure to organize your API's services, methods, validation, and models while handling common tasks like database connection and request validation.

In addition to its simplicity, `minimonolith` enables seamless inter-service communication within your API. This allows services to call one another's functionality without directly importing them, fostering a modular design. For example, you can call the `get` method of the `todo` service from the `todoList` service using `SERVICES.todo.get({ id })`. By registering services within the API, you can easily call their methods from other services, which not only promotes a clean architecture but also paves the way for future support of automated end-to-end testing.
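For instance, a hypothetical `todoList` handler could reuse the `todo` module's `get` service like this (a sketch only: the `todoList` module, its `getWithFirstTodo` service and the field names are illustrative, not part of the library):

```js
// todoList/getWithFirstTodo/handler.js (hypothetical module and service, for illustration)
export default async ({ body, SERVICES }) => {
  // Call the registered 'get' service of the 'todo' module without importing it directly:
  const firstTodo = await SERVICES.todo.get({ id: body.firstTodoId });
  return { todoListId: body.id, firstTodo };
};
```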
## Example Project

Here's an example project using `minimonolith`:

```
.
├── package.json
├── .gitignore
├── .env
├── server.js // For local development
├── index.js  // Root of the code in a deployed AWS Lambda
└── todo
    ├── services.js // Module 'todo' exported services are declared here
    ├── model.js    // Optional: Sequelize model for module 'todo' is declared here
    └── get
        ├── handler.js // Module 'todo' service 'get' handler
        ├── in.js      // Optional: service 'get' input validation, if body not empty
        └── out.js     // Optional: service 'get' output validation, if body not empty
```

### server.js

This file is used for local development. It runs a local server using the `getServer` function returned by `minimonolith`'s `getServerFactory`:

```js
// server.js
import { getServerFactory } from 'minimonolith';

const getServer = await getServerFactory();
const { lambdaHandler } = await import('./index.js');

getServer(lambdaHandler).listen(8080);
```
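With the local server listening, a minimal smoke test is possible before any routes of your own are known, because the library registers a catch-all `OPTIONS` route for CORS preflights. A sketch, assuming Node 18+ (for the global `fetch`) and `server.js` running on port 8080; the `/todo` path is arbitrary:

```js
// smoke-test.js — run with `node smoke-test.js` while `node server.js` is listening.
const res = await fetch('http://localhost:8080/todo', { method: 'OPTIONS' });
console.log(res.status); // expected: 200 — answered by the library's catch-all OPTIONS route
```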
### index.js

This file serves as the root of the code in a deployed AWS Lambda:

```js
// index.js
'use strict';

import { getNewAPI } from 'minimonolith';

const API = getNewAPI({
  PROD_ENV: process.env.PROD_ENV,
  DEV_ENV: process.env.DEV_ENV,
});

await API.postHealthService();
await API.postModule('todo');
await API.postDatabaseService({
  DB_DIALECT: process.env.DB_DIALECT,
  DB_HOST: process.env.DB_HOST,
  DB_PORT: process.env.DB_PORT,
  DB_DB: process.env.DB_DB,
  DB_USER: process.env.DB_USER,
  DB_PASS: process.env.DB_PASS,
});

export const lambdaHandler = await API.getSyncedHandler();
```

### todo/services.js

Here, we declare the service routes for the `todo` module:

```js
// todo/services.js
export default ['getAll', 'get:id', 'post', 'patch:id', 'delete:id'];
```

### todo/model.js

In this file, we define a Sequelize model for the `todo` module:

```js
// todo/model.js
export default moduleName => (orm, types) => {
  const schema = orm.define(moduleName, {
    name: {
      type: types.STRING,
      allowNull: false
    },
  });

  schema.associate = MODELS => {}; // e.g. MODELS.todo.belongsTo(MODELS.todoList, {...});

  return schema;
};
```

### todo/get/handler.js

This file contains the `get:id` route for the `todo` module. It retrieves a todo item by its ID:

```js
// todo/get/handler.js
export default async ({ body, MODELS }) => {
  return await MODELS.todo.findOne({ where: { id: body.id } });
};
```

### todo/get/in.js

This file validates the `get:id` service's input, ensuring that the provided `id` is a string holding a safe integer that exists in the `todo` model:

```js
// todo/get/in.js
import { z, zdb } from 'minimonolith';

export default ({ MODELS }) => ({
  id: z.string()
    .superRefine(zdb.getIsSafeInt('id'))
    .transform(id => parseInt(id))
    .superRefine(zdb.getExists(MODELS.todo, 'id')),
});
```
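Validation files for the other services follow the same export shape. For example, a hypothetical `todo/post/in.js` could look like this (a sketch only: the `name` rule simply mirrors the `allowNull: false` column in the model above and is not prescribed by the library):

```js
// todo/post/in.js (hypothetical sketch)
import { z } from 'minimonolith';

export default ({ MODELS }) => ({ // MODELS is unused here, kept for the standard shape
  name: z.string().min(1),        // 'name' is required by the Sequelize model above
});
```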
## Response Codes

### Success

- POST -> 201
- DELETE -> 204
- Everything else -> 200

### Invalid Request

- ANY -> 400

### Runtime Error

- ANY -> 500
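From a client's point of view, only this status mapping is guaranteed; a hypothetical check (the URL, path and request body are illustrative):

```js
// Hypothetical client snippet (Node 18+). Only the status codes follow the table above.
const created = await fetch('https://<your-api-url>/todo', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ name: 'write docs' }),
});
console.log(created.status); // 201 on success, 400 if validation failed, 500 on a runtime error
```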
## App Environments

There are 4 possible environments:
1. DEV=TRUE + PROD=FALSE: This is the standard DEV environment
2. DEV=FALSE + PROD=FALSE: This is the standard QA environment
3. DEV=FALSE + PROD=TRUE: This is the standard PROD environment
4. DEV=TRUE + PROD=TRUE: This allows testing the behavior of PROD within the "new concept" of the DEV environment

To better understand their relevance:
1. The "new concept" DEV environments (DEV=TRUE) aim to make the API crash if an "important" error happens
   - Currently their only difference is that an error during the service registration phase makes the app crash
   - Some may think QA should also fail on "important" errors; they can use DEV=TRUE there. But some teams run training activities on QA that must be minimally disrupted
2. The "new concept" QA environments (PROD=FALSE) aim at logging data about the system which, in production environments, would be forbidden personal information
   - This is relevant because replication of QA activities (even security QA activities) depends heavily on this

The current app environment is determined by the values of DEV_ENV [TRUE/FALSE] and PROD_ENV [TRUE/FALSE], assuming the same env variable names as used in index.js above:

```makefile
# .env standard dev environment
DEV_ENV=TRUE
PROD_ENV=FALSE
TEST_ENV=FALSE
[...]
```

*NOTICE*: By default, the standard PROD environment is assumed (DEV=FALSE + PROD=TRUE)
- This means that Sequelize will not automatically alter tables that mismatch the defined model.js files
- The detected database dialect/credentials will not be printed
- Critical errors will not make the app crash
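These flags map directly onto the object passed to `getNewAPI`; a sketch of selecting a profile explicitly instead of reading it from the environment (values are the same TRUE/FALSE strings used in `.env`):

```js
import { getNewAPI } from 'minimonolith';

// DEV_ENV / PROD_ENV combinations (see the list above):
//   TRUE  / FALSE -> standard DEV    FALSE / FALSE -> standard QA
//   FALSE / TRUE  -> standard PROD   TRUE  / TRUE  -> PROD behavior under DEV crash rules
const API = getNewAPI({ DEV_ENV: 'FALSE', PROD_ENV: 'FALSE' }); // pin the standard QA profile
```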
## Database Authentication

To set up authentication for the database, you need to pass the necessary variables to `postDatabaseService`, as in index.js above. Assuming the same env variable names as in index.js:

For MySQL:

```makefile
DEV_ENV=TRUE
PROD_ENV=FALSE
DB_DIALECT=mysql
DB_HOST=<your_database_endpoint>
DB_PORT=<your_database_port>
DB_DB=<your_database_name>
DB_USER=<your_database_username>
DB_PASS=<your_database_password>
```

For SQLite in memory:

```makefile
DEV_ENV=TRUE
PROD_ENV=FALSE
DB_DIALECT=sqlite
DB_DB=<your_database_name>
DB_STORAGE=:memory: # Needs to also be passed to API.postDatabaseService()
```
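Since `DB_STORAGE` is not among the variables forwarded in the index.js example above, it has to be passed explicitly; a sketch of that call (only the `DB_STORAGE` key is new, the rest mirrors index.js):

```js
await API.postDatabaseService({
  DB_DIALECT: process.env.DB_DIALECT, // 'sqlite'
  DB_HOST: process.env.DB_HOST,
  DB_PORT: process.env.DB_PORT,
  DB_DB: process.env.DB_DB,
  DB_USER: process.env.DB_USER,
  DB_PASS: process.env.DB_PASS,
  DB_STORAGE: process.env.DB_STORAGE, // ':memory:' — the extra key needed for SQLite
});
```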
Make sure to replace the placeholders with your actual database credentials.
- `DEV_ENV=TRUE` allows Sequelize to alter table structure automatically when working locally
- `PROD_ENV=FALSE` allows logging of DB credentials for debugging purposes in non-production environments
- We consider high quality logging important for app performance and evolution
- However, we recommend automatic (daily) DB credential rotation; high quality logging does not mean giving away your infrastructure to hackers
- At the risk of stating the obvious: do not store personal information in the QA database
package/api/getNewAPI/handler.js
CHANGED
@@ -1,53 +1,53 @@

```js
import getLambdaAPI from 'lambda-api';

import { getNewLib, /*LOG_SERVICE*/ } from '@minimonolith/lib';

import { setAPI } from '../api.js';

import CORS_HEADERS from '../corsHeaders.js';

export default async ({ body, SERVICES }) => {
  const DEV_ENV = body.DEV_ENV;
  const PROD_ENV = body.PROD_ENV;
  const TEST_ENV = body.TEST_ENV;

  const API = {
    ROUTES: getLambdaAPI(),
    //LIB: await getNewLib({ DEBUG: 'TRUE' }),
    LIB: await getNewLib(),
    SCHEMAS: {},
    MODELS: {},
    ORM: undefined,
    DEV_ENV: DEV_ENV,
    PROD_ENV: PROD_ENV,
    TEST_ENV: TEST_ENV,
  };

  API.ROUTES.use((req, res, next) => {
    if (req.headers['x-trigger-error']) {
      /*
      LOG_SERVICE.post({
        ROUTE_CODE: 'LAMBDA_API',
        MESSAGE: 'X_TRIGGER_ERROR_HEADER'
      });
      */
      throw new Error('X_TRIGGER_ERROR_HEADER');
    }
    //console.log('passes here');
    res.cors(); next();
  });

  API.ROUTES.options('/*', (req, res) => {
    for (let k in CORS_HEADERS) { res.header(k, CORS_HEADERS[k]); }
    res.status(200).send({});
  });

  API.postHealthService = SERVICES.health.post.handler;
  API.postModule = SERVICES.api.postModule.handler;
  API.postDatabaseService = SERVICES.database.post.handler;
  API.getSyncedHandler = SERVICES.api.getSyncedHandler.handler;

  setAPI(API);

  return API;
};
```
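The `x-trigger-error` middleware above exists so the 500 path can be exercised end to end. A hedged sketch of driving it against the local server from the README (assuming `server.js` is listening on port 8080; the `/todo` path is arbitrary, and `OPTIONS` is used because the catch-all route above always matches):

```js
// Any matched request carrying the x-trigger-error header makes the middleware above throw;
// the error handler registered in getSyncedHandler then answers with a 500.
const res = await fetch('http://localhost:8080/todo', {
  method: 'OPTIONS',
  headers: { 'x-trigger-error': '1' },
});
console.log(res.status);       // expected: 500
console.log(await res.json()); // expected: { ROUTE_CODE: 'LAMBDA_API', ERROR: 'X_TRIGGER_ERROR_HEADER' }
```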
package/api/getSyncedHandler/handler.js
CHANGED

@@ -1,40 +1,40 @@

```js
import getLambdaAPI from 'lambda-api';
//import { LOG_SERVICE } from '@minimonolith/lib';

import API from '../api.js';

export default async ({ body, ROUTE_CODE, SERVICES }) => {

  API().ROUTES.use((err, req, res, next) => {
    //LOG_SERVICE.postError({ ROUTE_CODE: 'LAMBDA_API', ERROR: err });
    console.log({ ROUTE_CODE: 'LAMBDA_API', ERROR: err });
    res.cors(); //next();
    res.status(500).send({ ROUTE_CODE: 'LAMBDA_API', ERROR: err.message });
  });

  if (API().ORM) await SERVICES.model.getSynced.handler();

  API().LIB.postAdditionalKey('MODELS', API().MODELS);

  //LOG_SERVICE.post({ ROUTE_CODE, INFO: 'LISTENING' });
  console.log({ ROUTE_CODE, INFO: 'LISTENING' });

  return async (event, context) => {
    /*
    LOG_SERVICE.post({
      ROUTE_CODE: 'LAMBDA_EVENT',
      PATH: event.requestContext?.http?.path,
      METHOD: event.requestContext?.http?.method,
    });
    */
    console.log({
      ROUTE_CODE: 'LAMBDA_EVENT',
      PATH: event.requestContext?.http?.path,
      METHOD: event.requestContext?.http?.method,
    });

    //API().ROUTES.routes(true);
    //return await api.run({ event, context });
    return await API().ROUTES.run(event, context);
  };
};
```
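The async function returned here is what the README's index.js exports as `lambdaHandler`. A sketch of invoking it directly with a minimal HTTP API v2-style event (the event shape is an assumption: real API Gateway events carry more fields, and `lambda-api` may require some of them):

```js
const { lambdaHandler } = await import('./index.js');

// Minimal, assumed HTTP API v2-style event (illustrative only):
const event = {
  version: '2.0',
  rawPath: '/todo',
  requestContext: { http: { method: 'OPTIONS', path: '/todo' } },
  headers: {},
  body: null,
};

const result = await lambdaHandler(event, {});
console.log(result.statusCode); // 200 from the catch-all OPTIONS route registered in getNewAPI
```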
package/api/postModule/handler.js
CHANGED

@@ -1,31 +1,31 @@

```js
import fs from 'fs';
import url from 'url';
import path from 'path';

import { PATH_MODULE } from '@minimonolith/lib';

export default async ({ body, SERVICES }) => {
  const moduleName = body.moduleName;
  const srcFolder = body.srcFolder;
  const modulesFolder = body.modulesFolder;

  const projectRoot = PATH_MODULE.getProjectRoot(
    import.meta.url, modulesFolder);
  const projectRootPath = projectRoot + path.sep;
  const projectRelativeServicePath = path.join('.',
    srcFolder, moduleName) + path.sep;
  //console.log('postmodule', srcFolder, projectRelativeServicePath);

  await SERVICES.api.postRawModule.handler({
    moduleName, srcFolder, modulesFolder });
  const moduleURL = new URL(projectRelativeServicePath, projectRootPath);
  //console.log('postmodule', moduleURL.href);
  await SERVICES.model.post.handler({ moduleName, moduleURL });

  const apiServices = await SERVICES.api.getServices.handler();
  for (let serviceName in apiServices[moduleName]) {
    const serviceCode = apiServices[moduleName][serviceName].code;
    //console.log('POST_ROUTE', moduleName, serviceCode, apiServices);
    await SERVICES.service.postRoute.handler({ moduleName, serviceCode });
  }
};
```
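To make the path arithmetic above concrete, a standalone sketch with hypothetical values (plain Node, POSIX separators assumed; `file:///var/task` stands in for whatever `PATH_MODULE.getProjectRoot` actually returns):

```js
import path from 'path';

// Assumed inputs, for illustration only:
const projectRoot = 'file:///var/task'; // stand-in for PATH_MODULE.getProjectRoot(import.meta.url, modulesFolder)
const srcFolder = 'src';
const moduleName = 'todo';

const projectRootPath = projectRoot + path.sep;
const projectRelativeServicePath = path.join('.', srcFolder, moduleName) + path.sep;

const moduleURL = new URL(projectRelativeServicePath, projectRootPath);
console.log(moduleURL.href); // -> file:///var/task/src/todo/
```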