@backstage/backend-test-utils 1.0.1-next.1 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +32 -0
- package/dist/backend-app-api/src/lib/DependencyGraph.cjs.js +182 -0
- package/dist/backend-app-api/src/lib/DependencyGraph.cjs.js.map +1 -0
- package/dist/backend-app-api/src/wiring/ServiceRegistry.cjs.js +240 -0
- package/dist/backend-app-api/src/wiring/ServiceRegistry.cjs.js.map +1 -0
- package/dist/cache/TestCaches.cjs.js +159 -0
- package/dist/cache/TestCaches.cjs.js.map +1 -0
- package/dist/cache/memcache.cjs.js +62 -0
- package/dist/cache/memcache.cjs.js.map +1 -0
- package/dist/cache/redis.cjs.js +62 -0
- package/dist/cache/redis.cjs.js.map +1 -0
- package/dist/cache/types.cjs.js +25 -0
- package/dist/cache/types.cjs.js.map +1 -0
- package/dist/database/TestDatabases.cjs.js +128 -0
- package/dist/database/TestDatabases.cjs.js.map +1 -0
- package/dist/database/mysql.cjs.js +188 -0
- package/dist/database/mysql.cjs.js.map +1 -0
- package/dist/database/postgres.cjs.js +143 -0
- package/dist/database/postgres.cjs.js.map +1 -0
- package/dist/database/sqlite.cjs.js +40 -0
- package/dist/database/sqlite.cjs.js.map +1 -0
- package/dist/database/types.cjs.js +68 -0
- package/dist/database/types.cjs.js.map +1 -0
- package/dist/filesystem/MockDirectory.cjs.js +173 -0
- package/dist/filesystem/MockDirectory.cjs.js.map +1 -0
- package/dist/index.cjs.js +25 -2327
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +44 -2
- package/dist/msw/registerMswTestHooks.cjs.js +10 -0
- package/dist/msw/registerMswTestHooks.cjs.js.map +1 -0
- package/dist/next/services/MockAuthService.cjs.js +111 -0
- package/dist/next/services/MockAuthService.cjs.js.map +1 -0
- package/dist/next/services/MockHttpAuthService.cjs.js +87 -0
- package/dist/next/services/MockHttpAuthService.cjs.js.map +1 -0
- package/dist/next/services/MockRootLoggerService.cjs.js +49 -0
- package/dist/next/services/MockRootLoggerService.cjs.js.map +1 -0
- package/dist/next/services/MockUserInfoService.cjs.js +26 -0
- package/dist/next/services/MockUserInfoService.cjs.js.map +1 -0
- package/dist/next/services/mockCredentials.cjs.js +148 -0
- package/dist/next/services/mockCredentials.cjs.js.map +1 -0
- package/dist/next/services/mockServices.cjs.js +294 -0
- package/dist/next/services/mockServices.cjs.js.map +1 -0
- package/dist/next/wiring/ServiceFactoryTester.cjs.js +61 -0
- package/dist/next/wiring/ServiceFactoryTester.cjs.js.map +1 -0
- package/dist/next/wiring/TestBackend.cjs.js +258 -0
- package/dist/next/wiring/TestBackend.cjs.js.map +1 -0
- package/dist/util/errorHandler.cjs.js +18 -0
- package/dist/util/errorHandler.cjs.js.map +1 -0
- package/dist/util/getDockerImageForName.cjs.js +8 -0
- package/dist/util/getDockerImageForName.cjs.js.map +1 -0
- package/dist/util/isDockerDisabledForTests.cjs.js +8 -0
- package/dist/util/isDockerDisabledForTests.cjs.js.map +1 -0
- package/package.json +11 -11
package/dist/index.cjs.js
CHANGED
|
@@ -1,2332 +1,30 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
var
|
|
4
|
-
var
|
|
5
|
-
var
|
|
6
|
-
var
|
|
7
|
-
var
|
|
8
|
-
var
|
|
9
|
-
var
|
|
10
|
-
var
|
|
11
|
-
var
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
var permissions = require('@backstage/backend-defaults/permissions');
|
|
25
|
-
var rootHealth = require('@backstage/backend-defaults/rootHealth');
|
|
26
|
-
var rootHttpRouter = require('@backstage/backend-defaults/rootHttpRouter');
|
|
27
|
-
var rootLifecycle = require('@backstage/backend-defaults/rootLifecycle');
|
|
28
|
-
var scheduler = require('@backstage/backend-defaults/scheduler');
|
|
29
|
-
var urlReader = require('@backstage/backend-defaults/urlReader');
|
|
30
|
-
var config = require('@backstage/config');
|
|
31
|
-
var pluginEventsNode = require('@backstage/plugin-events-node');
|
|
32
|
-
var cookie = require('cookie');
|
|
33
|
-
var express = require('express');
|
|
34
|
-
|
|
35
|
-
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
|
|
36
|
-
|
|
37
|
-
var Keyv__default = /*#__PURE__*/_interopDefaultCompat(Keyv);
|
|
38
|
-
var KeyvMemcache__default = /*#__PURE__*/_interopDefaultCompat(KeyvMemcache);
|
|
39
|
-
var KeyvRedis__default = /*#__PURE__*/_interopDefaultCompat(KeyvRedis);
|
|
40
|
-
var knexFactory__default = /*#__PURE__*/_interopDefaultCompat(knexFactory);
|
|
41
|
-
var yn__default = /*#__PURE__*/_interopDefaultCompat(yn);
|
|
42
|
-
var os__default = /*#__PURE__*/_interopDefaultCompat(os);
|
|
43
|
-
var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
|
|
44
|
-
var textextensions__default = /*#__PURE__*/_interopDefaultCompat(textextensions);
|
|
45
|
-
var express__default = /*#__PURE__*/_interopDefaultCompat(express);
|
|
46
|
-
|
|
47
|
-
function isDockerDisabledForTests() {
|
|
48
|
-
return Boolean(process.env.BACKSTAGE_TEST_DISABLE_DOCKER) || !Boolean(process.env.CI);
|
|
49
|
-
}
|
|
50
|
-
|
|
51
|
-
async function attemptMemcachedConnection(connection) {
|
|
52
|
-
const startTime = Date.now();
|
|
53
|
-
for (; ; ) {
|
|
54
|
-
try {
|
|
55
|
-
const store = new KeyvMemcache__default.default(connection);
|
|
56
|
-
const keyv = new Keyv__default.default({ store });
|
|
57
|
-
const value = uuid.v4();
|
|
58
|
-
await keyv.set("test", value);
|
|
59
|
-
if (await keyv.get("test") === value) {
|
|
60
|
-
return keyv;
|
|
61
|
-
}
|
|
62
|
-
} catch (e) {
|
|
63
|
-
if (Date.now() - startTime > 3e4) {
|
|
64
|
-
throw new Error(
|
|
65
|
-
`Timed out waiting for memcached to be ready for connections, ${e}`
|
|
66
|
-
);
|
|
67
|
-
}
|
|
68
|
-
}
|
|
69
|
-
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
70
|
-
}
|
|
71
|
-
}
|
|
72
|
-
async function connectToExternalMemcache(connection) {
|
|
73
|
-
const keyv = await attemptMemcachedConnection(connection);
|
|
74
|
-
return {
|
|
75
|
-
store: "memcache",
|
|
76
|
-
connection,
|
|
77
|
-
keyv,
|
|
78
|
-
stop: async () => await keyv.disconnect()
|
|
79
|
-
};
|
|
80
|
-
}
|
|
81
|
-
async function startMemcachedContainer(image) {
|
|
82
|
-
const { GenericContainer } = await import('testcontainers');
|
|
83
|
-
const container = await new GenericContainer(image).withExposedPorts(11211).start();
|
|
84
|
-
const host = container.getHost();
|
|
85
|
-
const port = container.getMappedPort(11211);
|
|
86
|
-
const connection = `${host}:${port}`;
|
|
87
|
-
const keyv = await attemptMemcachedConnection(connection);
|
|
88
|
-
return {
|
|
89
|
-
store: "memcache",
|
|
90
|
-
connection,
|
|
91
|
-
keyv,
|
|
92
|
-
stop: async () => {
|
|
93
|
-
await keyv.disconnect();
|
|
94
|
-
await container.stop({ timeout: 1e4 });
|
|
95
|
-
}
|
|
96
|
-
};
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
async function attemptRedisConnection(connection) {
|
|
100
|
-
const startTime = Date.now();
|
|
101
|
-
for (; ; ) {
|
|
102
|
-
try {
|
|
103
|
-
const store = new KeyvRedis__default.default(connection);
|
|
104
|
-
const keyv = new Keyv__default.default({ store });
|
|
105
|
-
const value = uuid.v4();
|
|
106
|
-
await keyv.set("test", value);
|
|
107
|
-
if (await keyv.get("test") === value) {
|
|
108
|
-
return keyv;
|
|
109
|
-
}
|
|
110
|
-
} catch (e) {
|
|
111
|
-
if (Date.now() - startTime > 3e4) {
|
|
112
|
-
throw new Error(
|
|
113
|
-
`Timed out waiting for redis to be ready for connections, ${e}`
|
|
114
|
-
);
|
|
115
|
-
}
|
|
116
|
-
}
|
|
117
|
-
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
118
|
-
}
|
|
119
|
-
}
|
|
120
|
-
async function connectToExternalRedis(connection) {
|
|
121
|
-
const keyv = await attemptRedisConnection(connection);
|
|
122
|
-
return {
|
|
123
|
-
store: "redis",
|
|
124
|
-
connection,
|
|
125
|
-
keyv,
|
|
126
|
-
stop: async () => await keyv.disconnect()
|
|
127
|
-
};
|
|
128
|
-
}
|
|
129
|
-
async function startRedisContainer(image) {
|
|
130
|
-
const { GenericContainer } = await import('testcontainers');
|
|
131
|
-
const container = await new GenericContainer(image).withExposedPorts(6379).start();
|
|
132
|
-
const host = container.getHost();
|
|
133
|
-
const port = container.getMappedPort(6379);
|
|
134
|
-
const connection = `redis://${host}:${port}`;
|
|
135
|
-
const keyv = await attemptRedisConnection(connection);
|
|
136
|
-
return {
|
|
137
|
-
store: "redis",
|
|
138
|
-
connection,
|
|
139
|
-
keyv,
|
|
140
|
-
stop: async () => {
|
|
141
|
-
await keyv.disconnect();
|
|
142
|
-
await container.stop({ timeout: 1e4 });
|
|
143
|
-
}
|
|
144
|
-
};
|
|
145
|
-
}
|
|
146
|
-
|
|
147
|
-
const getDockerImageForName = (name) => {
|
|
148
|
-
return process.env.BACKSTAGE_TEST_DOCKER_REGISTRY ? `${process.env.BACKSTAGE_TEST_DOCKER_REGISTRY}/${name}` : name;
|
|
149
|
-
};
|
|
150
|
-
|
|
151
|
-
const allCaches = Object.freeze({
|
|
152
|
-
REDIS_7: {
|
|
153
|
-
name: "Redis 7.x",
|
|
154
|
-
store: "redis",
|
|
155
|
-
dockerImageName: getDockerImageForName("redis:7"),
|
|
156
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_CACHE_REDIS7_CONNECTION_STRING"
|
|
157
|
-
},
|
|
158
|
-
MEMCACHED_1: {
|
|
159
|
-
name: "Memcached 1.x",
|
|
160
|
-
store: "memcache",
|
|
161
|
-
dockerImageName: getDockerImageForName("memcached:1"),
|
|
162
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_CACHE_MEMCACHED1_CONNECTION_STRING"
|
|
163
|
-
},
|
|
164
|
-
MEMORY: {
|
|
165
|
-
name: "In-memory",
|
|
166
|
-
store: "memory"
|
|
167
|
-
}
|
|
3
|
+
var TestCaches = require('./cache/TestCaches.cjs.js');
|
|
4
|
+
var TestDatabases = require('./database/TestDatabases.cjs.js');
|
|
5
|
+
var registerMswTestHooks = require('./msw/registerMswTestHooks.cjs.js');
|
|
6
|
+
var MockDirectory = require('./filesystem/MockDirectory.cjs.js');
|
|
7
|
+
var ServiceFactoryTester = require('./next/wiring/ServiceFactoryTester.cjs.js');
|
|
8
|
+
var TestBackend = require('./next/wiring/TestBackend.cjs.js');
|
|
9
|
+
var mockServices = require('./next/services/mockServices.cjs.js');
|
|
10
|
+
var mockCredentials = require('./next/services/mockCredentials.cjs.js');
|
|
11
|
+
var errorHandler = require('./util/errorHandler.cjs.js');
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
exports.TestCaches = TestCaches.TestCaches;
|
|
16
|
+
exports.TestDatabases = TestDatabases.TestDatabases;
|
|
17
|
+
exports.registerMswTestHooks = registerMswTestHooks.registerMswTestHooks;
|
|
18
|
+
exports.createMockDirectory = MockDirectory.createMockDirectory;
|
|
19
|
+
exports.ServiceFactoryTester = ServiceFactoryTester.ServiceFactoryTester;
|
|
20
|
+
exports.startTestBackend = TestBackend.startTestBackend;
|
|
21
|
+
Object.defineProperty(exports, "mockServices", {
|
|
22
|
+
enumerable: true,
|
|
23
|
+
get: function () { return mockServices.mockServices; }
|
|
168
24
|
});
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
supportedIds;
|
|
173
|
-
static defaultIds;
|
|
174
|
-
/**
|
|
175
|
-
* Creates an empty `TestCaches` instance, and sets up Jest to clean up all of
|
|
176
|
-
* its acquired resources after all tests finish.
|
|
177
|
-
*
|
|
178
|
-
* You typically want to create just a single instance like this at the top of
|
|
179
|
-
* your test file or `describe` block, and then call `init` many times on that
|
|
180
|
-
* instance inside the individual tests. Spinning up a "physical" cache
|
|
181
|
-
* instance takes a considerable amount of time, slowing down tests. But
|
|
182
|
-
* wiping the contents of an instance using `init` is very fast.
|
|
183
|
-
*/
|
|
184
|
-
static create(options) {
|
|
185
|
-
const ids = options?.ids;
|
|
186
|
-
const disableDocker = options?.disableDocker ?? isDockerDisabledForTests();
|
|
187
|
-
let testCacheIds;
|
|
188
|
-
if (ids) {
|
|
189
|
-
testCacheIds = ids;
|
|
190
|
-
} else if (TestCaches.defaultIds) {
|
|
191
|
-
testCacheIds = TestCaches.defaultIds;
|
|
192
|
-
} else {
|
|
193
|
-
testCacheIds = Object.keys(allCaches);
|
|
194
|
-
}
|
|
195
|
-
const supportedIds = testCacheIds.filter((id) => {
|
|
196
|
-
const properties = allCaches[id];
|
|
197
|
-
if (!properties) {
|
|
198
|
-
return false;
|
|
199
|
-
}
|
|
200
|
-
if (properties.connectionStringEnvironmentVariableName && process.env[properties.connectionStringEnvironmentVariableName]) {
|
|
201
|
-
return true;
|
|
202
|
-
}
|
|
203
|
-
if (!properties.dockerImageName) {
|
|
204
|
-
return true;
|
|
205
|
-
}
|
|
206
|
-
if (disableDocker) {
|
|
207
|
-
return false;
|
|
208
|
-
}
|
|
209
|
-
return true;
|
|
210
|
-
});
|
|
211
|
-
const caches = new TestCaches(supportedIds);
|
|
212
|
-
if (supportedIds.length > 0) {
|
|
213
|
-
afterAll(async () => {
|
|
214
|
-
await caches.shutdown();
|
|
215
|
-
});
|
|
216
|
-
}
|
|
217
|
-
return caches;
|
|
218
|
-
}
|
|
219
|
-
static setDefaults(options) {
|
|
220
|
-
TestCaches.defaultIds = options.ids;
|
|
221
|
-
}
|
|
222
|
-
constructor(supportedIds) {
|
|
223
|
-
this.instanceById = /* @__PURE__ */ new Map();
|
|
224
|
-
this.supportedIds = supportedIds;
|
|
225
|
-
}
|
|
226
|
-
supports(id) {
|
|
227
|
-
return this.supportedIds.includes(id);
|
|
228
|
-
}
|
|
229
|
-
eachSupportedId() {
|
|
230
|
-
return this.supportedIds.map((id) => [id]);
|
|
231
|
-
}
|
|
232
|
-
/**
|
|
233
|
-
* Returns a fresh, empty cache for the given driver.
|
|
234
|
-
*
|
|
235
|
-
* @param id - The ID of the cache to use, e.g. 'REDIS_7'
|
|
236
|
-
* @returns Cache connection properties
|
|
237
|
-
*/
|
|
238
|
-
async init(id) {
|
|
239
|
-
const properties = allCaches[id];
|
|
240
|
-
if (!properties) {
|
|
241
|
-
const candidates = Object.keys(allCaches).join(", ");
|
|
242
|
-
throw new Error(
|
|
243
|
-
`Unknown test cache ${id}, possible values are ${candidates}`
|
|
244
|
-
);
|
|
245
|
-
}
|
|
246
|
-
if (!this.supportedIds.includes(id)) {
|
|
247
|
-
const candidates = this.supportedIds.join(", ");
|
|
248
|
-
throw new Error(
|
|
249
|
-
`Unsupported test cache ${id} for this environment, possible values are ${candidates}`
|
|
250
|
-
);
|
|
251
|
-
}
|
|
252
|
-
let instance = this.instanceById.get(id);
|
|
253
|
-
if (!instance) {
|
|
254
|
-
instance = await this.initAny(properties);
|
|
255
|
-
this.instanceById.set(id, instance);
|
|
256
|
-
}
|
|
257
|
-
await instance.keyv.clear();
|
|
258
|
-
return {
|
|
259
|
-
store: instance.store,
|
|
260
|
-
connection: instance.connection,
|
|
261
|
-
keyv: instance.keyv
|
|
262
|
-
};
|
|
263
|
-
}
|
|
264
|
-
async initAny(properties) {
|
|
265
|
-
switch (properties.store) {
|
|
266
|
-
case "memcache":
|
|
267
|
-
return this.initMemcached(properties);
|
|
268
|
-
case "redis":
|
|
269
|
-
return this.initRedis(properties);
|
|
270
|
-
case "memory":
|
|
271
|
-
return {
|
|
272
|
-
store: "memory",
|
|
273
|
-
connection: "memory",
|
|
274
|
-
keyv: new Keyv__default.default(),
|
|
275
|
-
stop: async () => {
|
|
276
|
-
}
|
|
277
|
-
};
|
|
278
|
-
default:
|
|
279
|
-
throw new Error(`Unknown cache store '${properties.store}'`);
|
|
280
|
-
}
|
|
281
|
-
}
|
|
282
|
-
async initMemcached(properties) {
|
|
283
|
-
const envVarName = properties.connectionStringEnvironmentVariableName;
|
|
284
|
-
if (envVarName) {
|
|
285
|
-
const connectionString = process.env[envVarName];
|
|
286
|
-
if (connectionString) {
|
|
287
|
-
return connectToExternalMemcache(connectionString);
|
|
288
|
-
}
|
|
289
|
-
}
|
|
290
|
-
return await startMemcachedContainer(properties.dockerImageName);
|
|
291
|
-
}
|
|
292
|
-
async initRedis(properties) {
|
|
293
|
-
const envVarName = properties.connectionStringEnvironmentVariableName;
|
|
294
|
-
if (envVarName) {
|
|
295
|
-
const connectionString = process.env[envVarName];
|
|
296
|
-
if (connectionString) {
|
|
297
|
-
return connectToExternalRedis(connectionString);
|
|
298
|
-
}
|
|
299
|
-
}
|
|
300
|
-
return await startRedisContainer(properties.dockerImageName);
|
|
301
|
-
}
|
|
302
|
-
async shutdown() {
|
|
303
|
-
const instances = [...this.instanceById.values()];
|
|
304
|
-
this.instanceById.clear();
|
|
305
|
-
await Promise.all(
|
|
306
|
-
instances.map(
|
|
307
|
-
({ stop }) => stop().catch((error) => {
|
|
308
|
-
console.warn(`TestCaches: Failed to stop container`, { error });
|
|
309
|
-
})
|
|
310
|
-
)
|
|
311
|
-
);
|
|
312
|
-
}
|
|
313
|
-
}
|
|
314
|
-
|
|
315
|
-
const allDatabases = Object.freeze({
|
|
316
|
-
POSTGRES_16: {
|
|
317
|
-
name: "Postgres 16.x",
|
|
318
|
-
driver: "pg",
|
|
319
|
-
dockerImageName: getDockerImageForName("postgres:16"),
|
|
320
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_POSTGRES16_CONNECTION_STRING"
|
|
321
|
-
},
|
|
322
|
-
POSTGRES_15: {
|
|
323
|
-
name: "Postgres 15.x",
|
|
324
|
-
driver: "pg",
|
|
325
|
-
dockerImageName: getDockerImageForName("postgres:15"),
|
|
326
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_POSTGRES15_CONNECTION_STRING"
|
|
327
|
-
},
|
|
328
|
-
POSTGRES_14: {
|
|
329
|
-
name: "Postgres 14.x",
|
|
330
|
-
driver: "pg",
|
|
331
|
-
dockerImageName: getDockerImageForName("postgres:14"),
|
|
332
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_POSTGRES14_CONNECTION_STRING"
|
|
333
|
-
},
|
|
334
|
-
POSTGRES_13: {
|
|
335
|
-
name: "Postgres 13.x",
|
|
336
|
-
driver: "pg",
|
|
337
|
-
dockerImageName: getDockerImageForName("postgres:13"),
|
|
338
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_POSTGRES13_CONNECTION_STRING"
|
|
339
|
-
},
|
|
340
|
-
POSTGRES_12: {
|
|
341
|
-
name: "Postgres 12.x",
|
|
342
|
-
driver: "pg",
|
|
343
|
-
dockerImageName: getDockerImageForName("postgres:12"),
|
|
344
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_POSTGRES12_CONNECTION_STRING"
|
|
345
|
-
},
|
|
346
|
-
POSTGRES_11: {
|
|
347
|
-
name: "Postgres 11.x",
|
|
348
|
-
driver: "pg",
|
|
349
|
-
dockerImageName: getDockerImageForName("postgres:11"),
|
|
350
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_POSTGRES11_CONNECTION_STRING"
|
|
351
|
-
},
|
|
352
|
-
POSTGRES_9: {
|
|
353
|
-
name: "Postgres 9.x",
|
|
354
|
-
driver: "pg",
|
|
355
|
-
dockerImageName: getDockerImageForName("postgres:9"),
|
|
356
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_POSTGRES9_CONNECTION_STRING"
|
|
357
|
-
},
|
|
358
|
-
MYSQL_8: {
|
|
359
|
-
name: "MySQL 8.x",
|
|
360
|
-
driver: "mysql2",
|
|
361
|
-
dockerImageName: getDockerImageForName("mysql:8"),
|
|
362
|
-
connectionStringEnvironmentVariableName: "BACKSTAGE_TEST_DATABASE_MYSQL8_CONNECTION_STRING"
|
|
363
|
-
},
|
|
364
|
-
SQLITE_3: {
|
|
365
|
-
name: "SQLite 3.x",
|
|
366
|
-
driver: "better-sqlite3"
|
|
367
|
-
}
|
|
25
|
+
Object.defineProperty(exports, "mockCredentials", {
|
|
26
|
+
enumerable: true,
|
|
27
|
+
get: function () { return mockCredentials.mockCredentials; }
|
|
368
28
|
});
|
|
369
|
-
|
|
370
|
-
pool: {
|
|
371
|
-
min: 0,
|
|
372
|
-
max: 50
|
|
373
|
-
}
|
|
374
|
-
};
|
|
375
|
-
|
|
376
|
-
async function waitForMysqlReady(connection) {
|
|
377
|
-
const startTime = Date.now();
|
|
378
|
-
let lastError;
|
|
379
|
-
let attempts = 0;
|
|
380
|
-
for (; ; ) {
|
|
381
|
-
attempts += 1;
|
|
382
|
-
let knex;
|
|
383
|
-
try {
|
|
384
|
-
knex = knexFactory__default.default({
|
|
385
|
-
client: "mysql2",
|
|
386
|
-
connection: {
|
|
387
|
-
// make a copy because the driver mutates this
|
|
388
|
-
...connection
|
|
389
|
-
}
|
|
390
|
-
});
|
|
391
|
-
const result = await knex.select(knex.raw("version() AS version"));
|
|
392
|
-
if (Array.isArray(result) && result[0]?.version) {
|
|
393
|
-
return;
|
|
394
|
-
}
|
|
395
|
-
} catch (e) {
|
|
396
|
-
lastError = e;
|
|
397
|
-
} finally {
|
|
398
|
-
await knex?.destroy();
|
|
399
|
-
}
|
|
400
|
-
if (Date.now() - startTime > 3e4) {
|
|
401
|
-
throw new Error(
|
|
402
|
-
`Timed out waiting for the database to be ready for connections, ${attempts} attempts, ${lastError ? `last error was ${errors.stringifyError(lastError)}` : "(no errors thrown)"}`
|
|
403
|
-
);
|
|
404
|
-
}
|
|
405
|
-
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
406
|
-
}
|
|
407
|
-
}
|
|
408
|
-
async function startMysqlContainer(image) {
|
|
409
|
-
const user = "root";
|
|
410
|
-
const password = uuid.v4();
|
|
411
|
-
const { GenericContainer } = await import('testcontainers');
|
|
412
|
-
const container = await new GenericContainer(image).withExposedPorts(3306).withEnvironment({ MYSQL_ROOT_PASSWORD: password }).withTmpFs({ "/var/lib/mysql": "rw" }).start();
|
|
413
|
-
const host = container.getHost();
|
|
414
|
-
const port = container.getMappedPort(3306);
|
|
415
|
-
const connection = { host, port, user, password };
|
|
416
|
-
const stopContainer = async () => {
|
|
417
|
-
await container.stop({ timeout: 1e4 });
|
|
418
|
-
};
|
|
419
|
-
await waitForMysqlReady(connection);
|
|
420
|
-
return { connection, stopContainer };
|
|
421
|
-
}
|
|
422
|
-
function parseMysqlConnectionString(connectionString) {
|
|
423
|
-
try {
|
|
424
|
-
const {
|
|
425
|
-
protocol,
|
|
426
|
-
username,
|
|
427
|
-
password,
|
|
428
|
-
port,
|
|
429
|
-
hostname,
|
|
430
|
-
pathname,
|
|
431
|
-
searchParams
|
|
432
|
-
} = new URL(connectionString);
|
|
433
|
-
if (protocol !== "mysql:") {
|
|
434
|
-
throw new Error(`Unknown protocol ${protocol}`);
|
|
435
|
-
} else if (!username || !password) {
|
|
436
|
-
throw new Error(`Missing username/password`);
|
|
437
|
-
} else if (!pathname.match(/^\/[^/]+$/)) {
|
|
438
|
-
throw new Error(`Expected single path segment`);
|
|
439
|
-
}
|
|
440
|
-
const result = {
|
|
441
|
-
user: username,
|
|
442
|
-
password,
|
|
443
|
-
host: hostname,
|
|
444
|
-
port: Number(port || 3306),
|
|
445
|
-
database: decodeURIComponent(pathname.substring(1))
|
|
446
|
-
};
|
|
447
|
-
const ssl = searchParams.get("ssl");
|
|
448
|
-
if (ssl) {
|
|
449
|
-
result.ssl = ssl;
|
|
450
|
-
}
|
|
451
|
-
const debug = searchParams.get("debug");
|
|
452
|
-
if (debug) {
|
|
453
|
-
result.debug = yn__default.default(debug);
|
|
454
|
-
}
|
|
455
|
-
return result;
|
|
456
|
-
} catch (e) {
|
|
457
|
-
throw new Error(`Error while parsing MySQL connection string, ${e}`, e);
|
|
458
|
-
}
|
|
459
|
-
}
|
|
460
|
-
class MysqlEngine {
|
|
461
|
-
static async create(properties) {
|
|
462
|
-
const { connectionStringEnvironmentVariableName, dockerImageName } = properties;
|
|
463
|
-
if (connectionStringEnvironmentVariableName) {
|
|
464
|
-
const connectionString = process.env[connectionStringEnvironmentVariableName];
|
|
465
|
-
if (connectionString) {
|
|
466
|
-
const connection = parseMysqlConnectionString(connectionString);
|
|
467
|
-
return new MysqlEngine(
|
|
468
|
-
properties,
|
|
469
|
-
connection
|
|
470
|
-
);
|
|
471
|
-
}
|
|
472
|
-
}
|
|
473
|
-
if (dockerImageName) {
|
|
474
|
-
const { connection, stopContainer } = await startMysqlContainer(
|
|
475
|
-
dockerImageName
|
|
476
|
-
);
|
|
477
|
-
return new MysqlEngine(properties, connection, stopContainer);
|
|
478
|
-
}
|
|
479
|
-
throw new Error(`Test databasee for ${properties.name} not configured`);
|
|
480
|
-
}
|
|
481
|
-
#properties;
|
|
482
|
-
#connection;
|
|
483
|
-
#knexInstances;
|
|
484
|
-
#databaseNames;
|
|
485
|
-
#stopContainer;
|
|
486
|
-
constructor(properties, connection, stopContainer) {
|
|
487
|
-
this.#properties = properties;
|
|
488
|
-
this.#connection = connection;
|
|
489
|
-
this.#knexInstances = [];
|
|
490
|
-
this.#databaseNames = [];
|
|
491
|
-
this.#stopContainer = stopContainer;
|
|
492
|
-
}
|
|
493
|
-
async createDatabaseInstance() {
|
|
494
|
-
const adminConnection = this.#connectAdmin();
|
|
495
|
-
try {
|
|
496
|
-
const databaseName = `db${crypto.randomBytes(16).toString("hex")}`;
|
|
497
|
-
await adminConnection.raw("CREATE DATABASE ??", [databaseName]);
|
|
498
|
-
this.#databaseNames.push(databaseName);
|
|
499
|
-
const knexInstance = knexFactory__default.default({
|
|
500
|
-
client: this.#properties.driver,
|
|
501
|
-
connection: {
|
|
502
|
-
...this.#connection,
|
|
503
|
-
database: databaseName
|
|
504
|
-
},
|
|
505
|
-
...LARGER_POOL_CONFIG
|
|
506
|
-
});
|
|
507
|
-
this.#knexInstances.push(knexInstance);
|
|
508
|
-
return knexInstance;
|
|
509
|
-
} finally {
|
|
510
|
-
await adminConnection.destroy();
|
|
511
|
-
}
|
|
512
|
-
}
|
|
513
|
-
async shutdown() {
|
|
514
|
-
for (const instance of this.#knexInstances) {
|
|
515
|
-
await instance.destroy();
|
|
516
|
-
}
|
|
517
|
-
const adminConnection = this.#connectAdmin();
|
|
518
|
-
try {
|
|
519
|
-
for (const databaseName of this.#databaseNames) {
|
|
520
|
-
await adminConnection.raw("DROP DATABASE ??", [databaseName]);
|
|
521
|
-
}
|
|
522
|
-
} finally {
|
|
523
|
-
await adminConnection.destroy();
|
|
524
|
-
}
|
|
525
|
-
await this.#stopContainer?.();
|
|
526
|
-
}
|
|
527
|
-
#connectAdmin() {
|
|
528
|
-
const connection = {
|
|
529
|
-
...this.#connection,
|
|
530
|
-
database: null
|
|
531
|
-
};
|
|
532
|
-
return knexFactory__default.default({
|
|
533
|
-
client: this.#properties.driver,
|
|
534
|
-
connection,
|
|
535
|
-
pool: {
|
|
536
|
-
min: 0,
|
|
537
|
-
max: 1,
|
|
538
|
-
acquireTimeoutMillis: 2e4,
|
|
539
|
-
createTimeoutMillis: 2e4,
|
|
540
|
-
createRetryIntervalMillis: 1e3
|
|
541
|
-
}
|
|
542
|
-
});
|
|
543
|
-
}
|
|
544
|
-
}
|
|
545
|
-
|
|
546
|
-
async function waitForPostgresReady(connection) {
|
|
547
|
-
const startTime = Date.now();
|
|
548
|
-
let lastError;
|
|
549
|
-
let attempts = 0;
|
|
550
|
-
for (; ; ) {
|
|
551
|
-
attempts += 1;
|
|
552
|
-
let knex;
|
|
553
|
-
try {
|
|
554
|
-
knex = knexFactory__default.default({
|
|
555
|
-
client: "pg",
|
|
556
|
-
connection: {
|
|
557
|
-
// make a copy because the driver mutates this
|
|
558
|
-
...connection
|
|
559
|
-
}
|
|
560
|
-
});
|
|
561
|
-
const result = await knex.select(knex.raw("version()"));
|
|
562
|
-
if (Array.isArray(result) && result[0]?.version) {
|
|
563
|
-
return;
|
|
564
|
-
}
|
|
565
|
-
} catch (e) {
|
|
566
|
-
lastError = e;
|
|
567
|
-
} finally {
|
|
568
|
-
await knex?.destroy();
|
|
569
|
-
}
|
|
570
|
-
if (Date.now() - startTime > 3e4) {
|
|
571
|
-
throw new Error(
|
|
572
|
-
`Timed out waiting for the database to be ready for connections, ${attempts} attempts, ${lastError ? `last error was ${errors.stringifyError(lastError)}` : "(no errors thrown)"}`
|
|
573
|
-
);
|
|
574
|
-
}
|
|
575
|
-
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
576
|
-
}
|
|
577
|
-
}
|
|
578
|
-
async function startPostgresContainer(image) {
|
|
579
|
-
const user = "postgres";
|
|
580
|
-
const password = uuid.v4();
|
|
581
|
-
const { GenericContainer } = await import('testcontainers');
|
|
582
|
-
const container = await new GenericContainer(image).withExposedPorts(5432).withEnvironment({ POSTGRES_PASSWORD: password }).withTmpFs({ "/var/lib/postgresql/data": "rw" }).start();
|
|
583
|
-
const host = container.getHost();
|
|
584
|
-
const port = container.getMappedPort(5432);
|
|
585
|
-
const connection = { host, port, user, password };
|
|
586
|
-
const stopContainer = async () => {
|
|
587
|
-
await container.stop({ timeout: 1e4 });
|
|
588
|
-
};
|
|
589
|
-
await waitForPostgresReady(connection);
|
|
590
|
-
return { connection, stopContainer };
|
|
591
|
-
}
|
|
592
|
-
class PostgresEngine {
|
|
593
|
-
static async create(properties) {
|
|
594
|
-
const { connectionStringEnvironmentVariableName, dockerImageName } = properties;
|
|
595
|
-
if (connectionStringEnvironmentVariableName) {
|
|
596
|
-
const connectionString = process.env[connectionStringEnvironmentVariableName];
|
|
597
|
-
if (connectionString) {
|
|
598
|
-
const connection = pgConnectionString.parse(connectionString);
|
|
599
|
-
return new PostgresEngine(
|
|
600
|
-
properties,
|
|
601
|
-
connection
|
|
602
|
-
);
|
|
603
|
-
}
|
|
604
|
-
}
|
|
605
|
-
if (dockerImageName) {
|
|
606
|
-
const { connection, stopContainer } = await startPostgresContainer(
|
|
607
|
-
dockerImageName
|
|
608
|
-
);
|
|
609
|
-
return new PostgresEngine(properties, connection, stopContainer);
|
|
610
|
-
}
|
|
611
|
-
throw new Error(`Test databasee for ${properties.name} not configured`);
|
|
612
|
-
}
|
|
613
|
-
#properties;
|
|
614
|
-
#connection;
|
|
615
|
-
#knexInstances;
|
|
616
|
-
#databaseNames;
|
|
617
|
-
#stopContainer;
|
|
618
|
-
constructor(properties, connection, stopContainer) {
|
|
619
|
-
this.#properties = properties;
|
|
620
|
-
this.#connection = connection;
|
|
621
|
-
this.#knexInstances = [];
|
|
622
|
-
this.#databaseNames = [];
|
|
623
|
-
this.#stopContainer = stopContainer;
|
|
624
|
-
}
|
|
625
|
-
async createDatabaseInstance() {
|
|
626
|
-
const adminConnection = this.#connectAdmin();
|
|
627
|
-
try {
|
|
628
|
-
const databaseName = `db${crypto.randomBytes(16).toString("hex")}`;
|
|
629
|
-
await adminConnection.raw("CREATE DATABASE ??", [databaseName]);
|
|
630
|
-
this.#databaseNames.push(databaseName);
|
|
631
|
-
const knexInstance = knexFactory__default.default({
|
|
632
|
-
client: this.#properties.driver,
|
|
633
|
-
connection: {
|
|
634
|
-
...this.#connection,
|
|
635
|
-
database: databaseName
|
|
636
|
-
},
|
|
637
|
-
...LARGER_POOL_CONFIG
|
|
638
|
-
});
|
|
639
|
-
this.#knexInstances.push(knexInstance);
|
|
640
|
-
return knexInstance;
|
|
641
|
-
} finally {
|
|
642
|
-
await adminConnection.destroy();
|
|
643
|
-
}
|
|
644
|
-
}
|
|
645
|
-
async shutdown() {
|
|
646
|
-
for (const instance of this.#knexInstances) {
|
|
647
|
-
await instance.destroy();
|
|
648
|
-
}
|
|
649
|
-
const adminConnection = this.#connectAdmin();
|
|
650
|
-
try {
|
|
651
|
-
for (const databaseName of this.#databaseNames) {
|
|
652
|
-
await adminConnection.raw("DROP DATABASE ??", [databaseName]);
|
|
653
|
-
}
|
|
654
|
-
} finally {
|
|
655
|
-
await adminConnection.destroy();
|
|
656
|
-
}
|
|
657
|
-
await this.#stopContainer?.();
|
|
658
|
-
}
|
|
659
|
-
#connectAdmin() {
|
|
660
|
-
return knexFactory__default.default({
|
|
661
|
-
client: this.#properties.driver,
|
|
662
|
-
connection: {
|
|
663
|
-
...this.#connection,
|
|
664
|
-
database: "postgres"
|
|
665
|
-
},
|
|
666
|
-
pool: {
|
|
667
|
-
acquireTimeoutMillis: 1e4
|
|
668
|
-
}
|
|
669
|
-
});
|
|
670
|
-
}
|
|
671
|
-
}
|
|
672
|
-
|
|
673
|
-
class SqliteEngine {
|
|
674
|
-
static async create(properties) {
|
|
675
|
-
return new SqliteEngine(properties);
|
|
676
|
-
}
|
|
677
|
-
#properties;
|
|
678
|
-
#instances;
|
|
679
|
-
constructor(properties) {
|
|
680
|
-
this.#properties = properties;
|
|
681
|
-
this.#instances = [];
|
|
682
|
-
}
|
|
683
|
-
async createDatabaseInstance() {
|
|
684
|
-
const instance = knexFactory__default.default({
|
|
685
|
-
client: this.#properties.driver,
|
|
686
|
-
connection: ":memory:",
|
|
687
|
-
useNullAsDefault: true
|
|
688
|
-
});
|
|
689
|
-
instance.client.pool.on("createSuccess", (_eventId, resource) => {
|
|
690
|
-
resource.run("PRAGMA foreign_keys = ON", () => {
|
|
691
|
-
});
|
|
692
|
-
});
|
|
693
|
-
this.#instances.push(instance);
|
|
694
|
-
return instance;
|
|
695
|
-
}
|
|
696
|
-
async shutdown() {
|
|
697
|
-
for (const instance of this.#instances) {
|
|
698
|
-
await instance.destroy();
|
|
699
|
-
}
|
|
700
|
-
}
|
|
701
|
-
}
|
|
702
|
-
|
|
703
|
-
class TestDatabases {
|
|
704
|
-
engineFactoryByDriver = {
|
|
705
|
-
pg: PostgresEngine.create,
|
|
706
|
-
mysql: MysqlEngine.create,
|
|
707
|
-
mysql2: MysqlEngine.create,
|
|
708
|
-
"better-sqlite3": SqliteEngine.create,
|
|
709
|
-
sqlite3: SqliteEngine.create
|
|
710
|
-
};
|
|
711
|
-
engineByTestDatabaseId;
|
|
712
|
-
supportedIds;
|
|
713
|
-
static defaultIds;
|
|
714
|
-
/**
|
|
715
|
-
* Creates an empty `TestDatabases` instance, and sets up Jest to clean up
|
|
716
|
-
* all of its acquired resources after all tests finish.
|
|
717
|
-
*
|
|
718
|
-
* You typically want to create just a single instance like this at the top
|
|
719
|
-
* of your test file or `describe` block, and then call `init` many times on
|
|
720
|
-
* that instance inside the individual tests. Spinning up a "physical"
|
|
721
|
-
* database instance takes a considerable amount of time, slowing down tests.
|
|
722
|
-
* But initializing a new logical database inside that instance using `init`
|
|
723
|
-
* is very fast.
|
|
724
|
-
*/
|
|
725
|
-
static create(options) {
|
|
726
|
-
const ids = options?.ids;
|
|
727
|
-
const disableDocker = options?.disableDocker ?? isDockerDisabledForTests();
|
|
728
|
-
let testDatabaseIds;
|
|
729
|
-
if (ids) {
|
|
730
|
-
testDatabaseIds = ids;
|
|
731
|
-
} else if (TestDatabases.defaultIds) {
|
|
732
|
-
testDatabaseIds = TestDatabases.defaultIds;
|
|
733
|
-
} else {
|
|
734
|
-
testDatabaseIds = Object.keys(allDatabases);
|
|
735
|
-
}
|
|
736
|
-
const supportedIds = testDatabaseIds.filter((id) => {
|
|
737
|
-
const properties = allDatabases[id];
|
|
738
|
-
if (!properties) {
|
|
739
|
-
return false;
|
|
740
|
-
}
|
|
741
|
-
if (properties.connectionStringEnvironmentVariableName && process.env[properties.connectionStringEnvironmentVariableName]) {
|
|
742
|
-
return true;
|
|
743
|
-
}
|
|
744
|
-
if (!properties.dockerImageName) {
|
|
745
|
-
return true;
|
|
746
|
-
}
|
|
747
|
-
if (disableDocker) {
|
|
748
|
-
return false;
|
|
749
|
-
}
|
|
750
|
-
return true;
|
|
751
|
-
});
|
|
752
|
-
const databases = new TestDatabases(supportedIds);
|
|
753
|
-
if (supportedIds.length > 0) {
|
|
754
|
-
afterAll(async () => {
|
|
755
|
-
await databases.shutdown();
|
|
756
|
-
});
|
|
757
|
-
}
|
|
758
|
-
return databases;
|
|
759
|
-
}
|
|
760
|
-
static setDefaults(options) {
|
|
761
|
-
TestDatabases.defaultIds = options.ids;
|
|
762
|
-
}
|
|
763
|
-
constructor(supportedIds) {
|
|
764
|
-
this.engineByTestDatabaseId = /* @__PURE__ */ new Map();
|
|
765
|
-
this.supportedIds = supportedIds;
|
|
766
|
-
}
|
|
767
|
-
supports(id) {
|
|
768
|
-
return this.supportedIds.includes(id);
|
|
769
|
-
}
|
|
770
|
-
eachSupportedId() {
|
|
771
|
-
return this.supportedIds.map((id) => [id]);
|
|
772
|
-
}
|
|
773
|
-
/**
|
|
774
|
-
* Returns a fresh, unique, empty logical database on an instance of the
|
|
775
|
-
* given database ID platform.
|
|
776
|
-
*
|
|
777
|
-
* @param id - The ID of the database platform to use, e.g. 'POSTGRES_13'
|
|
778
|
-
* @returns A `Knex` connection object
|
|
779
|
-
*/
|
|
780
|
-
async init(id) {
|
|
781
|
-
const properties = allDatabases[id];
|
|
782
|
-
if (!properties) {
|
|
783
|
-
const candidates = Object.keys(allDatabases).join(", ");
|
|
784
|
-
throw new Error(
|
|
785
|
-
`Unknown test database ${id}, possible values are ${candidates}`
|
|
786
|
-
);
|
|
787
|
-
}
|
|
788
|
-
if (!this.supportedIds.includes(id)) {
|
|
789
|
-
const candidates = this.supportedIds.join(", ");
|
|
790
|
-
throw new Error(
|
|
791
|
-
`Unsupported test database ${id} for this environment, possible values are ${candidates}`
|
|
792
|
-
);
|
|
793
|
-
}
|
|
794
|
-
let engine = this.engineByTestDatabaseId.get(id);
|
|
795
|
-
if (!engine) {
|
|
796
|
-
const factory = this.engineFactoryByDriver[properties.driver];
|
|
797
|
-
if (!factory) {
|
|
798
|
-
throw new Error(`Unknown database driver ${properties.driver}`);
|
|
799
|
-
}
|
|
800
|
-
engine = await factory(properties);
|
|
801
|
-
this.engineByTestDatabaseId.set(id, engine);
|
|
802
|
-
}
|
|
803
|
-
return await engine.createDatabaseInstance();
|
|
804
|
-
}
|
|
805
|
-
async shutdown() {
|
|
806
|
-
const engines = [...this.engineByTestDatabaseId.values()];
|
|
807
|
-
this.engineByTestDatabaseId.clear();
|
|
808
|
-
for (const engine of engines) {
|
|
809
|
-
try {
|
|
810
|
-
await engine.shutdown();
|
|
811
|
-
} catch (error) {
|
|
812
|
-
console.warn(`TestDatabases: Failed to shutdown engine`, {
|
|
813
|
-
engine,
|
|
814
|
-
error
|
|
815
|
-
});
|
|
816
|
-
}
|
|
817
|
-
}
|
|
818
|
-
}
|
|
819
|
-
}
|
|
820
|
-
|
|
821
|
-
function registerMswTestHooks(worker) {
|
|
822
|
-
beforeAll(() => worker.listen({ onUnhandledRequest: "error" }));
|
|
823
|
-
afterAll(() => worker.close());
|
|
824
|
-
afterEach(() => worker.resetHandlers());
|
|
825
|
-
}
|
|
826
|
-
|
|
827
|
-
const tmpdirMarker = Symbol("os-tmpdir-mock");
|
|
828
|
-
class MockDirectoryImpl {
|
|
829
|
-
#root;
|
|
830
|
-
constructor(root) {
|
|
831
|
-
this.#root = root;
|
|
832
|
-
}
|
|
833
|
-
get path() {
|
|
834
|
-
return this.#root;
|
|
835
|
-
}
|
|
836
|
-
resolve(...paths) {
|
|
837
|
-
return path.resolve(this.#root, ...paths);
|
|
838
|
-
}
|
|
839
|
-
setContent(root) {
|
|
840
|
-
this.remove();
|
|
841
|
-
return this.addContent(root);
|
|
842
|
-
}
|
|
843
|
-
addContent(root) {
|
|
844
|
-
const entries = this.#transformInput(root);
|
|
845
|
-
for (const entry of entries) {
|
|
846
|
-
const fullPath = path.resolve(this.#root, entry.path);
|
|
847
|
-
if (!backendPluginApi.isChildPath(this.#root, fullPath)) {
|
|
848
|
-
throw new Error(
|
|
849
|
-
`Provided path must resolve to a child path of the mock directory, got '${fullPath}'`
|
|
850
|
-
);
|
|
851
|
-
}
|
|
852
|
-
if (entry.type === "dir") {
|
|
853
|
-
fs__default.default.ensureDirSync(fullPath);
|
|
854
|
-
} else if (entry.type === "file") {
|
|
855
|
-
fs__default.default.ensureDirSync(path.dirname(fullPath));
|
|
856
|
-
fs__default.default.writeFileSync(fullPath, entry.content);
|
|
857
|
-
} else if (entry.type === "callback") {
|
|
858
|
-
fs__default.default.ensureDirSync(path.dirname(fullPath));
|
|
859
|
-
entry.callback({
|
|
860
|
-
path: fullPath,
|
|
861
|
-
symlink(target) {
|
|
862
|
-
fs__default.default.symlinkSync(target, fullPath);
|
|
863
|
-
}
|
|
864
|
-
});
|
|
865
|
-
}
|
|
866
|
-
}
|
|
867
|
-
}
|
|
868
|
-
content(options) {
|
|
869
|
-
const shouldReadAsText = (typeof options?.shouldReadAsText === "boolean" ? () => options?.shouldReadAsText : options?.shouldReadAsText) ?? ((path$1) => textextensions__default.default.includes(path.extname(path$1).slice(1)));
|
|
870
|
-
const root = path.resolve(this.#root, options?.path ?? "");
|
|
871
|
-
if (!backendPluginApi.isChildPath(this.#root, root)) {
|
|
872
|
-
throw new Error(
|
|
873
|
-
`Provided path must resolve to a child path of the mock directory, got '${root}'`
|
|
874
|
-
);
|
|
875
|
-
}
|
|
876
|
-
function read(path$1) {
|
|
877
|
-
if (!fs__default.default.pathExistsSync(path$1)) {
|
|
878
|
-
return void 0;
|
|
879
|
-
}
|
|
880
|
-
const entries = fs__default.default.readdirSync(path$1, { withFileTypes: true });
|
|
881
|
-
return Object.fromEntries(
|
|
882
|
-
entries.map((entry) => {
|
|
883
|
-
const fullPath = path.resolve(path$1, entry.name);
|
|
884
|
-
if (entry.isDirectory()) {
|
|
885
|
-
return [entry.name, read(fullPath)];
|
|
886
|
-
}
|
|
887
|
-
const content = fs__default.default.readFileSync(fullPath);
|
|
888
|
-
const relativePosixPath = path.relative(root, fullPath).split(path.win32.sep).join(path.posix.sep);
|
|
889
|
-
if (shouldReadAsText(relativePosixPath, content)) {
|
|
890
|
-
return [entry.name, content.toString("utf8")];
|
|
891
|
-
}
|
|
892
|
-
return [entry.name, content];
|
|
893
|
-
})
|
|
894
|
-
);
|
|
895
|
-
}
|
|
896
|
-
return read(root);
|
|
897
|
-
}
|
|
898
|
-
clear = () => {
|
|
899
|
-
this.setContent({});
|
|
900
|
-
};
|
|
901
|
-
remove = () => {
|
|
902
|
-
fs__default.default.rmSync(this.#root, { recursive: true, force: true, maxRetries: 10 });
|
|
903
|
-
};
|
|
904
|
-
#transformInput(input) {
|
|
905
|
-
const entries = [];
|
|
906
|
-
function traverse(node, path) {
|
|
907
|
-
if (typeof node === "string") {
|
|
908
|
-
entries.push({
|
|
909
|
-
type: "file",
|
|
910
|
-
path,
|
|
911
|
-
content: Buffer.from(node, "utf8")
|
|
912
|
-
});
|
|
913
|
-
} else if (node instanceof Buffer) {
|
|
914
|
-
entries.push({ type: "file", path, content: node });
|
|
915
|
-
} else if (typeof node === "function") {
|
|
916
|
-
entries.push({ type: "callback", path, callback: node });
|
|
917
|
-
} else {
|
|
918
|
-
entries.push({ type: "dir", path });
|
|
919
|
-
for (const [name, child] of Object.entries(node)) {
|
|
920
|
-
traverse(child, path ? `${path}/${name}` : name);
|
|
921
|
-
}
|
|
922
|
-
}
|
|
923
|
-
}
|
|
924
|
-
traverse(input, "");
|
|
925
|
-
return entries;
|
|
926
|
-
}
|
|
927
|
-
}
|
|
928
|
-
function createMockDirectory(options) {
|
|
929
|
-
const tmpDir = process.env.RUNNER_TEMP || os__default.default.tmpdir();
|
|
930
|
-
const root = fs__default.default.mkdtempSync(path.join(tmpDir, "backstage-tmp-test-dir-"));
|
|
931
|
-
const mocker = new MockDirectoryImpl(root);
|
|
932
|
-
const origTmpdir = options?.mockOsTmpDir ? os__default.default.tmpdir : void 0;
|
|
933
|
-
if (origTmpdir) {
|
|
934
|
-
if (Object.hasOwn(origTmpdir, tmpdirMarker)) {
|
|
935
|
-
throw new Error(
|
|
936
|
-
"Cannot mock os.tmpdir() when it has already been mocked"
|
|
937
|
-
);
|
|
938
|
-
}
|
|
939
|
-
const mock = Object.assign(() => mocker.path, { [tmpdirMarker]: true });
|
|
940
|
-
os__default.default.tmpdir = mock;
|
|
941
|
-
}
|
|
942
|
-
const needsCleanup = !process.env.CI;
|
|
943
|
-
if (needsCleanup) {
|
|
944
|
-
process.on("beforeExit", mocker.remove);
|
|
945
|
-
}
|
|
946
|
-
try {
|
|
947
|
-
afterAll(() => {
|
|
948
|
-
if (origTmpdir) {
|
|
949
|
-
os__default.default.tmpdir = origTmpdir;
|
|
950
|
-
}
|
|
951
|
-
if (needsCleanup) {
|
|
952
|
-
mocker.remove();
|
|
953
|
-
}
|
|
954
|
-
});
|
|
955
|
-
} catch {
|
|
956
|
-
}
|
|
957
|
-
if (options?.content) {
|
|
958
|
-
mocker.setContent(options.content);
|
|
959
|
-
}
|
|
960
|
-
return mocker;
|
|
961
|
-
}
|
|
962
|
-
|
|
963
|
-
const DEFAULT_MOCK_USER_ENTITY_REF = "user:default/mock";
|
|
964
|
-
const DEFAULT_MOCK_SERVICE_SUBJECT = "external:test-service";
|
|
965
|
-
const MOCK_AUTH_COOKIE = "backstage-auth";
|
|
966
|
-
const MOCK_NONE_TOKEN = "mock-none-token";
|
|
967
|
-
const MOCK_USER_TOKEN = "mock-user-token";
|
|
968
|
-
const MOCK_USER_TOKEN_PREFIX = "mock-user-token:";
|
|
969
|
-
const MOCK_INVALID_USER_TOKEN = "mock-invalid-user-token";
|
|
970
|
-
const MOCK_USER_LIMITED_TOKEN_PREFIX = "mock-limited-user-token:";
|
|
971
|
-
const MOCK_INVALID_USER_LIMITED_TOKEN = "mock-invalid-limited-user-token";
|
|
972
|
-
const MOCK_SERVICE_TOKEN = "mock-service-token";
|
|
973
|
-
const MOCK_SERVICE_TOKEN_PREFIX = "mock-service-token:";
|
|
974
|
-
const MOCK_INVALID_SERVICE_TOKEN = "mock-invalid-service-token";
|
|
975
|
-
function validateUserEntityRef(ref) {
|
|
976
|
-
if (!ref.match(/^.+:.+\/.+$/)) {
|
|
977
|
-
throw new TypeError(
|
|
978
|
-
`Invalid user entity reference '${ref}', expected <kind>:<namespace>/<name>`
|
|
979
|
-
);
|
|
980
|
-
}
|
|
981
|
-
}
|
|
982
|
-
exports.mockCredentials = void 0;
|
|
983
|
-
((mockCredentials2) => {
|
|
984
|
-
function none() {
|
|
985
|
-
return {
|
|
986
|
-
$$type: "@backstage/BackstageCredentials",
|
|
987
|
-
principal: { type: "none" }
|
|
988
|
-
};
|
|
989
|
-
}
|
|
990
|
-
mockCredentials2.none = none;
|
|
991
|
-
((none2) => {
|
|
992
|
-
function header() {
|
|
993
|
-
return `Bearer ${MOCK_NONE_TOKEN}`;
|
|
994
|
-
}
|
|
995
|
-
none2.header = header;
|
|
996
|
-
})(none = mockCredentials2.none || (mockCredentials2.none = {}));
|
|
997
|
-
function user(userEntityRef = DEFAULT_MOCK_USER_ENTITY_REF) {
|
|
998
|
-
validateUserEntityRef(userEntityRef);
|
|
999
|
-
return {
|
|
1000
|
-
$$type: "@backstage/BackstageCredentials",
|
|
1001
|
-
principal: { type: "user", userEntityRef }
|
|
1002
|
-
};
|
|
1003
|
-
}
|
|
1004
|
-
mockCredentials2.user = user;
|
|
1005
|
-
((user2) => {
|
|
1006
|
-
function token(userEntityRef) {
|
|
1007
|
-
if (userEntityRef) {
|
|
1008
|
-
validateUserEntityRef(userEntityRef);
|
|
1009
|
-
return `${MOCK_USER_TOKEN_PREFIX}${JSON.stringify({
|
|
1010
|
-
sub: userEntityRef
|
|
1011
|
-
})}`;
|
|
1012
|
-
}
|
|
1013
|
-
return MOCK_USER_TOKEN;
|
|
1014
|
-
}
|
|
1015
|
-
user2.token = token;
|
|
1016
|
-
function header(userEntityRef) {
|
|
1017
|
-
return `Bearer ${token(userEntityRef)}`;
|
|
1018
|
-
}
|
|
1019
|
-
user2.header = header;
|
|
1020
|
-
function invalidToken() {
|
|
1021
|
-
return MOCK_INVALID_USER_TOKEN;
|
|
1022
|
-
}
|
|
1023
|
-
user2.invalidToken = invalidToken;
|
|
1024
|
-
function invalidHeader() {
|
|
1025
|
-
return `Bearer ${invalidToken()}`;
|
|
1026
|
-
}
|
|
1027
|
-
user2.invalidHeader = invalidHeader;
|
|
1028
|
-
})(user = mockCredentials2.user || (mockCredentials2.user = {}));
|
|
1029
|
-
function limitedUser(userEntityRef = DEFAULT_MOCK_USER_ENTITY_REF) {
|
|
1030
|
-
return user(userEntityRef);
|
|
1031
|
-
}
|
|
1032
|
-
mockCredentials2.limitedUser = limitedUser;
|
|
1033
|
-
((limitedUser2) => {
|
|
1034
|
-
function token(userEntityRef = DEFAULT_MOCK_USER_ENTITY_REF) {
|
|
1035
|
-
validateUserEntityRef(userEntityRef);
|
|
1036
|
-
return `${MOCK_USER_LIMITED_TOKEN_PREFIX}${JSON.stringify({
|
|
1037
|
-
sub: userEntityRef
|
|
1038
|
-
})}`;
|
|
1039
|
-
}
|
|
1040
|
-
limitedUser2.token = token;
|
|
1041
|
-
function cookie(userEntityRef) {
|
|
1042
|
-
return `${MOCK_AUTH_COOKIE}=${token(userEntityRef)}`;
|
|
1043
|
-
}
|
|
1044
|
-
limitedUser2.cookie = cookie;
|
|
1045
|
-
function invalidToken() {
|
|
1046
|
-
return MOCK_INVALID_USER_LIMITED_TOKEN;
|
|
1047
|
-
}
|
|
1048
|
-
limitedUser2.invalidToken = invalidToken;
|
|
1049
|
-
function invalidCookie() {
|
|
1050
|
-
return `${MOCK_AUTH_COOKIE}=${invalidToken()}`;
|
|
1051
|
-
}
|
|
1052
|
-
limitedUser2.invalidCookie = invalidCookie;
|
|
1053
|
-
})(limitedUser = mockCredentials2.limitedUser || (mockCredentials2.limitedUser = {}));
|
|
1054
|
-
function service(subject = DEFAULT_MOCK_SERVICE_SUBJECT, accessRestrictions) {
|
|
1055
|
-
return {
|
|
1056
|
-
$$type: "@backstage/BackstageCredentials",
|
|
1057
|
-
principal: {
|
|
1058
|
-
type: "service",
|
|
1059
|
-
subject,
|
|
1060
|
-
...accessRestrictions ? { accessRestrictions } : {}
|
|
1061
|
-
}
|
|
1062
|
-
};
|
|
1063
|
-
}
|
|
1064
|
-
mockCredentials2.service = service;
|
|
1065
|
-
((service2) => {
|
|
1066
|
-
function token(options) {
|
|
1067
|
-
if (options) {
|
|
1068
|
-
const { targetPluginId, onBehalfOf } = options;
|
|
1069
|
-
const oboPrincipal = onBehalfOf?.principal;
|
|
1070
|
-
const obo = oboPrincipal.type === "user" ? oboPrincipal.userEntityRef : void 0;
|
|
1071
|
-
const subject = oboPrincipal.type === "service" ? oboPrincipal.subject : void 0;
|
|
1072
|
-
return `${MOCK_SERVICE_TOKEN_PREFIX}${JSON.stringify({
|
|
1073
|
-
sub: subject,
|
|
1074
|
-
obo,
|
|
1075
|
-
target: targetPluginId
|
|
1076
|
-
})}`;
|
|
1077
|
-
}
|
|
1078
|
-
return MOCK_SERVICE_TOKEN;
|
|
1079
|
-
}
|
|
1080
|
-
service2.token = token;
|
|
1081
|
-
function header(options) {
|
|
1082
|
-
return `Bearer ${token(options)}`;
|
|
1083
|
-
}
|
|
1084
|
-
service2.header = header;
|
|
1085
|
-
function invalidToken() {
|
|
1086
|
-
return MOCK_INVALID_SERVICE_TOKEN;
|
|
1087
|
-
}
|
|
1088
|
-
service2.invalidToken = invalidToken;
|
|
1089
|
-
function invalidHeader() {
|
|
1090
|
-
return `Bearer ${invalidToken()}`;
|
|
1091
|
-
}
|
|
1092
|
-
service2.invalidHeader = invalidHeader;
|
|
1093
|
-
})(service = mockCredentials2.service || (mockCredentials2.service = {}));
|
|
1094
|
-
})(exports.mockCredentials || (exports.mockCredentials = {}));
|
|
1095
|
-
|
|
1096
|
-
class MockAuthService {
|
|
1097
|
-
pluginId;
|
|
1098
|
-
disableDefaultAuthPolicy;
|
|
1099
|
-
constructor(options) {
|
|
1100
|
-
this.pluginId = options.pluginId;
|
|
1101
|
-
this.disableDefaultAuthPolicy = options.disableDefaultAuthPolicy;
|
|
1102
|
-
}
|
|
1103
|
-
async authenticate(token, options) {
|
|
1104
|
-
switch (token) {
|
|
1105
|
-
case MOCK_USER_TOKEN:
|
|
1106
|
-
return exports.mockCredentials.user();
|
|
1107
|
-
case MOCK_SERVICE_TOKEN:
|
|
1108
|
-
return exports.mockCredentials.service();
|
|
1109
|
-
case MOCK_INVALID_USER_TOKEN:
|
|
1110
|
-
throw new errors.AuthenticationError("User token is invalid");
|
|
1111
|
-
case MOCK_INVALID_USER_LIMITED_TOKEN:
|
|
1112
|
-
throw new errors.AuthenticationError("Limited user token is invalid");
|
|
1113
|
-
case MOCK_INVALID_SERVICE_TOKEN:
|
|
1114
|
-
throw new errors.AuthenticationError("Service token is invalid");
|
|
1115
|
-
case "":
|
|
1116
|
-
throw new errors.AuthenticationError("Token is empty");
|
|
1117
|
-
}
|
|
1118
|
-
if (token.startsWith(MOCK_USER_TOKEN_PREFIX)) {
|
|
1119
|
-
const { sub: userEntityRef } = JSON.parse(
|
|
1120
|
-
token.slice(MOCK_USER_TOKEN_PREFIX.length)
|
|
1121
|
-
);
|
|
1122
|
-
return exports.mockCredentials.user(userEntityRef);
|
|
1123
|
-
}
|
|
1124
|
-
if (token.startsWith(MOCK_USER_LIMITED_TOKEN_PREFIX)) {
|
|
1125
|
-
if (!options?.allowLimitedAccess) {
|
|
1126
|
-
throw new errors.AuthenticationError("Limited user token is not allowed");
|
|
1127
|
-
}
|
|
1128
|
-
const { sub: userEntityRef } = JSON.parse(
|
|
1129
|
-
token.slice(MOCK_USER_LIMITED_TOKEN_PREFIX.length)
|
|
1130
|
-
);
|
|
1131
|
-
return exports.mockCredentials.user(userEntityRef);
|
|
1132
|
-
}
|
|
1133
|
-
if (token.startsWith(MOCK_SERVICE_TOKEN_PREFIX)) {
|
|
1134
|
-
const { sub, target, obo } = JSON.parse(
|
|
1135
|
-
token.slice(MOCK_SERVICE_TOKEN_PREFIX.length)
|
|
1136
|
-
);
|
|
1137
|
-
if (target && target !== this.pluginId) {
|
|
1138
|
-
throw new errors.AuthenticationError(
|
|
1139
|
-
`Invalid mock token target plugin ID, got '${target}' but expected '${this.pluginId}'`
|
|
1140
|
-
);
|
|
1141
|
-
}
|
|
1142
|
-
if (obo) {
|
|
1143
|
-
return exports.mockCredentials.user(obo);
|
|
1144
|
-
}
|
|
1145
|
-
return exports.mockCredentials.service(sub);
|
|
1146
|
-
}
|
|
1147
|
-
throw new errors.AuthenticationError(`Unknown mock token '${token}'`);
|
|
1148
|
-
}
|
|
1149
|
-
async getNoneCredentials() {
|
|
1150
|
-
return exports.mockCredentials.none();
|
|
1151
|
-
}
|
|
1152
|
-
async getOwnServiceCredentials() {
|
|
1153
|
-
return exports.mockCredentials.service(`plugin:${this.pluginId}`);
|
|
1154
|
-
}
|
|
1155
|
-
isPrincipal(credentials, type) {
|
|
1156
|
-
const principal = credentials.principal;
|
|
1157
|
-
if (type === "unknown") {
|
|
1158
|
-
return true;
|
|
1159
|
-
}
|
|
1160
|
-
if (principal.type !== type) {
|
|
1161
|
-
return false;
|
|
1162
|
-
}
|
|
1163
|
-
return true;
|
|
1164
|
-
}
|
|
1165
|
-
async getPluginRequestToken(options) {
|
|
1166
|
-
const principal = options.onBehalfOf.principal;
|
|
1167
|
-
if (principal.type === "none" && this.disableDefaultAuthPolicy) {
|
|
1168
|
-
return { token: "" };
|
|
1169
|
-
}
|
|
1170
|
-
if (principal.type !== "user" && principal.type !== "service") {
|
|
1171
|
-
throw new errors.AuthenticationError(
|
|
1172
|
-
`Refused to issue service token for credential type '${principal.type}'`
|
|
1173
|
-
);
|
|
1174
|
-
}
|
|
1175
|
-
return {
|
|
1176
|
-
token: exports.mockCredentials.service.token({
|
|
1177
|
-
onBehalfOf: options.onBehalfOf,
|
|
1178
|
-
targetPluginId: options.targetPluginId
|
|
1179
|
-
})
|
|
1180
|
-
};
|
|
1181
|
-
}
|
|
1182
|
-
async getLimitedUserToken(credentials) {
|
|
1183
|
-
if (credentials.principal.type !== "user") {
|
|
1184
|
-
throw new errors.AuthenticationError(
|
|
1185
|
-
`Refused to issue limited user token for credential type '${credentials.principal.type}'`
|
|
1186
|
-
);
|
|
1187
|
-
}
|
|
1188
|
-
return {
|
|
1189
|
-
token: exports.mockCredentials.limitedUser.token(
|
|
1190
|
-
credentials.principal.userEntityRef
|
|
1191
|
-
),
|
|
1192
|
-
expiresAt: new Date(Date.now() + 36e5)
|
|
1193
|
-
};
|
|
1194
|
-
}
|
|
1195
|
-
listPublicServiceKeys() {
|
|
1196
|
-
throw new Error("Not implemented");
|
|
1197
|
-
}
|
|
1198
|
-
}
|
|
1199
|
-
|
|
1200
|
-
class MockHttpAuthService {
|
|
1201
|
-
#auth;
|
|
1202
|
-
#defaultCredentials;
|
|
1203
|
-
constructor(pluginId, defaultCredentials) {
|
|
1204
|
-
this.#auth = new MockAuthService({
|
|
1205
|
-
pluginId,
|
|
1206
|
-
disableDefaultAuthPolicy: false
|
|
1207
|
-
});
|
|
1208
|
-
this.#defaultCredentials = defaultCredentials;
|
|
1209
|
-
}
|
|
1210
|
-
async #getCredentials(req, allowLimitedAccess) {
|
|
1211
|
-
const header = req.headers.authorization;
|
|
1212
|
-
const token = typeof header === "string" ? header.match(/^Bearer[ ]+(\S+)$/i)?.[1] : void 0;
|
|
1213
|
-
if (token) {
|
|
1214
|
-
if (token === MOCK_NONE_TOKEN) {
|
|
1215
|
-
return this.#auth.getNoneCredentials();
|
|
1216
|
-
}
|
|
1217
|
-
return await this.#auth.authenticate(token, {
|
|
1218
|
-
allowLimitedAccess
|
|
1219
|
-
});
|
|
1220
|
-
}
|
|
1221
|
-
if (allowLimitedAccess) {
|
|
1222
|
-
const cookieHeader = req.headers.cookie;
|
|
1223
|
-
if (cookieHeader) {
|
|
1224
|
-
const cookies = cookie.parse(cookieHeader);
|
|
1225
|
-
const cookie$1 = cookies[MOCK_AUTH_COOKIE];
|
|
1226
|
-
if (cookie$1) {
|
|
1227
|
-
return await this.#auth.authenticate(cookie$1, {
|
|
1228
|
-
allowLimitedAccess: true
|
|
1229
|
-
});
|
|
1230
|
-
}
|
|
1231
|
-
}
|
|
1232
|
-
}
|
|
1233
|
-
return this.#defaultCredentials;
|
|
1234
|
-
}
|
|
1235
|
-
async credentials(req, options) {
|
|
1236
|
-
const credentials = await this.#getCredentials(
|
|
1237
|
-
req,
|
|
1238
|
-
options?.allowLimitedAccess ?? false
|
|
1239
|
-
);
|
|
1240
|
-
const allowedPrincipalTypes = options?.allow;
|
|
1241
|
-
if (!allowedPrincipalTypes) {
|
|
1242
|
-
return credentials;
|
|
1243
|
-
}
|
|
1244
|
-
if (this.#auth.isPrincipal(credentials, "none")) {
|
|
1245
|
-
if (allowedPrincipalTypes.includes("none")) {
|
|
1246
|
-
return credentials;
|
|
1247
|
-
}
|
|
1248
|
-
throw new errors.AuthenticationError("Missing credentials");
|
|
1249
|
-
} else if (this.#auth.isPrincipal(credentials, "user")) {
|
|
1250
|
-
if (allowedPrincipalTypes.includes("user")) {
|
|
1251
|
-
return credentials;
|
|
1252
|
-
}
|
|
1253
|
-
throw new errors.NotAllowedError(
|
|
1254
|
-
`This endpoint does not allow 'user' credentials`
|
|
1255
|
-
);
|
|
1256
|
-
} else if (this.#auth.isPrincipal(credentials, "service")) {
|
|
1257
|
-
if (allowedPrincipalTypes.includes("service")) {
|
|
1258
|
-
return credentials;
|
|
1259
|
-
}
|
|
1260
|
-
throw new errors.NotAllowedError(
|
|
1261
|
-
`This endpoint does not allow 'service' credentials`
|
|
1262
|
-
);
|
|
1263
|
-
}
|
|
1264
|
-
throw new errors.NotAllowedError(
|
|
1265
|
-
"Unknown principal type, this should never happen"
|
|
1266
|
-
);
|
|
1267
|
-
}
|
|
1268
|
-
async issueUserCookie(res, options) {
|
|
1269
|
-
const credentials = options?.credentials ?? await this.credentials(res.req, { allow: ["user"] });
|
|
1270
|
-
res.setHeader(
|
|
1271
|
-
"Set-Cookie",
|
|
1272
|
-
exports.mockCredentials.limitedUser.cookie(credentials.principal.userEntityRef)
|
|
1273
|
-
);
|
|
1274
|
-
return { expiresAt: new Date(Date.now() + 36e5) };
|
|
1275
|
-
}
|
|
1276
|
-
}
|
|
1277
|
-
|
|
1278
|
-
const levels = {
|
|
1279
|
-
none: 0,
|
|
1280
|
-
error: 1,
|
|
1281
|
-
warn: 2,
|
|
1282
|
-
info: 3,
|
|
1283
|
-
debug: 4
|
|
1284
|
-
};
|
|
1285
|
-
class MockRootLoggerService {
|
|
1286
|
-
#level;
|
|
1287
|
-
#meta;
|
|
1288
|
-
static create(options) {
|
|
1289
|
-
const level = options?.level ?? "none";
|
|
1290
|
-
if (!(level in levels)) {
|
|
1291
|
-
throw new Error(`Invalid log level '${level}'`);
|
|
1292
|
-
}
|
|
1293
|
-
return new MockRootLoggerService(levels[level], {});
|
|
1294
|
-
}
|
|
1295
|
-
error(message, meta) {
|
|
1296
|
-
this.#log("error", message, meta);
|
|
1297
|
-
}
|
|
1298
|
-
warn(message, meta) {
|
|
1299
|
-
this.#log("warn", message, meta);
|
|
1300
|
-
}
|
|
1301
|
-
info(message, meta) {
|
|
1302
|
-
this.#log("info", message, meta);
|
|
1303
|
-
}
|
|
1304
|
-
debug(message, meta) {
|
|
1305
|
-
this.#log("debug", message, meta);
|
|
1306
|
-
}
|
|
1307
|
-
child(meta) {
|
|
1308
|
-
return new MockRootLoggerService(this.#level, { ...this.#meta, ...meta });
|
|
1309
|
-
}
|
|
1310
|
-
constructor(level, meta) {
|
|
1311
|
-
this.#level = level;
|
|
1312
|
-
this.#meta = meta;
|
|
1313
|
-
}
|
|
1314
|
-
#log(level, message, meta) {
|
|
1315
|
-
const levelValue = levels[level] ?? 0;
|
|
1316
|
-
if (levelValue <= this.#level) {
|
|
1317
|
-
const labels = Object.entries(this.#meta).map(([key, value]) => `${key}=${value}`).join(",");
|
|
1318
|
-
console[level](`${labels} ${message}`, meta);
|
|
1319
|
-
}
|
|
1320
|
-
}
|
|
1321
|
-
}
|
|
1322 -
1323 -  class MockUserInfoService {
1324 -    customInfo;
1325 -    constructor(customInfo) {
1326 -      this.customInfo = customInfo ?? {};
1327 -    }
1328 -    async getUserInfo(credentials) {
1329 -      const principal = credentials.principal;
1330 -      if (principal.type !== "user") {
1331 -        throw new errors.InputError(
1332 -          `User info not available for principal type '${principal.type}'`
1333 -        );
1334 -      }
1335 -      return {
1336 -        userEntityRef: principal.userEntityRef,
1337 -        ownershipEntityRefs: [principal.userEntityRef],
1338 -        ...this.customInfo
1339 -      };
1340 -    }
1341 -  }
1342 -
1343 -  function createLoggerMock() {
1344 -    return {
1345 -      child: jest.fn().mockImplementation(createLoggerMock),
1346 -      debug: jest.fn(),
1347 -      error: jest.fn(),
1348 -      info: jest.fn(),
1349 -      warn: jest.fn()
1350 -    };
1351 -  }
1352 -  function simpleFactoryWithOptions(ref, factory) {
1353 -    const factoryWithOptions = (...options) => backendPluginApi.createServiceFactory({
1354 -      service: ref,
1355 -      deps: {},
1356 -      async factory() {
1357 -        return factory(...options);
1358 -      }
1359 -    });
1360 -    return Object.assign(
1361 -      factoryWithOptions,
1362 -      factoryWithOptions(...[void 0])
1363 -    );
1364 -  }
1365 -  function simpleMock(ref, mockFactory) {
1366 -    return (partialImpl) => {
1367 -      const mock = mockFactory();
1368 -      if (partialImpl) {
1369 -        for (const [key, impl] of Object.entries(partialImpl)) {
1370 -          if (typeof impl === "function") {
1371 -            mock[key].mockImplementation(impl);
1372 -          } else {
1373 -            mock[key] = impl;
1374 -          }
1375 -        }
1376 -      }
1377 -      return Object.assign(mock, {
1378 -        factory: backendPluginApi.createServiceFactory({
1379 -          service: ref,
1380 -          deps: {},
1381 -          factory: () => mock
1382 -        })
1383 -      });
1384 -    };
1385 -  }
1386 -  exports.mockServices = void 0;
1387 -  ((mockServices2) => {
1388 -    function rootConfig(options) {
1389 -      return new config.ConfigReader(options?.data, "mock-config");
1390 -    }
1391 -    mockServices2.rootConfig = rootConfig;
1392 -    ((rootConfig2) => {
1393 -      rootConfig2.factory = simpleFactoryWithOptions(
1394 -        backendPluginApi.coreServices.rootConfig,
1395 -        rootConfig2
1396 -      );
1397 -      rootConfig2.mock = simpleMock(backendPluginApi.coreServices.rootConfig, () => ({
1398 -        get: jest.fn(),
1399 -        getBoolean: jest.fn(),
1400 -        getConfig: jest.fn(),
1401 -        getConfigArray: jest.fn(),
1402 -        getNumber: jest.fn(),
1403 -        getOptional: jest.fn(),
1404 -        getOptionalBoolean: jest.fn(),
1405 -        getOptionalConfig: jest.fn(),
1406 -        getOptionalConfigArray: jest.fn(),
1407 -        getOptionalNumber: jest.fn(),
1408 -        getOptionalString: jest.fn(),
1409 -        getOptionalStringArray: jest.fn(),
1410 -        getString: jest.fn(),
1411 -        getStringArray: jest.fn(),
1412 -        has: jest.fn(),
1413 -        keys: jest.fn()
1414 -      }));
1415 -    })(rootConfig = mockServices2.rootConfig || (mockServices2.rootConfig = {}));
1416 -    function rootLogger(options) {
1417 -      return MockRootLoggerService.create(options);
1418 -    }
1419 -    mockServices2.rootLogger = rootLogger;
1420 -    ((rootLogger2) => {
1421 -      rootLogger2.factory = simpleFactoryWithOptions(
1422 -        backendPluginApi.coreServices.rootLogger,
1423 -        rootLogger2
1424 -      );
1425 -      rootLogger2.mock = simpleMock(backendPluginApi.coreServices.rootLogger, () => ({
1426 -        child: jest.fn(),
1427 -        debug: jest.fn(),
1428 -        error: jest.fn(),
1429 -        info: jest.fn(),
1430 -        warn: jest.fn()
1431 -      }));
1432 -    })(rootLogger = mockServices2.rootLogger || (mockServices2.rootLogger = {}));
1433 -    function auth(options) {
1434 -      return new MockAuthService({
1435 -        pluginId: options?.pluginId ?? "test",
1436 -        disableDefaultAuthPolicy: Boolean(options?.disableDefaultAuthPolicy)
1437 -      });
1438 -    }
1439 -    mockServices2.auth = auth;
1440 -    ((auth2) => {
1441 -      auth2.factory = () => backendPluginApi.createServiceFactory({
1442 -        service: backendPluginApi.coreServices.auth,
1443 -        deps: {
1444 -          plugin: backendPluginApi.coreServices.pluginMetadata,
1445 -          config: backendPluginApi.coreServices.rootConfig
1446 -        },
1447 -        factory({ plugin, config }) {
1448 -          const disableDefaultAuthPolicy = Boolean(
1449 -            config.getOptionalBoolean(
1450 -              "backend.auth.dangerouslyDisableDefaultAuthPolicy"
1451 -            )
1452 -          );
1453 -          return new MockAuthService({
1454 -            pluginId: plugin.getId(),
1455 -            disableDefaultAuthPolicy
1456 -          });
1457 -        }
1458 -      });
1459 -      auth2.mock = simpleMock(backendPluginApi.coreServices.auth, () => ({
1460 -        authenticate: jest.fn(),
1461 -        getNoneCredentials: jest.fn(),
1462 -        getOwnServiceCredentials: jest.fn(),
1463 -        isPrincipal: jest.fn(),
1464 -        getPluginRequestToken: jest.fn(),
1465 -        getLimitedUserToken: jest.fn(),
1466 -        listPublicServiceKeys: jest.fn()
1467 -      }));
1468 -    })(auth = mockServices2.auth || (mockServices2.auth = {}));
1469 -    function discovery$1() {
1470 -      return discovery.HostDiscovery.fromConfig(
1471 -        new config.ConfigReader({
1472 -          backend: {
1473 -            // Invalid port to make sure that requests are always mocked
1474 -            baseUrl: "http://localhost:0",
1475 -            listen: { port: 0 }
1476 -          }
1477 -        })
1478 -      );
1479 -    }
1480 -    mockServices2.discovery = discovery$1;
1481 -    ((discovery2) => {
1482 -      discovery2.factory = () => discovery.discoveryServiceFactory;
1483 -      discovery2.mock = simpleMock(backendPluginApi.coreServices.discovery, () => ({
1484 -        getBaseUrl: jest.fn(),
1485 -        getExternalBaseUrl: jest.fn()
1486 -      }));
1487 -    })(discovery$1 = mockServices2.discovery || (mockServices2.discovery = {}));
1488 -    function httpAuth(options) {
1489 -      return new MockHttpAuthService(
1490 -        options?.pluginId ?? "test",
1491 -        options?.defaultCredentials ?? exports.mockCredentials.user()
1492 -      );
1493 -    }
1494 -    mockServices2.httpAuth = httpAuth;
1495 -    ((httpAuth2) => {
1496 -      httpAuth2.factory = (options) => backendPluginApi.createServiceFactory({
1497 -        service: backendPluginApi.coreServices.httpAuth,
1498 -        deps: { plugin: backendPluginApi.coreServices.pluginMetadata },
1499 -        factory: ({ plugin }) => new MockHttpAuthService(
1500 -          plugin.getId(),
1501 -          options?.defaultCredentials ?? exports.mockCredentials.user()
1502 -        )
1503 -      });
1504 -      httpAuth2.mock = simpleMock(backendPluginApi.coreServices.httpAuth, () => ({
1505 -        credentials: jest.fn(),
1506 -        issueUserCookie: jest.fn()
1507 -      }));
1508 -    })(httpAuth = mockServices2.httpAuth || (mockServices2.httpAuth = {}));
1509 -    function userInfo(customInfo) {
1510 -      return new MockUserInfoService(customInfo);
1511 -    }
1512 -    mockServices2.userInfo = userInfo;
1513 -    ((userInfo2) => {
1514 -      userInfo2.factory = () => backendPluginApi.createServiceFactory({
1515 -        service: backendPluginApi.coreServices.userInfo,
1516 -        deps: {},
1517 -        factory() {
1518 -          return new MockUserInfoService();
1519 -        }
1520 -      });
1521 -      userInfo2.mock = simpleMock(backendPluginApi.coreServices.userInfo, () => ({
1522 -        getUserInfo: jest.fn()
1523 -      }));
1524 -    })(userInfo = mockServices2.userInfo || (mockServices2.userInfo = {}));
1525 -    ((cache2) => {
1526 -      cache2.factory = () => cache.cacheServiceFactory;
1527 -      cache2.mock = simpleMock(backendPluginApi.coreServices.cache, () => ({
1528 -        delete: jest.fn(),
1529 -        get: jest.fn(),
1530 -        set: jest.fn(),
1531 -        withOptions: jest.fn()
1532 -      }));
1533 -    })(mockServices2.cache || (mockServices2.cache = {}));
1534 -    ((database2) => {
1535 -      database2.factory = () => database.databaseServiceFactory;
1536 -      database2.mock = simpleMock(backendPluginApi.coreServices.database, () => ({
1537 -        getClient: jest.fn()
1538 -      }));
1539 -    })(mockServices2.database || (mockServices2.database = {}));
1540 -    ((rootHealth2) => {
1541 -      rootHealth2.factory = () => rootHealth.rootHealthServiceFactory;
1542 -      rootHealth2.mock = simpleMock(backendPluginApi.coreServices.rootHealth, () => ({
1543 -        getLiveness: jest.fn(),
1544 -        getReadiness: jest.fn()
1545 -      }));
1546 -    })(mockServices2.rootHealth || (mockServices2.rootHealth = {}));
1547 -    ((httpRouter2) => {
1548 -      httpRouter2.factory = () => httpRouter.httpRouterServiceFactory;
1549 -      httpRouter2.mock = simpleMock(backendPluginApi.coreServices.httpRouter, () => ({
1550 -        use: jest.fn(),
1551 -        addAuthPolicy: jest.fn()
1552 -      }));
1553 -    })(mockServices2.httpRouter || (mockServices2.httpRouter = {}));
1554 -    ((rootHttpRouter2) => {
1555 -      rootHttpRouter2.factory = () => rootHttpRouter.rootHttpRouterServiceFactory();
1556 -      rootHttpRouter2.mock = simpleMock(backendPluginApi.coreServices.rootHttpRouter, () => ({
1557 -        use: jest.fn()
1558 -      }));
1559 -    })(mockServices2.rootHttpRouter || (mockServices2.rootHttpRouter = {}));
1560 -    ((lifecycle2) => {
1561 -      lifecycle2.factory = () => lifecycle.lifecycleServiceFactory;
1562 -      lifecycle2.mock = simpleMock(backendPluginApi.coreServices.lifecycle, () => ({
1563 -        addShutdownHook: jest.fn(),
1564 -        addStartupHook: jest.fn()
1565 -      }));
1566 -    })(mockServices2.lifecycle || (mockServices2.lifecycle = {}));
1567 -    ((logger2) => {
1568 -      logger2.factory = () => logger.loggerServiceFactory;
1569 -      logger2.mock = simpleMock(
1570 -        backendPluginApi.coreServices.logger,
1571 -        () => createLoggerMock()
1572 -      );
1573 -    })(mockServices2.logger || (mockServices2.logger = {}));
1574 -    ((permissions2) => {
1575 -      permissions2.factory = () => permissions.permissionsServiceFactory;
1576 -      permissions2.mock = simpleMock(backendPluginApi.coreServices.permissions, () => ({
1577 -        authorize: jest.fn(),
1578 -        authorizeConditional: jest.fn()
1579 -      }));
1580 -    })(mockServices2.permissions || (mockServices2.permissions = {}));
1581 -    ((rootLifecycle2) => {
1582 -      rootLifecycle2.factory = () => rootLifecycle.rootLifecycleServiceFactory;
1583 -      rootLifecycle2.mock = simpleMock(backendPluginApi.coreServices.rootLifecycle, () => ({
1584 -        addShutdownHook: jest.fn(),
1585 -        addStartupHook: jest.fn()
1586 -      }));
1587 -    })(mockServices2.rootLifecycle || (mockServices2.rootLifecycle = {}));
1588 -    ((scheduler2) => {
1589 -      scheduler2.factory = () => scheduler.schedulerServiceFactory;
1590 -      scheduler2.mock = simpleMock(backendPluginApi.coreServices.scheduler, () => ({
1591 -        createScheduledTaskRunner: jest.fn(),
1592 -        getScheduledTasks: jest.fn(),
1593 -        scheduleTask: jest.fn(),
1594 -        triggerTask: jest.fn()
1595 -      }));
1596 -    })(mockServices2.scheduler || (mockServices2.scheduler = {}));
1597 -    ((urlReader2) => {
1598 -      urlReader2.factory = () => urlReader.urlReaderServiceFactory;
1599 -      urlReader2.mock = simpleMock(backendPluginApi.coreServices.urlReader, () => ({
1600 -        readTree: jest.fn(),
1601 -        readUrl: jest.fn(),
1602 -        search: jest.fn()
1603 -      }));
1604 -    })(mockServices2.urlReader || (mockServices2.urlReader = {}));
1605 -    ((events2) => {
1606 -      events2.factory = () => pluginEventsNode.eventsServiceFactory;
1607 -      events2.mock = simpleMock(pluginEventsNode.eventsServiceRef, () => ({
1608 -        publish: jest.fn(),
1609 -        subscribe: jest.fn()
1610 -      }));
1611 -    })(mockServices2.events || (mockServices2.events = {}));
1612 -  })(exports.mockServices || (exports.mockServices = {}));
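An illustrative aside (not part of the diff): every `mockServices.<service>.mock()` helper above is built with `simpleMock()`, so it returns an object of `jest.fn()` stubs, applies any partial implementation you pass, and carries a `.factory` property that can be installed into a test backend. A minimal sketch with the rootConfig mock:

const { mockServices } = require('@backstage/backend-test-utils');

test('rootConfig mock with a partial implementation', () => {
  const config = mockServices.rootConfig.mock({
    // function values are wired in via mockImplementation()
    getOptionalString: key => (key === 'app.title' ? 'Test App' : undefined),
  });

  expect(config.getOptionalString('app.title')).toBe('Test App');
  expect(config.getOptionalString).toHaveBeenCalledWith('app.title');

  // config.factory is a service factory that always resolves to this mock,
  // while mockServices.rootConfig({ data: { ... } }) returns a real ConfigReader.
});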
1613 -
1614 -  const defaultServiceFactories = [
1615 -    exports.mockServices.auth.factory(),
1616 -    exports.mockServices.cache.factory(),
1617 -    exports.mockServices.rootConfig.factory(),
1618 -    exports.mockServices.database.factory(),
1619 -    exports.mockServices.httpAuth.factory(),
1620 -    exports.mockServices.httpRouter.factory(),
1621 -    exports.mockServices.lifecycle.factory(),
1622 -    exports.mockServices.logger.factory(),
1623 -    exports.mockServices.permissions.factory(),
1624 -    exports.mockServices.rootHealth.factory(),
1625 -    exports.mockServices.rootLifecycle.factory(),
1626 -    exports.mockServices.rootLogger.factory(),
1627 -    exports.mockServices.scheduler.factory(),
1628 -    exports.mockServices.userInfo.factory(),
1629 -    exports.mockServices.urlReader.factory(),
1630 -    exports.mockServices.events.factory()
1631 -  ];
1632 -  function createPluginsForOrphanModules(features) {
1633 -    const pluginIds = /* @__PURE__ */ new Set();
1634 -    const modulePluginIds = /* @__PURE__ */ new Set();
1635 -    for (const feature of features) {
1636 -      if (isInternalBackendRegistrations(feature)) {
1637 -        const registrations = feature.getRegistrations();
1638 -        for (const registration of registrations) {
1639 -          if (registration.type === "plugin") {
1640 -            pluginIds.add(registration.pluginId);
1641 -          } else if (registration.type === "module") {
1642 -            modulePluginIds.add(registration.pluginId);
1643 -          }
1644 -        }
1645 -      }
1646 -    }
1647 -    for (const pluginId of pluginIds) {
1648 -      modulePluginIds.delete(pluginId);
1649 -    }
1650 -    return Array.from(modulePluginIds).map(
1651 -      (pluginId) => backendPluginApi.createBackendPlugin({
1652 -        pluginId,
1653 -        register(reg) {
1654 -          reg.registerInit({ deps: {}, async init() {
1655 -          } });
1656 -        }
1657 -      })
1658 -    );
1659 -  }
1660 -  function createExtensionPointTestModules(features, extensionPointTuples) {
1661 -    if (!extensionPointTuples) {
1662 -      return [];
1663 -    }
1664 -    const registrations = features.flatMap((feature) => {
1665 -      if (isInternalBackendRegistrations(feature)) {
1666 -        return feature.getRegistrations();
1667 -      }
1668 -      return [];
1669 -    });
1670 -    const extensionPointMap = new Map(
1671 -      extensionPointTuples.map((ep) => [ep[0].id, ep])
1672 -    );
1673 -    const extensionPointsToSort = new Set(extensionPointMap.keys());
1674 -    const extensionPointsByPlugin = /* @__PURE__ */ new Map();
1675 -    for (const registration of registrations) {
1676 -      if (registration.type === "module") {
1677 -        const testDep = Object.values(registration.init.deps).filter(
1678 -          (dep) => extensionPointsToSort.has(dep.id)
1679 -        );
1680 -        if (testDep.length > 0) {
1681 -          let points = extensionPointsByPlugin.get(registration.pluginId);
1682 -          if (!points) {
1683 -            points = [];
1684 -            extensionPointsByPlugin.set(registration.pluginId, points);
1685 -          }
1686 -          for (const { id } of testDep) {
1687 -            points.push(id);
1688 -            extensionPointsToSort.delete(id);
1689 -          }
1690 -        }
1691 -      }
1692 -    }
1693 -    if (extensionPointsToSort.size > 0) {
1694 -      const list = Array.from(extensionPointsToSort).map((id) => `'${id}'`).join(", ");
1695 -      throw new Error(
1696 -        `Unable to determine the plugin ID of extension point(s) ${list}. Tested extension points must be depended on by one or more tested modules.`
1697 -      );
1698 -    }
1699 -    const modules = [];
1700 -    for (const [pluginId, pluginExtensionPointIds] of extensionPointsByPlugin) {
1701 -      modules.push(
1702 -        backendPluginApi.createBackendModule({
1703 -          pluginId,
1704 -          moduleId: "test-extension-point-registration",
1705 -          register(reg) {
1706 -            for (const id of pluginExtensionPointIds) {
1707 -              const tuple = extensionPointMap.get(id);
1708 -              reg.registerExtensionPoint(...tuple);
1709 -            }
1710 -            reg.registerInit({ deps: {}, async init() {
1711 -            } });
1712 -          }
1713 -        })
1714 -      );
1715 -    }
1716 -    return modules;
1717 -  }
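An illustrative aside (not part of the diff): the helper above is what backs the `extensionPoints` option of `startTestBackend()`, which accepts `[extensionPointRef, implementation]` tuples and registers each one in a synthetic module for the plugin whose tested modules depend on it. A sketch under the assumption that the hypothetical extension point and module below stand in for your real ones:

const { startTestBackend } = require('@backstage/backend-test-utils');
const {
  createExtensionPoint,
  createBackendModule,
} = require('@backstage/backend-plugin-api');

// Hypothetical extension point and module, defined inline for the sketch.
const greetingsExtensionPoint = createExtensionPoint({ id: 'example.greetings' });

const exampleModule = createBackendModule({
  pluginId: 'example',
  moduleId: 'greeter',
  register(reg) {
    reg.registerInit({
      deps: { greetings: greetingsExtensionPoint },
      async init({ greetings }) {
        greetings.addGreeting('hello');
      },
    });
  },
});

test('module wires itself into the extension point', async () => {
  const greetings = { addGreeting: jest.fn() };
  await startTestBackend({
    extensionPoints: [[greetingsExtensionPoint, greetings]],
    features: [exampleModule],
  });
  expect(greetings.addGreeting).toHaveBeenCalledWith('hello');
});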
1718 -  function isPromise(value) {
1719 -    return typeof value === "object" && value !== null && "then" in value && typeof value.then === "function";
1720 -  }
1721 -  function unwrapFeature(feature) {
1722 -    return typeof feature === "function" ? feature() : feature;
1723 -  }
1724 -  const backendInstancesToCleanUp = new Array();
1725 -  async function startTestBackend(options) {
1726 -    const { extensionPoints, ...otherOptions } = options;
1727 -    const features = await Promise.all(
1728 -      options.features?.map(async (val) => {
1729 -        if (isPromise(val)) {
1730 -          const { default: feature } = await val;
1731 -          return unwrapFeature(feature);
1732 -        }
1733 -        return unwrapFeature(val);
1734 -      }) ?? []
1735 -    );
1736 -    let server;
1737 -    const rootHttpRouterFactory = backendPluginApi.createServiceFactory({
1738 -      service: backendPluginApi.coreServices.rootHttpRouter,
1739 -      deps: {
1740 -        config: backendPluginApi.coreServices.rootConfig,
1741 -        lifecycle: backendPluginApi.coreServices.rootLifecycle,
1742 -        rootLogger: backendPluginApi.coreServices.rootLogger,
1743 -        health: backendPluginApi.coreServices.rootHealth
1744 -      },
1745 -      async factory({ config, lifecycle, rootLogger, health }) {
1746 -        const router = rootHttpRouter.DefaultRootHttpRouter.create();
1747 -        const logger = rootLogger.child({ service: "rootHttpRouter" });
1748 -        const app = express__default.default();
1749 -        const middleware = rootHttpRouter.MiddlewareFactory.create({ config, logger });
1750 -        const healthRouter = rootHttpRouter.createHealthRouter({ health });
1751 -        app.use(healthRouter);
1752 -        app.use(router.handler());
1753 -        app.use(middleware.notFound());
1754 -        app.use(middleware.error());
1755 -        server = await rootHttpRouter.createHttpServer(
1756 -          app,
1757 -          { listen: { host: "", port: 0 } },
1758 -          { logger }
1759 -        );
1760 -        lifecycle.addShutdownHook(() => server.stop(), { logger });
1761 -        await server.start();
1762 -        return router;
1763 -      }
1764 -    });
1765 -    const discoveryFactory = backendPluginApi.createServiceFactory({
1766 -      service: backendPluginApi.coreServices.discovery,
1767 -      deps: {
1768 -        rootHttpRouter: backendPluginApi.coreServices.rootHttpRouter
1769 -      },
1770 -      async factory() {
1771 -        if (!server) {
1772 -          throw new Error("Test server not started yet");
1773 -        }
1774 -        const port = server.port();
1775 -        const discovery$1 = discovery.HostDiscovery.fromConfig(
1776 -          new config.ConfigReader({
1777 -            backend: { baseUrl: `http://localhost:${port}`, listen: { port } }
1778 -          })
1779 -        );
1780 -        return discovery$1;
1781 -      }
1782 -    });
1783 -    const backend = backendAppApi.createSpecializedBackend({
1784 -      ...otherOptions,
1785 -      defaultServiceFactories: [
1786 -        ...defaultServiceFactories,
1787 -        rootHttpRouterFactory,
1788 -        discoveryFactory
1789 -      ]
1790 -    });
1791 -    backendInstancesToCleanUp.push(backend);
1792 -    for (const m of createExtensionPointTestModules(features, extensionPoints)) {
1793 -      backend.add(m);
1794 -    }
1795 -    for (const p of createPluginsForOrphanModules(features)) {
1796 -      backend.add(p);
1797 -    }
1798 -    for (const feature of features) {
1799 -      backend.add(feature);
1800 -    }
1801 -    await backend.start();
1802 -    return Object.assign(backend, {
1803 -      get server() {
1804 -        if (!server) {
1805 -          throw new Error("TestBackend server is not available");
1806 -        }
1807 -        return server;
1808 -      }
1809 -    });
1810 -  }
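An illustrative aside (not part of the diff): `startTestBackend()` spins up the mock service factories above plus a real HTTP server on a random port, exposes that server via the returned `server` getter, and the `afterAll` hook registered further down stops it automatically. A sketch assuming Jest, `supertest`, and an inline hypothetical plugin:

const express = require('express');
const request = require('supertest'); // assumed dev dependencies of the test suite
const { createBackendPlugin, coreServices } = require('@backstage/backend-plugin-api');
const { startTestBackend } = require('@backstage/backend-test-utils');

// Hypothetical plugin defined inline for the sketch.
const examplePlugin = createBackendPlugin({
  pluginId: 'example',
  register(reg) {
    reg.registerInit({
      deps: { httpRouter: coreServices.httpRouter },
      async init({ httpRouter }) {
        const router = express.Router();
        router.get('/ping', (_req, res) => res.json({ pong: true }));
        httpRouter.use(router);
      },
    });
  },
});

test('plugin routes are served by the test backend', async () => {
  const { server } = await startTestBackend({ features: [examplePlugin] });
  // Plugin routers are mounted under /api/<pluginId> by the httpRouter service.
  await request(server).get('/api/example/ping').expect(200, { pong: true });
});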
1811 -  let registered = false;
1812 -  function registerTestHooks() {
1813 -    if (typeof afterAll !== "function") {
1814 -      return;
1815 -    }
1816 -    if (registered) {
1817 -      return;
1818 -    }
1819 -    registered = true;
1820 -    afterAll(async () => {
1821 -      await Promise.all(
1822 -        backendInstancesToCleanUp.map(async (backend) => {
1823 -          try {
1824 -            await backend.stop();
1825 -          } catch (error) {
1826 -            console.error(`Failed to stop backend after tests, ${error}`);
1827 -          }
1828 -        })
1829 -      );
1830 -      backendInstancesToCleanUp.length = 0;
1831 -    });
1832 -  }
1833 -  registerTestHooks();
1834 -  function toInternalBackendFeature(feature) {
1835 -    if (feature.$$type !== "@backstage/BackendFeature") {
1836 -      throw new Error(`Invalid BackendFeature, bad type '${feature.$$type}'`);
1837 -    }
1838 -    const internal = feature;
1839 -    if (internal.version !== "v1") {
1840 -      throw new Error(
1841 -        `Invalid BackendFeature, bad version '${internal.version}'`
1842 -      );
1843 -    }
1844 -    return internal;
1845 -  }
1846 -  function isInternalBackendRegistrations(feature) {
1847 -    const internal = toInternalBackendFeature(feature);
1848 -    if (internal.featureType === "registrations") {
1849 -      return true;
1850 -    }
1851 -    return "getRegistrations" in internal;
1852 -  }
1853 -
1854 -  class Node {
1855 -    constructor(value, consumes, provides) {
1856 -      this.value = value;
1857 -      this.consumes = consumes;
1858 -      this.provides = provides;
1859 -    }
1860 -    static from(input) {
1861 -      return new Node(
1862 -        input.value,
1863 -        input.consumes ? new Set(input.consumes) : /* @__PURE__ */ new Set(),
1864 -        input.provides ? new Set(input.provides) : /* @__PURE__ */ new Set()
1865 -      );
1866 -    }
1867 -  }
1868 -  class CycleKeySet {
1869 -    static from(nodes) {
1870 -      return new CycleKeySet(nodes);
1871 -    }
1872 -    #nodeIds;
1873 -    #cycleKeys;
1874 -    constructor(nodes) {
1875 -      this.#nodeIds = new Map(nodes.map((n, i) => [n.value, i]));
1876 -      this.#cycleKeys = /* @__PURE__ */ new Set();
1877 -    }
1878 -    tryAdd(path) {
1879 -      const cycleKey = this.#getCycleKey(path);
1880 -      if (this.#cycleKeys.has(cycleKey)) {
1881 -        return false;
1882 -      }
1883 -      this.#cycleKeys.add(cycleKey);
1884 -      return true;
1885 -    }
1886 -    #getCycleKey(path) {
1887 -      return path.map((n) => this.#nodeIds.get(n)).sort().join(",");
1888 -    }
1889 -  }
1890 -  class DependencyGraph {
1891 -    static fromMap(nodes) {
1892 -      return this.fromIterable(
1893 -        Object.entries(nodes).map(([key, node]) => ({
1894 -          value: String(key),
1895 -          ...node
1896 -        }))
1897 -      );
1898 -    }
1899 -    static fromIterable(nodeInputs) {
1900 -      const nodes = new Array();
1901 -      for (const nodeInput of nodeInputs) {
1902 -        nodes.push(Node.from(nodeInput));
1903 -      }
1904 -      return new DependencyGraph(nodes);
1905 -    }
1906 -    #nodes;
1907 -    #allProvided;
1908 -    constructor(nodes) {
1909 -      this.#nodes = nodes;
1910 -      this.#allProvided = /* @__PURE__ */ new Set();
1911 -      for (const node of this.#nodes.values()) {
1912 -        for (const produced of node.provides) {
1913 -          this.#allProvided.add(produced);
1914 -        }
1915 -      }
1916 -    }
1917 -    /**
1918 -     * Find all nodes that consume dependencies that are not provided by any other node.
1919 -     */
1920 -    findUnsatisfiedDeps() {
1921 -      const unsatisfiedDependencies = [];
1922 -      for (const node of this.#nodes.values()) {
1923 -        const unsatisfied = Array.from(node.consumes).filter(
1924 -          (id) => !this.#allProvided.has(id)
1925 -        );
1926 -        if (unsatisfied.length > 0) {
1927 -          unsatisfiedDependencies.push({ value: node.value, unsatisfied });
1928 -        }
1929 -      }
1930 -      return unsatisfiedDependencies;
1931 -    }
1932 -    /**
1933 -     * Detect the first circular dependency within the graph, returning the path of nodes that
1934 -     * form a cycle, with the same node as the first and last element of the array.
1935 -     */
1936 -    detectCircularDependency() {
1937 -      return this.detectCircularDependencies().next().value;
1938 -    }
1939 -    /**
1940 -     * Detect circular dependencies within the graph, returning the path of nodes that
1941 -     * form a cycle, with the same node as the first and last element of the array.
1942 -     */
1943 -    *detectCircularDependencies() {
1944 -      const cycleKeys = CycleKeySet.from(this.#nodes);
1945 -      for (const startNode of this.#nodes) {
1946 -        const visited = /* @__PURE__ */ new Set();
1947 -        const stack = new Array([
1948 -          startNode,
1949 -          [startNode.value]
1950 -        ]);
1951 -        while (stack.length > 0) {
1952 -          const [node, path] = stack.pop();
1953 -          if (visited.has(node)) {
1954 -            continue;
1955 -          }
1956 -          visited.add(node);
1957 -          for (const consumed of node.consumes) {
1958 -            const providerNodes = this.#nodes.filter(
1959 -              (other) => other.provides.has(consumed)
1960 -            );
1961 -            for (const provider of providerNodes) {
1962 -              if (provider === startNode) {
1963 -                if (cycleKeys.tryAdd(path)) {
1964 -                  yield [...path, startNode.value];
1965 -                }
1966 -                break;
1967 -              }
1968 -              if (!visited.has(provider)) {
1969 -                stack.push([provider, [...path, provider.value]]);
1970 -              }
1971 -            }
1972 -          }
1973 -        }
1974 -      }
1975 -      return void 0;
1976 -    }
1977 -    /**
1978 -     * Traverses the dependency graph in topological order, calling the provided
1979 -     * function for each node and waiting for it to resolve.
1980 -     *
1981 -     * The nodes are traversed in parallel, but in such a way that no node is
1982 -     * visited before all of its dependencies.
1983 -     *
1984 -     * Dependencies of nodes that are not produced by any other nodes will be ignored.
1985 -     */
1986 -    async parallelTopologicalTraversal(fn) {
1987 -      const allProvided = this.#allProvided;
1988 -      const producedSoFar = /* @__PURE__ */ new Set();
1989 -      const waiting = new Set(this.#nodes.values());
1990 -      const visited = /* @__PURE__ */ new Set();
1991 -      const results = new Array();
1992 -      let inFlight = 0;
1993 -      async function processMoreNodes() {
1994 -        if (waiting.size === 0) {
1995 -          return;
1996 -        }
1997 -        const nodesToProcess = [];
1998 -        for (const node of waiting) {
1999 -          let ready = true;
2000 -          for (const consumed of node.consumes) {
2001 -            if (allProvided.has(consumed) && !producedSoFar.has(consumed)) {
2002 -              ready = false;
2003 -              continue;
2004 -            }
2005 -          }
2006 -          if (ready) {
2007 -            nodesToProcess.push(node);
2008 -          }
2009 -        }
2010 -        for (const node of nodesToProcess) {
2011 -          waiting.delete(node);
2012 -        }
2013 -        if (nodesToProcess.length === 0 && inFlight === 0) {
2014 -          throw new Error("Circular dependency detected");
2015 -        }
2016 -        await Promise.all(nodesToProcess.map(processNode));
2017 -      }
2018 -      async function processNode(node) {
2019 -        visited.add(node);
2020 -        inFlight += 1;
2021 -        const result = await fn(node.value);
2022 -        results.push(result);
2023 -        node.provides.forEach((produced) => producedSoFar.add(produced));
2024 -        inFlight -= 1;
2025 -        await processMoreNodes();
2026 -      }
2027 -      await processMoreNodes();
2028 -      return results;
2029 -    }
2030 -  }
2031 -
2032 -  function toInternalServiceFactory(factory) {
2033 -    const f = factory;
2034 -    if (f.$$type !== "@backstage/BackendFeature") {
2035 -      throw new Error(`Invalid service factory, bad type '${f.$$type}'`);
2036 -    }
2037 -    if (f.version !== "v1") {
2038 -      throw new Error(`Invalid service factory, bad version '${f.version}'`);
2039 -    }
2040 -    return f;
2041 -  }
2042 -  function createPluginMetadataServiceFactory(pluginId) {
2043 -    return backendPluginApi.createServiceFactory({
2044 -      service: backendPluginApi.coreServices.pluginMetadata,
2045 -      deps: {},
2046 -      factory: async () => ({ getId: () => pluginId })
2047 -    });
2048 -  }
2049 -  class ServiceRegistry {
2050 -    static create(factories) {
2051 -      const factoryMap = /* @__PURE__ */ new Map();
2052 -      for (const factory of factories) {
2053 -        if (factory.service.multiton) {
2054 -          const existing = factoryMap.get(factory.service.id) ?? [];
2055 -          factoryMap.set(
2056 -            factory.service.id,
2057 -            existing.concat(toInternalServiceFactory(factory))
2058 -          );
2059 -        } else {
2060 -          factoryMap.set(factory.service.id, [toInternalServiceFactory(factory)]);
2061 -        }
2062 -      }
2063 -      const registry = new ServiceRegistry(factoryMap);
2064 -      registry.checkForCircularDeps();
2065 -      return registry;
2066 -    }
2067 -    #providedFactories;
2068 -    #loadedDefaultFactories;
2069 -    #implementations;
2070 -    #rootServiceImplementations = /* @__PURE__ */ new Map();
2071 -    #addedFactoryIds = /* @__PURE__ */ new Set();
2072 -    #instantiatedFactories = /* @__PURE__ */ new Set();
2073 -    constructor(factories) {
2074 -      this.#providedFactories = factories;
2075 -      this.#loadedDefaultFactories = /* @__PURE__ */ new Map();
2076 -      this.#implementations = /* @__PURE__ */ new Map();
2077 -    }
2078 -    #resolveFactory(ref, pluginId) {
2079 -      if (ref.id === backendPluginApi.coreServices.pluginMetadata.id) {
2080 -        return Promise.resolve([
2081 -          toInternalServiceFactory(createPluginMetadataServiceFactory(pluginId))
2082 -        ]);
2083 -      }
2084 -      let resolvedFactory = this.#providedFactories.get(ref.id);
2085 -      const { __defaultFactory: defaultFactory } = ref;
2086 -      if (!resolvedFactory && !defaultFactory) {
2087 -        return void 0;
2088 -      }
2089 -      if (!resolvedFactory) {
2090 -        let loadedFactory = this.#loadedDefaultFactories.get(defaultFactory);
2091 -        if (!loadedFactory) {
2092 -          loadedFactory = Promise.resolve().then(() => defaultFactory(ref)).then(
2093 -            (f) => toInternalServiceFactory(typeof f === "function" ? f() : f)
2094 -          );
2095 -          this.#loadedDefaultFactories.set(defaultFactory, loadedFactory);
2096 -        }
2097 -        resolvedFactory = loadedFactory.then(
2098 -          (factory) => [factory],
2099 -          (error) => {
2100 -            throw new Error(
2101 -              `Failed to instantiate service '${ref.id}' because the default factory loader threw an error, ${errors.stringifyError(
2102 -                error
2103 -              )}`
2104 -            );
2105 -          }
2106 -        );
2107 -      }
2108 -      return Promise.resolve(resolvedFactory);
2109 -    }
2110 -    #checkForMissingDeps(factory, pluginId) {
2111 -      const missingDeps = Object.values(factory.deps).filter((ref) => {
2112 -        if (ref.id === backendPluginApi.coreServices.pluginMetadata.id) {
2113 -          return false;
2114 -        }
2115 -        if (this.#providedFactories.get(ref.id)) {
2116 -          return false;
2117 -        }
2118 -        if (ref.multiton) {
2119 -          return false;
2120 -        }
2121 -        return !ref.__defaultFactory;
2122 -      });
2123 -      if (missingDeps.length) {
2124 -        const missing = missingDeps.map((r) => `'${r.id}'`).join(", ");
2125 -        throw new Error(
2126 -          `Failed to instantiate service '${factory.service.id}' for '${pluginId}' because the following dependent services are missing: ${missing}`
2127 -        );
2128 -      }
2129 -    }
2130 -    checkForCircularDeps() {
2131 -      const graph = DependencyGraph.fromIterable(
2132 -        Array.from(this.#providedFactories).map(([serviceId, factories]) => ({
2133 -          value: serviceId,
2134 -          provides: [serviceId],
2135 -          consumes: factories.flatMap(
2136 -            (factory) => Object.values(factory.deps).map((d) => d.id)
2137 -          )
2138 -        }))
2139 -      );
2140 -      const circularDependencies = Array.from(graph.detectCircularDependencies());
2141 -      if (circularDependencies.length) {
2142 -        const cycles = circularDependencies.map((c) => c.map((id) => `'${id}'`).join(" -> ")).join("\n  ");
2143 -        throw new errors.ConflictError(`Circular dependencies detected:
2144 - ${cycles}`);
2145 -      }
2146 -    }
2147 -    add(factory) {
2148 -      const factoryId = factory.service.id;
2149 -      if (factoryId === backendPluginApi.coreServices.pluginMetadata.id) {
2150 -        throw new Error(
2151 -          `The ${backendPluginApi.coreServices.pluginMetadata.id} service cannot be overridden`
2152 -        );
2153 -      }
2154 -      if (this.#instantiatedFactories.has(factoryId)) {
2155 -        throw new Error(
2156 -          `Unable to set service factory with id ${factoryId}, service has already been instantiated`
2157 -        );
2158 -      }
2159 -      if (factory.service.multiton) {
2160 -        const newFactories = (this.#providedFactories.get(factoryId) ?? []).concat(toInternalServiceFactory(factory));
2161 -        this.#providedFactories.set(factoryId, newFactories);
2162 -      } else {
2163 -        if (this.#addedFactoryIds.has(factoryId)) {
2164 -          throw new Error(
2165 -            `Duplicate service implementations provided for ${factoryId}`
2166 -          );
2167 -        }
2168 -        this.#addedFactoryIds.add(factoryId);
2169 -        this.#providedFactories.set(factoryId, [
2170 -          toInternalServiceFactory(factory)
2171 -        ]);
2172 -      }
2173 -    }
2174 -    async initializeEagerServicesWithScope(scope, pluginId = "root") {
2175 -      for (const [factory] of this.#providedFactories.values()) {
2176 -        if (factory.service.scope === scope) {
2177 -          if (scope === "root" && factory.initialization !== "lazy") {
2178 -            await this.get(factory.service, pluginId);
2179 -          } else if (scope === "plugin" && factory.initialization === "always") {
2180 -            await this.get(factory.service, pluginId);
2181 -          }
2182 -        }
2183 -      }
2184 -    }
2185 -    get(ref, pluginId) {
2186 -      this.#instantiatedFactories.add(ref.id);
2187 -      const resolvedFactory = this.#resolveFactory(ref, pluginId);
2188 -      if (!resolvedFactory) {
2189 -        return ref.multiton ? Promise.resolve([]) : void 0;
2190 -      }
2191 -      return resolvedFactory.then((factories) => {
2192 -        return Promise.all(
2193 -          factories.map((factory) => {
2194 -            if (factory.service.scope === "root") {
2195 -              let existing = this.#rootServiceImplementations.get(factory);
2196 -              if (!existing) {
2197 -                this.#checkForMissingDeps(factory, pluginId);
2198 -                const rootDeps = new Array();
2199 -                for (const [name, serviceRef] of Object.entries(factory.deps)) {
2200 -                  if (serviceRef.scope !== "root") {
2201 -                    throw new Error(
2202 -                      `Failed to instantiate 'root' scoped service '${ref.id}' because it depends on '${serviceRef.scope}' scoped service '${serviceRef.id}'.`
2203 -                    );
2204 -                  }
2205 -                  const target = this.get(serviceRef, pluginId);
2206 -                  rootDeps.push(target.then((impl) => [name, impl]));
2207 -                }
2208 -                existing = Promise.all(rootDeps).then(
2209 -                  (entries) => factory.factory(Object.fromEntries(entries), void 0)
2210 -                );
2211 -                this.#rootServiceImplementations.set(factory, existing);
2212 -              }
2213 -              return existing;
2214 -            }
2215 -            let implementation = this.#implementations.get(factory);
2216 -            if (!implementation) {
2217 -              this.#checkForMissingDeps(factory, pluginId);
2218 -              const rootDeps = new Array();
2219 -              for (const [name, serviceRef] of Object.entries(factory.deps)) {
2220 -                if (serviceRef.scope === "root") {
2221 -                  const target = this.get(serviceRef, pluginId);
2222 -                  rootDeps.push(target.then((impl) => [name, impl]));
2223 -                }
2224 -              }
2225 -              implementation = {
2226 -                context: Promise.all(rootDeps).then(
2227 -                  (entries) => factory.createRootContext?.(Object.fromEntries(entries))
2228 -                ).catch((error) => {
2229 -                  const cause = errors.stringifyError(error);
2230 -                  throw new Error(
2231 -                    `Failed to instantiate service '${ref.id}' because createRootContext threw an error, ${cause}`
2232 -                  );
2233 -                }),
2234 -                byPlugin: /* @__PURE__ */ new Map()
2235 -              };
2236 -              this.#implementations.set(factory, implementation);
2237 -            }
2238 -            let result = implementation.byPlugin.get(pluginId);
2239 -            if (!result) {
2240 -              const allDeps = new Array();
2241 -              for (const [name, serviceRef] of Object.entries(factory.deps)) {
2242 -                const target = this.get(serviceRef, pluginId);
2243 -                allDeps.push(target.then((impl) => [name, impl]));
2244 -              }
2245 -              result = implementation.context.then(
2246 -                (context) => Promise.all(allDeps).then(
2247 -                  (entries) => factory.factory(Object.fromEntries(entries), context)
2248 -                )
2249 -              ).catch((error) => {
2250 -                const cause = errors.stringifyError(error);
2251 -                throw new Error(
2252 -                  `Failed to instantiate service '${ref.id}' for '${pluginId}' because the factory function threw an error, ${cause}`
2253 -                );
2254 -              });
2255 -              implementation.byPlugin.set(pluginId, result);
2256 -            }
2257 -            return result;
2258 -          })
2259 -        );
2260 -      }).then((results) => ref.multiton ? results : results[0]);
2261 -    }
2262 -  }
2263 -
2264 -  class ServiceFactoryTester {
2265 -    #subject;
2266 -    #registry;
2267 -    /**
2268 -     * Creates a new {@link ServiceFactoryTester} used to test the provided subject.
2269 -     *
2270 -     * @param subject - The service factory to test.
2271 -     * @param options - Additional options
2272 -     * @returns A new tester instance for the provided subject.
2273 -     */
2274 -    static from(subject, options) {
2275 -      const registry = ServiceRegistry.create([
2276 -        ...defaultServiceFactories,
2277 -        ...options?.dependencies ?? [],
2278 -        subject
2279 -      ]);
2280 -      return new ServiceFactoryTester(subject.service, registry);
2281 -    }
2282 -    constructor(subject, registry) {
2283 -      this.#subject = subject;
2284 -      this.#registry = registry;
2285 -    }
2286 -    /**
2287 -     * Returns the service instance for the subject.
2288 -     *
2289 -     * @remarks
2290 -     *
2291 -     * If the subject is a plugin scoped service factory a plugin ID
2292 -     * can be provided to instantiate the service for a specific plugin.
2293 -     *
2294 -     * By default the plugin ID 'test' is used.
2295 -     */
2296 -    async getSubject(...args) {
2297 -      const [pluginId] = args;
2298 -      const instance = this.#registry.get(this.#subject, pluginId ?? "test");
2299 -      return instance;
2300 -    }
2301 -    /**
2302 -     * Return the service instance for any of the provided dependencies or built-in services.
2303 -     *
2304 -     * @remarks
2305 -     *
2306 -     * A plugin ID can optionally be provided for plugin scoped services, otherwise the plugin ID 'test' is used.
2307 -     */
2308 -    async getService(service, ...args) {
2309 -      const [pluginId] = args;
2310 -      const instance = await this.#registry.get(service, pluginId ?? "test");
2311 -      if (instance === void 0) {
2312 -        throw new Error(`Service '${service.id}' not found`);
2313 -      }
2314 -      return instance;
2315 -    }
2316 -  }
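An illustrative aside (not part of the diff): `ServiceFactoryTester.from()` wires the subject factory into the mock default factories shown earlier, so a service factory can be instantiated in isolation. A sketch with a hypothetical service defined inline:

const { ServiceFactoryTester, mockServices } = require('@backstage/backend-test-utils');
const {
  createServiceFactory,
  createServiceRef,
  coreServices,
} = require('@backstage/backend-plugin-api');

// Hypothetical service under test: it only needs the logger service.
const greeterRef = createServiceRef({ id: 'test.greeter' });
const greeterFactory = createServiceFactory({
  service: greeterRef,
  deps: { logger: coreServices.logger },
  factory: ({ logger }) => ({
    greet: name => logger.info(`hello ${name}`),
  }),
});

test('greeter logs through the logger service', async () => {
  const logger = mockServices.logger.mock();
  const tester = ServiceFactoryTester.from(greeterFactory, {
    // override the default logger with the jest-mocked one so calls can be asserted
    dependencies: [logger.factory],
  });
  const greeter = await tester.getSubject();
  greeter.greet('world');
  expect(logger.info).toHaveBeenCalledWith('hello world');
});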
2317 -
2318 -  function mockErrorHandler() {
2319 -    return rootHttpRouter.MiddlewareFactory.create({
2320 -      config: exports.mockServices.rootConfig(),
2321 -      logger: exports.mockServices.rootLogger()
2322 -    }).error();
2323 -  }
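An illustrative aside (not part of the diff): `mockErrorHandler()` returns the standard Backstage error-handling middleware backed by silent mock config and logger services, so it can be dropped into a bare Express app under test. A sketch assuming `express` and `supertest` are available in the test environment:

const express = require('express');
const request = require('supertest'); // assumed dev dependencies of the test suite
const { NotFoundError } = require('@backstage/errors');
const { mockErrorHandler } = require('@backstage/backend-test-utils');

test('maps thrown errors to HTTP status codes', async () => {
  const app = express();
  app.get('/boom', () => {
    throw new NotFoundError('no such thing');
  });
  app.use(mockErrorHandler());
  await request(app).get('/boom').expect(404);
});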
2324 -
2325 -  exports.ServiceFactoryTester = ServiceFactoryTester;
2326 -  exports.TestCaches = TestCaches;
2327 -  exports.TestDatabases = TestDatabases;
2328 -  exports.createMockDirectory = createMockDirectory;
2329 -  exports.mockErrorHandler = mockErrorHandler;
2330 -  exports.registerMswTestHooks = registerMswTestHooks;
2331 -  exports.startTestBackend = startTestBackend;
  29 +  exports.mockErrorHandler = errorHandler.mockErrorHandler;
2332 30  //# sourceMappingURL=index.cjs.js.map