@etcsec-com/etc-collector 1.4.0 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,139 @@
+ #!/bin/bash
+ # =============================================================================
+ # Build standalone binary using Bun
+ # =============================================================================
+ # Usage: ./scripts/build-binary.sh [target]
+ # Targets: linux-x64, linux-arm64, macos-x64, macos-arm64, win-x64
+ # Default: current platform
+ #
+ # Output: dist/etc-collector-<target>.zip containing:
+ #   - etc-collector (or .exe on Windows)
+ #   - better_sqlite3.node (native SQLite module)
+
+ set -e
+
+ # Colors
+ RED='\033[0;31m'
+ GREEN='\033[0;32m'
+ YELLOW='\033[1;33m'
+ NC='\033[0m'
+
+ # Directories
+ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
+ DIST_DIR="$PROJECT_DIR/dist"
+
+ # Detect current platform
+ detect_platform() {
+   local os=$(uname -s | tr '[:upper:]' '[:lower:]')
+   local arch=$(uname -m)
+
+   case "$os" in
+     linux*) os="linux" ;;
+     darwin*) os="macos" ;;
+     mingw*|msys*|cygwin*) os="win" ;;
+   esac
+
+   case "$arch" in
+     x86_64|amd64) arch="x64" ;;
+     arm64|aarch64) arch="arm64" ;;
+   esac
+
+   echo "${os}-${arch}"
+ }
+
+ TARGET="${1:-$(detect_platform)}"
+ BUILD_DIR="$DIST_DIR/etc-collector-${TARGET}"
+ BINARY_NAME="etc-collector"
+
+ # Add .exe for Windows
+ if [[ "$TARGET" == win-* ]]; then
+   BINARY_NAME="etc-collector.exe"
+ fi
+
+ echo -e "${GREEN}=== Building etc-collector for ${TARGET} ===${NC}"
+ echo ""
+
+ # Step 1: Check prerequisites
+ echo -e "${YELLOW}[1/5] Checking prerequisites...${NC}"
+
+ if ! command -v bun &> /dev/null; then
+   echo -e "${RED}Error: Bun is required. Install with: curl -fsSL https://bun.sh/install | bash${NC}"
+   exit 1
+ fi
+
+ echo -e "${GREEN}Bun $(bun --version) found${NC}"
+
+ # Step 2: Build TypeScript
+ echo -e "${YELLOW}[2/5] Building TypeScript...${NC}"
+ cd "$PROJECT_DIR"
+ npm run build
+
+ # Step 3: Compile with Bun
+ echo -e "${YELLOW}[3/5] Compiling binary with Bun...${NC}"
+ rm -rf "$BUILD_DIR"
+ mkdir -p "$BUILD_DIR"
+
+ # Map target to Bun target format
+ case "$TARGET" in
+   linux-x64) BUN_TARGET="bun-linux-x64" ;;
+   linux-arm64) BUN_TARGET="bun-linux-arm64" ;;
+   macos-x64) BUN_TARGET="bun-darwin-x64" ;;
+   macos-arm64) BUN_TARGET="bun-darwin-arm64" ;;
+   win-x64) BUN_TARGET="bun-windows-x64" ;;
+   *)
+     echo -e "${RED}Unknown target: $TARGET${NC}"
+     exit 1
+     ;;
+ esac
+
+ bun build dist/server.js \
+   --compile \
+   --target="$BUN_TARGET" \
+   --outfile "$BUILD_DIR/$BINARY_NAME"
+
+ echo -e "${GREEN}Binary compiled successfully${NC}"
+
+ # Step 4: Copy native modules
+ echo -e "${YELLOW}[4/5] Including native modules...${NC}"
+
+ # Find and copy better-sqlite3 native module for the target platform
+ SQLITE_NODE=""
+ case "$TARGET" in
+   macos-arm64)
+     SQLITE_NODE="node_modules/better-sqlite3/build/Release/better_sqlite3.node"
+     ;;
+   macos-x64|linux-x64|linux-arm64|win-x64)
+     # For cross-compilation, we need the target platform's native module
+     # These would need to be pre-built or downloaded
+     echo -e "${YELLOW}Note: Cross-compilation requires pre-built native modules for $TARGET${NC}"
+     echo -e "${YELLOW}For CI, native modules are built on each platform runner${NC}"
+     ;;
+ esac
+
+ if [ -n "$SQLITE_NODE" ] && [ -f "$SQLITE_NODE" ]; then
+   cp "$SQLITE_NODE" "$BUILD_DIR/"
+   echo -e "${GREEN}Copied better_sqlite3.node${NC}"
+ fi
+
+ # Step 5: Create ZIP
+ echo -e "${YELLOW}[5/5] Creating distribution package...${NC}"
+
+ cd "$DIST_DIR"
+ ZIP_NAME="etc-collector-${TARGET}.zip"
+ rm -f "$ZIP_NAME"
+ zip -r "$ZIP_NAME" "etc-collector-${TARGET}/"
+
+ # Cleanup directory, keep only ZIP
+ rm -rf "etc-collector-${TARGET}"
+
+ # Show result
+ echo ""
+ echo -e "${GREEN}=== Build complete ===${NC}"
+ ls -lh "$DIST_DIR/$ZIP_NAME"
+ echo ""
+ echo -e "Distribution package: ${YELLOW}$DIST_DIR/$ZIP_NAME${NC}"
+ echo ""
+ echo -e "To use:"
+ echo -e "  1. Unzip: ${YELLOW}unzip $ZIP_NAME${NC}"
+ echo -e "  2. Run:   ${YELLOW}./etc-collector-${TARGET}/${BINARY_NAME}${NC}"
@@ -830,7 +830,7 @@ export class ADAuditService {
    */
   private async fetchAclsForObjects(objectDns: string[]): Promise<AclEntry[]> {
     const allAclEntries: AclEntry[] = [];
-    let _successCount = 0;
+    let successCount = 0;
 
     // Batch requests to avoid overwhelming LDAP server
     const BATCH_SIZE = 100;
@@ -848,7 +848,7 @@ export class ADAuditService {
       });
 
       if (results.length > 0 && results[0].nTSecurityDescriptor) {
-        _successCount++;
+        successCount++;
         const secDescriptor = results[0].nTSecurityDescriptor;
 
         // Convert to Buffer if needed (ldapts may return as Buffer or string)
@@ -126,7 +126,7 @@ export function formatAsJSON(
     findings: options.includeDetails
       ? findings
       : findings.map((f) => {
-          const { affectedEntities: _affectedEntities, ...rest } = f;
+          const { affectedEntities, ...rest } = f;
           return rest;
         }),
     stats: {
File without changes
@@ -0,0 +1,299 @@
+ import { TokenRepository } from '../../../src/data/repositories/token.repository';
+ import { TokenCleanupJob } from '../../../src/data/jobs/token-cleanup.job';
+ import { DatabaseManager } from '../../../src/data/database';
+ import { MigrationRunner } from '../../../src/data/migrations/migration.runner';
+ import { existsSync, unlinkSync, mkdirSync } from 'fs';
+ import { join } from 'path';
+
+ /**
+  * Integration Tests for Token Persistence
+  * Tests database operations with a real SQLite file to verify persistence
+  */
+ describe('Token Persistence Integration', () => {
+   const testDbPath = join(__dirname, '../../__test_data__/integration-test.db');
+   const testDbDir = join(__dirname, '../../__test_data__');
+
+   beforeAll(() => {
+     // Create test data directory
+     if (!existsSync(testDbDir)) {
+       mkdirSync(testDbDir, { recursive: true });
+     }
+   });
+
+   afterAll(() => {
+     // Cleanup test database
+     if (existsSync(testDbPath)) {
+       unlinkSync(testDbPath);
+     }
+   });
+
+   beforeEach(() => {
+     // Remove existing test database before each test
+     if (existsSync(testDbPath)) {
+       unlinkSync(testDbPath);
+     }
+   });
+
+   it('IV1: Tokens can be created and retrieved', async () => {
+     // Setup: Create database and run migrations
+     await MigrationRunner.runMigrations(testDbPath);
+
+     const dbManager = DatabaseManager.getInstance();
+     const db = dbManager.connect(testDbPath);
+     const repository = new TokenRepository(db);
+
+     // Test: Create token
+     const tokenInput = {
+       jti: 'test-jti-iv1',
+       public_key: '-----BEGIN PUBLIC KEY-----\ntest\n-----END PUBLIC KEY-----',
+       expires_at: new Date(Date.now() + 3600000).toISOString(),
+       max_uses: 10,
+       metadata: '{"purpose": "integration-test"}',
+     };
+
+     const createdToken = repository.create(tokenInput);
+
+     // Verify: All fields match
+     expect(createdToken.jti).toBe(tokenInput.jti);
+     expect(createdToken.public_key).toBe(tokenInput.public_key);
+     expect(createdToken.expires_at).toBe(tokenInput.expires_at);
+     expect(createdToken.max_uses).toBe(10);
+     expect(createdToken.used_count).toBe(0);
+     expect(createdToken.metadata).toBe(tokenInput.metadata);
+     expect(createdToken.revoked_at).toBeNull();
+     expect(createdToken.id).toBeGreaterThan(0);
+
+     // Test: Retrieve token
+     const retrievedToken = repository.findByJti('test-jti-iv1');
+
+     // Verify: Retrieved token matches created token
+     expect(retrievedToken).not.toBeNull();
+     expect(retrievedToken!.jti).toBe(createdToken.jti);
+     expect(retrievedToken!.public_key).toBe(createdToken.public_key);
+     expect(retrievedToken!.max_uses).toBe(createdToken.max_uses);
+     expect(retrievedToken!.used_count).toBe(createdToken.used_count);
+
+     dbManager.close();
+   });
+
+   it('IV2: Revoked token is marked correctly in DB', async () => {
+     // Setup
+     await MigrationRunner.runMigrations(testDbPath);
+
+     const dbManager = DatabaseManager.getInstance();
+     const db = dbManager.connect(testDbPath);
+     const repository = new TokenRepository(db);
+
+     // Test: Create token
+     const token = repository.create({
+       jti: 'test-jti-iv2',
+       public_key: 'test-key',
+       expires_at: new Date(Date.now() + 3600000).toISOString(),
+     });
+
+     expect(token.revoked_at).toBeNull();
+
+     // Test: Revoke token
+     repository.revoke('test-jti-iv2', 'admin', 'security incident');
+
+     // Test: Retrieve revoked token
+     const revokedToken = repository.findByJti('test-jti-iv2');
+
+     // Verify: All revocation fields are set
+     expect(revokedToken).not.toBeNull();
+     expect(revokedToken!.revoked_at).not.toBeNull();
+     expect(revokedToken!.revoked_by).toBe('admin');
+     expect(revokedToken!.revoked_reason).toBe('security incident');
+
+     // Verify: Token is not in active list
+     const activeTokens = repository.findActive();
+     expect(activeTokens.find((t) => t.jti === 'test-jti-iv2')).toBeUndefined();
+
+     dbManager.close();
+   });
+
+   it('IV3: Cleanup removes expired tokens', async () => {
+     // Setup
+     await MigrationRunner.runMigrations(testDbPath);
+
+     const dbManager = DatabaseManager.getInstance();
+     const db = dbManager.connect(testDbPath);
+     const repository = new TokenRepository(db);
+     const cleanupJob = new TokenCleanupJob(repository);
+
+     // Create old expired non-revoked token (100 days ago, will be deleted immediately)
+     repository.create({
+       jti: 'old-expired-nonrevoked',
+       public_key: 'key',
+       expires_at: new Date(Date.now() + 3600000).toISOString(),
+     });
+     db.prepare(`UPDATE tokens SET
+       created_at = datetime('now', '-101 days'),
+       expires_at = datetime('now', '-100 days')
+       WHERE jti = ?`).run('old-expired-nonrevoked');
+
+     // Create old revoked expired token (expired 100 days ago, revoked 95 days ago, will be deleted)
+     repository.create({
+       jti: 'old-revoked-expired',
+       public_key: 'key',
+       expires_at: new Date(Date.now() + 3600000).toISOString(),
+     });
+     db.prepare(`UPDATE tokens SET
+       created_at = datetime('now', '-101 days'),
+       expires_at = datetime('now', '-100 days')
+       WHERE jti = ?`).run('old-revoked-expired');
+     repository.revoke('old-revoked-expired', 'admin', 'test');
+     db.prepare("UPDATE tokens SET revoked_at = datetime('now', '-95 days') WHERE jti = ?").run(
+       'old-revoked-expired'
+     );
+
+     // Create recent revoked expired token (expired 40 days ago, revoked 30 days ago, will be kept)
+     repository.create({
+       jti: 'recent-revoked-expired',
+       public_key: 'key',
+       expires_at: new Date(Date.now() + 3600000).toISOString(),
+     });
+     db.prepare(`UPDATE tokens SET
+       created_at = datetime('now', '-41 days'),
+       expires_at = datetime('now', '-40 days')
+       WHERE jti = ?`).run('recent-revoked-expired');
+     repository.revoke('recent-revoked-expired', 'admin', 'test');
+     db.prepare("UPDATE tokens SET revoked_at = datetime('now', '-30 days') WHERE jti = ?").run(
+       'recent-revoked-expired'
+     );
+
+     // Create active token (will be kept)
+     repository.create({
+       jti: 'active-token',
+       public_key: 'key',
+       expires_at: new Date(Date.now() + 3600000).toISOString(),
+     });
+
+     // Verify initial state
+     expect(repository.count()).toBe(4);
+
+     // Test: Run cleanup job
+     const deletedCount = cleanupJob.run();
+
+     // Verify: Old tokens deleted (non-revoked + old-revoked), recent revoked kept
+     expect(deletedCount).toBe(2);
+     expect(repository.count()).toBe(2);
+     expect(repository.findByJti('old-expired-nonrevoked')).toBeNull();
+     expect(repository.findByJti('old-revoked-expired')).toBeNull();
+     expect(repository.findByJti('recent-revoked-expired')).not.toBeNull(); // Kept for audit
+     expect(repository.findByJti('active-token')).not.toBeNull();
+
+     dbManager.close();
+   });
+
+   it('IV4: Migration script converts the old schema successfully', async () => {
+     // This test verifies that the migration runner works with the schema
+     // For actual identity-collector migration, scripts/migrate-tokens.ts would be used
+
+     // Setup: Create fresh database
+     await MigrationRunner.runMigrations(testDbPath);
+
+     const dbManager = DatabaseManager.getInstance();
+     const db = dbManager.connect(testDbPath);
+
+     // Verify: Migrations table exists and has version 1
+     const migrationVersion = db
+       .prepare('SELECT MAX(version) as version FROM migrations')
+       .get() as { version: number };
+     expect(migrationVersion.version).toBe(1);
+
+     // Verify: Tokens table exists with correct schema
+     const tableInfo = db.prepare("PRAGMA table_info(tokens)").all() as Array<{
+       name: string;
+       type: string;
+       notnull: number;
+     }>;
+
+     const columnNames = tableInfo.map((col) => col.name);
+     expect(columnNames).toContain('id');
+     expect(columnNames).toContain('jti');
+     expect(columnNames).toContain('public_key');
+     expect(columnNames).toContain('expires_at');
+     expect(columnNames).toContain('max_uses');
+     expect(columnNames).toContain('used_count');
+     expect(columnNames).toContain('revoked_at');
+     expect(columnNames).toContain('revoked_by');
+     expect(columnNames).toContain('revoked_reason');
+     expect(columnNames).toContain('metadata');
+
+     // Verify: View exists
+     const views = db
+       .prepare("SELECT name FROM sqlite_master WHERE type='view'")
+       .all() as Array<{ name: string }>;
+     expect(views.find((v) => v.name === 'v_active_tokens')).toBeDefined();
+
+     // Verify: Indexes exist
+     const indexes = db
+       .prepare("SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='tokens'")
+       .all() as Array<{ name: string }>;
+     const indexNames = indexes.map((idx) => idx.name);
+     expect(indexNames).toContain('idx_tokens_jti');
+     expect(indexNames).toContain('idx_tokens_expires_at');
+     expect(indexNames).toContain('idx_tokens_revoked_at');
+
+     dbManager.close();
+   });
+
+   it('IV5: Database survives restarts', async () => {
+     // Setup: Create database and run migrations
+     await MigrationRunner.runMigrations(testDbPath);
+
+     let dbManager = DatabaseManager.getInstance();
+     let db = dbManager.connect(testDbPath);
+     let repository = new TokenRepository(db);
+
+     // Test: Create token
+     const tokenInput = {
+       jti: 'persistence-test',
+       public_key: 'test-key',
+       expires_at: new Date(Date.now() + 3600000).toISOString(),
+       max_uses: 5,
+       metadata: '{"test": "data"}',
+     };
+
+     repository.create(tokenInput);
+
+     // Increment usage
+     repository.incrementUsage('persistence-test');
+     repository.incrementUsage('persistence-test');
+
+     // Verify token exists with used_count = 2
+     let token = repository.findByJti('persistence-test');
+     expect(token).not.toBeNull();
+     expect(token!.used_count).toBe(2);
+
+     // Test: Close database connection
+     dbManager.close();
+
+     // Simulate restart: Reopen database connection
+     // Need to create new instance since singleton pattern
+     const DatabaseManagerClass = DatabaseManager as any;
+     DatabaseManagerClass.instance = undefined;
+
+     dbManager = DatabaseManager.getInstance();
+     db = dbManager.connect(testDbPath);
+     repository = new TokenRepository(db);
+
+     // Test: Verify token still exists with same data
+     token = repository.findByJti('persistence-test');
+
+     expect(token).not.toBeNull();
+     expect(token!.jti).toBe('persistence-test');
+     expect(token!.public_key).toBe('test-key');
+     expect(token!.max_uses).toBe(5);
+     expect(token!.used_count).toBe(2); // Persisted usage count
+     expect(token!.metadata).toBe('{"test": "data"}');
+
+     // Test: Continue operations after restart
+     repository.incrementUsage('persistence-test');
+     token = repository.findByJti('persistence-test');
+     expect(token!.used_count).toBe(3);
+
+     dbManager.close();
+   });
+ });
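
Taken together, these integration tests pin down the repository surface they depend on. Below is a minimal TypeScript sketch of that assumed shape, inferred only from the calls and assertions above; the package's real declarations may differ in naming, optionality, and types.

  // Assumed shape only, reconstructed from the integration tests above.
  interface TokenRecord {
    id: number;
    jti: string;
    public_key: string;
    expires_at: string;          // ISO timestamp
    max_uses: number | null;     // default when omitted is not visible in this diff
    used_count: number;
    revoked_at: string | null;
    revoked_by: string | null;
    revoked_reason: string | null;
    metadata: string | null;     // JSON string
  }

  interface TokenRepositoryLike {
    create(input: { jti: string; public_key: string; expires_at: string; max_uses?: number; metadata?: string }): TokenRecord;
    findByJti(jti: string): TokenRecord | null;
    findActive(): TokenRecord[];
    revoke(jti: string, revokedBy: string, reason: string): void;
    incrementUsage(jti: string): void;
    count(): number;
  }

  interface TokenCleanupJobLike {
    // Returns the number of deleted rows. The IV3 fixtures imply old expired
    // tokens are purged while recently revoked ones (revoked ~30 days ago) are
    // kept for audit; the exact retention window is not visible in this diff.
    run(): number;
  }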
File without changes
@@ -0,0 +1,117 @@
+ import Database from 'better-sqlite3';
+ import { MigrationRunner } from '../../../src/data/migrations/migration.runner';
+
+ describe('MigrationRunner', () => {
+   let db: Database.Database;
+
+   beforeEach(() => {
+     // Create in-memory database
+     db = new Database(':memory:');
+   });
+
+   afterEach(() => {
+     db.close();
+   });
+
+   describe('run', () => {
+     it('should create migrations tracking table', async () => {
+       const runner = new MigrationRunner(db);
+       await runner.run();
+
+       const result = db
+         .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='migrations'")
+         .get();
+
+       expect(result).toBeDefined();
+     });
+
+     it('should track migration version', async () => {
+       const runner = new MigrationRunner(db);
+       await runner.run();
+
+       const result = db.prepare('SELECT version FROM migrations').all();
+       expect(Array.isArray(result)).toBe(true);
+     });
+
+     it('should not reapply migrations', async () => {
+       const runner = new MigrationRunner(db);
+
+       // First run
+       await runner.run();
+       const countAfterFirst = db.prepare('SELECT COUNT(*) as count FROM migrations').get() as {
+         count: number;
+       };
+
+       // Second run (should be idempotent)
+       await runner.run();
+       const countAfterSecond = db.prepare('SELECT COUNT(*) as count FROM migrations').get() as {
+         count: number;
+       };
+
+       expect(countAfterFirst.count).toBe(countAfterSecond.count);
+     });
+
+     it('should apply real migrations successfully', async () => {
+       const runner = new MigrationRunner(db);
+       await runner.run();
+
+       // Verify that the initial migration created the tokens table
+       const tables = db
+         .prepare("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
+         .all() as Array<{ name: string }>;
+
+       const tableNames = tables.map((t) => t.name);
+       expect(tableNames).toContain('tokens');
+       expect(tableNames).toContain('migrations');
+
+       // Verify views were created
+       const views = db
+         .prepare("SELECT name FROM sqlite_master WHERE type='view'")
+         .all() as Array<{ name: string }>;
+
+       const viewNames = views.map((v) => v.name);
+       expect(viewNames).toContain('v_active_tokens');
+     });
+   });
+
+   describe('getCurrentVersion', () => {
+     it('should return 0 for new database', async () => {
+       const runner = new MigrationRunner(db);
+       await runner.run();
+
+       const version = runner['getCurrentVersion']();
+       expect(version).toBeGreaterThanOrEqual(0);
+     });
+   });
+
+   describe('getMigrationVersion', () => {
+     it('should extract version from filename', () => {
+       const runner = new MigrationRunner(db);
+
+       const version1 = runner['getMigrationVersion']('001_initial_schema.sql');
+       expect(version1).toBe(1);
+
+       const version2 = runner['getMigrationVersion']('042_add_column.sql');
+       expect(version2).toBe(42);
+
+       const version3 = runner['getMigrationVersion']('0123_test.sql');
+       expect(version3).toBe(123);
+     });
+
+     it('should throw error for invalid filename', () => {
+       const runner = new MigrationRunner(db);
+
+       expect(() => runner['getMigrationVersion']('invalid_migration.sql')).toThrow();
+       expect(() => runner['getMigrationVersion']('no-number.sql')).toThrow();
+     });
+   });
+
+   describe('static runMigrations', () => {
+     it('should run migrations and close database', async () => {
+       const dbPath = ':memory:';
+
+       // This should not throw
+       await expect(MigrationRunner.runMigrations(dbPath)).resolves.not.toThrow();
+     });
+   });
+ });
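
The getMigrationVersion tests imply that the version is parsed from the leading digits of a migration filename such as 001_initial_schema.sql. A possible implementation consistent with those expectations, offered only as a sketch and not the package's actual code:

  // Sketch only: parse the numeric prefix of "NNN_description.sql"; filenames
  // without a leading digit prefix are rejected, matching the test cases above.
  function getMigrationVersion(filename: string): number {
    const match = /^(\d+)_/.exec(filename);
    if (!match) {
      throw new Error(`Cannot extract migration version from filename: ${filename}`);
    }
    return parseInt(match[1], 10);
  }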