@mereb/shared-packages 0.0.39 → 0.0.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +39 -20
- package/dist/messaging/kafka.d.ts.map +1 -1
- package/dist/messaging/kafka.js +23 -0
- package/dist/testing/db.d.ts +17 -0
- package/dist/testing/db.d.ts.map +1 -0
- package/dist/testing/db.js +41 -0
- package/dist/testing/kafka.d.ts +42 -0
- package/dist/testing/kafka.d.ts.map +1 -0
- package/dist/testing/kafka.js +129 -0
- package/dist/testing/oidc.d.ts +19 -0
- package/dist/testing/oidc.d.ts.map +1 -0
- package/dist/testing/oidc.js +63 -0
- package/package.json +15 -2
package/README.md
CHANGED
@@ -1,44 +1,63 @@
-#
+# @mereb/shared-packages
 
-
+Shared backend package consumed by `svc-*` services. It centralizes common auth, config, logging, telemetry, Kafka, Redis, event envelope, and media helper utilities.
+
+## What this package exports
+
+Primary root exports from `src/index.ts`:
+
+- `auth/jwks`: JWT verification helpers (`verifyJwt`, header parsing helpers)
+- `config/env`: env loading and typed env access (`loadEnv`, `getEnv`, `loadThenGetEnvs`, ...)
+- `logger`: pino logger setup + Fastify logger options
+- `observability/otel`: OpenTelemetry bootstrap helpers
+- `messaging/kafka`: Kafka client/config helpers
+- `cache/redis`: Redis client helpers
+- `events/envelope` and `events/publisher`: integration event envelopes + publish helpers
+- `media/s3`: media URL and upload-key helpers
+
+Extra testing exports:
+
+- `@mereb/shared-packages/testing/db`
+- `@mereb/shared-packages/testing/kafka`
+- `@mereb/shared-packages/testing/oidc`
 
 ## Local development
 
 ```bash
-pnpm install
 pnpm --filter @mereb/shared-packages lint
 pnpm --filter @mereb/shared-packages typecheck
+pnpm --filter @mereb/shared-packages test
 pnpm --filter @mereb/shared-packages build
 ```
 
-
+Build output is written to `dist/`.
 
-##
+## Versioning and publishing
 
-
+Version is controlled in `package.json` and published as `@mereb/shared-packages`.
 
-
-2. **Push to `main`** – Jenkins runs the branch build. After it lints/builds, the `release.autoTag` stage (configured with `allowDirty: true` because the build leaves `node_modules/` around) creates and pushes the next `v<semver>` tag.
-3. **Release stages** – in the same build, the new `releaseStages` block uses that tag (`RELEASE_TAG`) to run `pnpm publish` and then the `release.github` stage publishes release notes.
-4. **Jenkins job** – create a Multibranch Pipeline pointing at this repository. Tags are optional now, but still enable tag discovery if you want Jenkins to react to manual tags.
-5. **Credentials** – add an `npm-registry-token` secret text credential in Jenkins for npm publish, plus `github-credentials` for tagging/GitHub releases. The pipeline writes the npm token to `.npmrc` during the publish stage.
-6. **Registry override (optional)** – set the job environment variable `NPM_REGISTRY` if you publish somewhere other than `https://registry.npmjs.org`.
+Bump version:
 
-
+```bash
+pnpm --filter @mereb/shared-packages version:bump
+```
 
-
+Optional bump type:
 
-
+```bash
+pnpm --filter @mereb/shared-packages version:bump minor
+pnpm --filter @mereb/shared-packages version:bump major
+```
 
-
+The repository includes Jenkins + `.ci/ci.yml` automation for tag/release/publish flow.
 
-
+## Manual publish fallback
 
 ```bash
 cd services/shared
 pnpm install
-pnpm lint
+pnpm lint
+pnpm typecheck
+pnpm build
 NPM_TOKEN=... pnpm publish --registry https://registry.npmjs.org --no-git-checks
 ```
-
-Ensure the git tag matches the package version before publishing so the CI job can take over again on the next release.
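The three testing subpaths listed in the new README are importable directly by consumers. A smoke-level sketch; the function names and signatures come from the `.d.ts` files added later in this diff, while the broker address, prefix, and topic values are illustrative:

```ts
// Sketch only — illustrative values, real exports (see dist/testing/*.d.ts below).
import { createTemporarySchemaName } from '@mereb/shared-packages/testing/db';
import { ensureKafkaTopicExists } from '@mereb/shared-packages/testing/kafka';
import { startOidcTestIssuer } from '@mereb/shared-packages/testing/oidc';

const schema = createTemporarySchemaName('it'); // unique, SQL-safe schema name
await ensureKafkaTopicExists({ brokers: ['localhost:9092'], topic: 'it.smoke' });
const oidc = await startOidcTestIssuer(); // in-process JWKS endpoint
console.log(schema, oidc.jwksUrl);
await oidc.close();
```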
package/dist/messaging/kafka.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"kafka.d.ts","sourceRoot":"","sources":["../../src/messaging/kafka.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,KAAK,QAAQ,
+{"version":3,"file":"kafka.d.ts","sourceRoot":"","sources":["../../src/messaging/kafka.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,KAAK,QAAQ,EAEb,KAAK,WAAW,EAChB,KAAK,QAAQ,EACb,KAAK,EAER,MAAM,SAAS,CAAC;AAOjB,wBAAgB,QAAQ,CAAC,MAAM,EAAE,WAAW,SAG3C;AAED,wBAAsB,WAAW,CAAC,MAAM,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CASxE;AAED,wBAAsB,cAAc,CAAC,MAAM,EAAE,WAAW,EAAE,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,QAAQ,CAAC,CAK5F;AAED,MAAM,MAAM,qBAAqB,GAAG;IAChC;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAChC,CAAC;AAEF,wBAAgB,uBAAuB,CACnC,IAAI,GAAE,qBAA0B,GACjC,WAAW,GAAG,IAAI,CA4DpB;AAED,wBAAsB,iBAAiB,CACnC,MAAM,EAAE,WAAW,EACnB,KAAK,EAAE,MAAM,EACb,UAAU,SAAI,EACd,iBAAiB,SAAI,GACtB,OAAO,CAAC,IAAI,CAAC,CAiBf;AAED,wBAAsB,kBAAkB,kBAKvC"}
package/dist/messaging/kafka.js
CHANGED
@@ -1,4 +1,6 @@
 import { Kafka, Partitioners } from 'kafkajs';
+import net from 'node:net';
+import tls from 'node:tls';
 let kafkaInstance;
 let producerInstance;
 export function getKafka(config) {
@@ -35,9 +37,30 @@ export function buildKafkaConfigFromEnv(opts = {}) {
     const sslEnabled = (process.env.KAFKA_SSL ?? String(opts.sslDefault ?? false)) === 'true';
     const sslInsecure = (process.env.KAFKA_SSL_INSECURE ?? String(opts.sslInsecureDefault ?? false)) ===
         'true';
+    const portForwardHost = process.env.KAFKA_PORT_FORWARD_HOST;
+    const portForwardPortRaw = process.env.KAFKA_PORT_FORWARD_PORT;
+    const portForwardPort = portForwardPortRaw ? Number(portForwardPortRaw) : undefined;
+    const socketFactory = portForwardHost && portForwardPort
+        ? ({ host, ssl, onConnect }) => {
+            if (sslEnabled) {
+                return tls.connect({
+                    ...ssl,
+                    host: portForwardHost,
+                    port: portForwardPort,
+                    servername: host,
+                    rejectUnauthorized: !sslInsecure
+                }, onConnect);
+            }
+            return net.connect({
+                host: portForwardHost,
+                port: portForwardPort
+            }, onConnect);
+        }
+        : undefined;
     return {
         clientId,
         brokers,
+        socketFactory,
         ssl: sslEnabled
             ? {
                 rejectUnauthorized: !sslInsecure
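The new `socketFactory` dials the forwarded host/port for every broker while passing the advertised broker hostname as the TLS `servername`, so SNI and certificate validation still see the real broker name. A minimal sketch of driving it through the environment, assuming these helpers surface at the package root as the README's export list suggests:

```ts
import { buildKafkaConfigFromEnv, getKafka } from '@mereb/shared-packages';

// Route all broker connections through a local forward (e.g. kubectl port-forward).
// Only the KAFKA_* variable names are confirmed by this diff; values are illustrative.
process.env.KAFKA_PORT_FORWARD_HOST = '127.0.0.1';
process.env.KAFKA_PORT_FORWARD_PORT = '9094';
process.env.KAFKA_SSL = 'true';
process.env.KAFKA_SSL_INSECURE = 'true'; // tolerate self-signed certs locally

// buildKafkaConfigFromEnv() reads the variables at call time and attaches the
// socketFactory only when both KAFKA_PORT_FORWARD_* values are present.
const kafka = getKafka(buildKafkaConfigFromEnv());
```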
package/dist/testing/db.d.ts
ADDED
@@ -0,0 +1,17 @@
+export interface SqlExecutor {
+    $executeRawUnsafe(query: string): Promise<unknown>;
+}
+export interface RunPrismaMigrateDeployInput {
+    cwd: string;
+    schemaPath?: string;
+    databaseUrl: string;
+}
+export declare function createTemporarySchemaName(prefix: string): string;
+export declare function withSchema(databaseUrl: string, schema: string): string;
+export declare function provisionSchema(adminClient: SqlExecutor, input: {
+    schema: string;
+    ownerRole: string;
+}): Promise<void>;
+export declare function dropSchema(adminClient: SqlExecutor, schema: string): Promise<void>;
+export declare function runPrismaMigrateDeploy(input: RunPrismaMigrateDeployInput): Promise<void>;
+//# sourceMappingURL=db.d.ts.map
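`withSchema` rewrites only the `schema` query parameter of the DSN, the convention Prisma uses to select a Postgres schema. For example (illustrative values):

```ts
import { createTemporarySchemaName, withSchema } from '@mereb/shared-packages/testing/db';

// The generated suffix is time- and random-based; this value is illustrative.
const schema = createTemporarySchemaName('svc_orders');
const url = withSchema('postgresql://app:secret@localhost:5432/app', schema);
// url === 'postgresql://app:secret@localhost:5432/app?schema=svc_orders_<ts>_<rand>'
```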
package/dist/testing/db.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"db.d.ts","sourceRoot":"","sources":["../../src/testing/db.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,WAAW;IAC1B,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;CACpD;AAED,MAAM,WAAW,2BAA2B;IAC1C,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAIhE;AAED,wBAAgB,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,MAAM,CAItE;AAED,wBAAsB,eAAe,CACnC,WAAW,EAAE,WAAW,EACxB,KAAK,EAAE;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,GAC3C,OAAO,CAAC,IAAI,CAAC,CAOf;AAED,wBAAsB,UAAU,CAAC,WAAW,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAExF;AAED,wBAAsB,sBAAsB,CAC1C,KAAK,EAAE,2BAA2B,GACjC,OAAO,CAAC,IAAI,CAAC,CAmCf"}
package/dist/testing/db.js
ADDED
@@ -0,0 +1,41 @@
+import { spawn } from 'node:child_process';
+import { resolve } from 'node:path';
+export function createTemporarySchemaName(prefix) {
+    const timestamp = Date.now().toString(36);
+    const random = Math.random().toString(36).slice(2, 8);
+    return `${prefix}_${timestamp}_${random}`.replace(/[^a-zA-Z0-9_]/g, '_');
+}
+export function withSchema(databaseUrl, schema) {
+    const next = new URL(databaseUrl);
+    next.searchParams.set('schema', schema);
+    return next.toString();
+}
+export async function provisionSchema(adminClient, input) {
+    await adminClient.$executeRawUnsafe(`CREATE SCHEMA IF NOT EXISTS "${input.schema}" AUTHORIZATION "${input.ownerRole}"`);
+    await adminClient.$executeRawUnsafe(`GRANT USAGE, CREATE ON SCHEMA "${input.schema}" TO "${input.ownerRole}"`);
+}
+export async function dropSchema(adminClient, schema) {
+    await adminClient.$executeRawUnsafe(`DROP SCHEMA IF EXISTS "${schema}" CASCADE`);
+}
+export async function runPrismaMigrateDeploy(input) {
+    const prismaBinary = resolve(input.cwd, 'node_modules', '.bin', process.platform === 'win32' ? 'prisma.cmd' : 'prisma');
+    const schemaPath = input.schemaPath ?? resolve(input.cwd, 'prisma', 'schema.prisma');
+    await new Promise((resolvePromise, rejectPromise) => {
+        const child = spawn(prismaBinary, ['migrate', 'deploy', '--schema', schemaPath], {
+            cwd: input.cwd,
+            stdio: 'inherit',
+            env: {
+                ...process.env,
+                DATABASE_URL: input.databaseUrl
+            }
+        });
+        child.on('error', rejectPromise);
+        child.on('exit', (code) => {
+            if (code === 0) {
+                resolvePromise();
+                return;
+            }
+            rejectPromise(new Error(`prisma migrate deploy failed in ${input.cwd} with code ${code}`));
+        });
+    });
+}
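Taken together, these helpers support a per-suite schema lifecycle: mint a unique schema, create it with an admin connection, run migrations against it, and drop it afterwards. A sketch assuming a Prisma-based consumer; `PrismaClient`, the admin DSN, the `db` datasource name, and the `postgres` role are assumptions, not part of this package:

```ts
// Sketch, assuming a Prisma-based service with prisma/schema.prisma in cwd.
import { PrismaClient } from '@prisma/client';
import {
  createTemporarySchemaName,
  withSchema,
  provisionSchema,
  dropSchema,
  runPrismaMigrateDeploy
} from '@mereb/shared-packages/testing/db';

const adminUrl = 'postgresql://postgres:postgres@localhost:5432/app'; // assumed admin DSN
const admin = new PrismaClient({ datasources: { db: { url: adminUrl } } }); // 'db' datasource name assumed

const schema = createTemporarySchemaName('it');
const databaseUrl = withSchema(adminUrl, schema);

await provisionSchema(admin, { schema, ownerRole: 'postgres' });
await runPrismaMigrateDeploy({ cwd: process.cwd(), databaseUrl }); // cwd must hold node_modules/.bin/prisma
// ... run the suite against `databaseUrl` ...
await dropSchema(admin, schema);
await admin.$disconnect();
```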
package/dist/testing/kafka.d.ts
ADDED
@@ -0,0 +1,42 @@
+import { type KafkaConfig } from 'kafkajs';
+export interface EnsureKafkaTopicExistsInput {
+    brokers: string[];
+    topic: string;
+    clientId?: string;
+    partitions?: number;
+    replicationFactor?: number;
+    ssl?: KafkaConfig['ssl'];
+    socketFactory?: KafkaConfig['socketFactory'];
+}
+export interface WaitForKafkaMessageInput {
+    brokers: string[];
+    topic: string;
+    groupId: string;
+    clientId?: string;
+    timeoutMs?: number;
+    fromBeginning?: boolean;
+    ssl?: KafkaConfig['ssl'];
+    socketFactory?: KafkaConfig['socketFactory'];
+    predicate?: (message: KafkaMessageRecord) => boolean | Promise<boolean>;
+}
+export interface WaitForKafkaTopicMessagesInput {
+    brokers: string[];
+    topic: string;
+    minMessages?: number;
+    clientId?: string;
+    timeoutMs?: number;
+    pollIntervalMs?: number;
+    ssl?: KafkaConfig['ssl'];
+    socketFactory?: KafkaConfig['socketFactory'];
+}
+export interface KafkaMessageRecord {
+    key: string | null;
+    value: string;
+    headers: Record<string, string | undefined>;
+    partition: number;
+    offset: string;
+}
+export declare function ensureKafkaTopicExists(input: EnsureKafkaTopicExistsInput): Promise<void>;
+export declare function waitForKafkaMessage(input: WaitForKafkaMessageInput): Promise<KafkaMessageRecord>;
+export declare function waitForKafkaTopicMessages(input: WaitForKafkaTopicMessagesInput): Promise<number>;
+//# sourceMappingURL=kafka.d.ts.map
package/dist/testing/kafka.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"kafka.d.ts","sourceRoot":"","sources":["../../src/testing/kafka.ts"],"names":[],"mappings":"AAAA,OAAO,EAAwB,KAAK,WAAW,EAAE,MAAM,SAAS,CAAC;AAEjE,MAAM,WAAW,2BAA2B;IAC1C,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,GAAG,CAAC,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;IACzB,aAAa,CAAC,EAAE,WAAW,CAAC,eAAe,CAAC,CAAC;CAC9C;AAED,MAAM,WAAW,wBAAwB;IACvC,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,GAAG,CAAC,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;IACzB,aAAa,CAAC,EAAE,WAAW,CAAC,eAAe,CAAC,CAAC;IAC7C,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;CACzE;AAED,MAAM,WAAW,8BAA8B;IAC7C,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,GAAG,CAAC,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;IACzB,aAAa,CAAC,EAAE,WAAW,CAAC,eAAe,CAAC,CAAC;CAC9C;AAED,MAAM,WAAW,kBAAkB;IACjC,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC,CAAC;IAC5C,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;CAChB;AA8CD,wBAAsB,sBAAsB,CAC1C,KAAK,EAAE,2BAA2B,GACjC,OAAO,CAAC,IAAI,CAAC,CAqBf;AAED,wBAAsB,mBAAmB,CACvC,KAAK,EAAE,wBAAwB,GAC9B,OAAO,CAAC,kBAAkB,CAAC,CA4D7B;AAED,wBAAsB,yBAAyB,CAC7C,KAAK,EAAE,8BAA8B,GACpC,OAAO,CAAC,MAAM,CAAC,CA8BjB"}
package/dist/testing/kafka.js
ADDED
@@ -0,0 +1,129 @@
+import { Kafka } from 'kafkajs';
+function createKafkaConfig(input) {
+    return {
+        clientId: input.clientId ?? 'mereb-test',
+        brokers: input.brokers,
+        ssl: input.ssl,
+        socketFactory: input.socketFactory
+    };
+}
+function toHeaderValue(value) {
+    if (Array.isArray(value)) {
+        return toHeaderValue(value[0]);
+    }
+    if (Buffer.isBuffer(value)) {
+        return value.toString('utf8');
+    }
+    return value;
+}
+function toMessageRecord(input) {
+    return {
+        key: input.key?.toString('utf8') ?? null,
+        value: input.value?.toString('utf8') ?? '',
+        headers: Object.fromEntries(Object.entries(input.headers ?? {}).map(([key, value]) => [
+            key,
+            toHeaderValue(value)
+        ])),
+        partition: input.partition,
+        offset: input.offset
+    };
+}
+export async function ensureKafkaTopicExists(input) {
+    const kafka = new Kafka(createKafkaConfig(input));
+    const admin = kafka.admin();
+    await admin.connect();
+    try {
+        const topics = await admin.listTopics();
+        if (!topics.includes(input.topic)) {
+            await admin.createTopics({
+                topics: [
+                    {
+                        topic: input.topic,
+                        numPartitions: input.partitions ?? 1,
+                        replicationFactor: input.replicationFactor ?? 1
+                    }
+                ]
+            });
+        }
+    }
+    finally {
+        await admin.disconnect();
+    }
+}
+export async function waitForKafkaMessage(input) {
+    const kafka = new Kafka(createKafkaConfig(input));
+    const consumer = kafka.consumer({ groupId: input.groupId });
+    await consumer.connect();
+    await consumer.subscribe({
+        topic: input.topic,
+        fromBeginning: input.fromBeginning ?? false
+    });
+    return new Promise((resolvePromise, rejectPromise) => {
+        let settled = false;
+        const timeout = setTimeout(async () => {
+            if (settled) {
+                return;
+            }
+            settled = true;
+            await consumer.disconnect().catch(() => undefined);
+            rejectPromise(new Error(`Timed out waiting for Kafka message on ${input.topic}`));
+        }, input.timeoutMs ?? 30_000);
+        void consumer.run({
+            eachMessage: async ({ partition, message }) => {
+                if (settled) {
+                    return;
+                }
+                const candidate = toMessageRecord({
+                    key: message.key,
+                    value: message.value,
+                    headers: message.headers,
+                    partition,
+                    offset: message.offset
+                });
+                const matches = input.predicate
+                    ? await input.predicate(candidate)
+                    : true;
+                if (!matches) {
+                    return;
+                }
+                settled = true;
+                clearTimeout(timeout);
+                await consumer.disconnect();
+                resolvePromise(candidate);
+            }
+        }).catch(async (error) => {
+            if (settled) {
+                return;
+            }
+            settled = true;
+            clearTimeout(timeout);
+            await consumer.disconnect().catch(() => undefined);
+            rejectPromise(error);
+        });
+    });
+}
+export async function waitForKafkaTopicMessages(input) {
+    const kafka = new Kafka(createKafkaConfig(input));
+    const admin = kafka.admin();
+    await admin.connect();
+    const deadline = Date.now() + (input.timeoutMs ?? 30_000);
+    const minMessages = input.minMessages ?? 1;
+    const pollIntervalMs = input.pollIntervalMs ?? 250;
+    try {
+        while (Date.now() <= deadline) {
+            const offsets = await admin.fetchTopicOffsets(input.topic);
+            const messageCount = offsets.reduce((sum, offset) => {
+                const value = Number(offset.offset);
+                return sum + (Number.isNaN(value) ? 0 : value);
+            }, 0);
+            if (messageCount >= minMessages) {
+                return messageCount;
+            }
+            await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
+        }
+    }
+    finally {
+        await admin.disconnect();
+    }
+    throw new Error(`Timed out waiting for ${minMessages} Kafka message(s) on ${input.topic}`);
+}
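A sketch of the intended test flow: create the topic up front, register the waiter before triggering the system under test (since `fromBeginning` defaults to `false`), then assert on the matched record. The broker address, topic, and `event-type` header name are illustrative:

```ts
import { ensureKafkaTopicExists, waitForKafkaMessage } from '@mereb/shared-packages/testing/kafka';

const brokers = ['localhost:9092']; // illustrative
const topic = 'orders.order-created'; // illustrative

await ensureKafkaTopicExists({ brokers, topic, partitions: 1 });

// Start consuming first so the message published below is not missed.
const pending = waitForKafkaMessage({
  brokers,
  topic,
  groupId: `it-${Date.now()}`, // fresh group per run
  timeoutMs: 15_000,
  predicate: (message) => message.headers['event-type'] === 'order.created' // header name assumed
});
// ... exercise the service so it publishes to `topic` ...
const record = await pending;
console.log(record.key, record.value, record.offset);
```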
package/dist/testing/oidc.d.ts
ADDED
@@ -0,0 +1,19 @@
+export interface OidcTokenOptions {
+    subject?: string;
+    audience?: string | string[];
+    issuer?: string;
+    expiresIn?: string;
+    additionalClaims?: Record<string, unknown>;
+}
+export interface OidcTestIssuer {
+    issuer: string;
+    audience: string;
+    jwksUrl: string;
+    issueToken(options?: OidcTokenOptions): Promise<string>;
+    close(): Promise<void>;
+}
+export declare function startOidcTestIssuer(input?: {
+    issuerPath?: string;
+    audience?: string;
+}): Promise<OidcTestIssuer>;
+//# sourceMappingURL=oidc.d.ts.map
package/dist/testing/oidc.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"oidc.d.ts","sourceRoot":"","sources":["../../src/testing/oidc.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,gBAAgB;IAC/B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;IAC7B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CAC5C;AAED,MAAM,WAAW,cAAc;IAC7B,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,OAAO,CAAC,EAAE,gBAAgB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACxD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CACxB;AAED,wBAAsB,mBAAmB,CAAC,KAAK,GAAE;IAC/C,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACd,GAAG,OAAO,CAAC,cAAc,CAAC,CAoE/B"}
package/dist/testing/oidc.js
ADDED
@@ -0,0 +1,63 @@
+import { createServer } from 'node:http';
+import { generateKeyPair, exportJWK, SignJWT } from 'jose';
+export async function startOidcTestIssuer(input = {}) {
+    const issuerPath = input.issuerPath ?? '/realms/test';
+    const audience = input.audience ?? 'test-audience';
+    const { privateKey, publicKey } = await generateKeyPair('RS256');
+    const publicJwk = await exportJWK(publicKey);
+    const kid = 'mereb-test-key';
+    const jwk = {
+        ...publicJwk,
+        alg: 'RS256',
+        kid,
+        use: 'sig'
+    };
+    const server = createServer((request, response) => {
+        if (request.url === `${issuerPath}/protocol/openid-connect/certs`) {
+            response.writeHead(200, { 'content-type': 'application/json' });
+            response.end(JSON.stringify({ keys: [jwk] }));
+            return;
+        }
+        response.writeHead(404, { 'content-type': 'application/json' });
+        response.end(JSON.stringify({ error: 'Not found' }));
+    });
+    await new Promise((resolvePromise) => {
+        server.listen(0, '127.0.0.1', () => resolvePromise());
+    });
+    const address = server.address();
+    if (!address || typeof address === 'string') {
+        throw new Error('Failed to determine OIDC issuer address');
+    }
+    const issuer = `http://127.0.0.1:${address.port}${issuerPath}`;
+    return {
+        issuer,
+        audience,
+        jwksUrl: `${issuer}/protocol/openid-connect/certs`,
+        async issueToken(options = {}) {
+            const jwtAudience = options.audience ?? audience;
+            const jwtIssuer = options.issuer ?? issuer;
+            const subject = options.subject ?? 'user-1';
+            return new SignJWT({
+                sub: subject,
+                ...options.additionalClaims
+            })
+                .setProtectedHeader({ alg: 'RS256', kid })
+                .setIssuer(jwtIssuer)
+                .setAudience(jwtAudience)
+                .setIssuedAt()
+                .setExpirationTime(options.expiresIn ?? '5m')
+                .sign(privateKey);
+        },
+        async close() {
+            await new Promise((resolvePromise, rejectPromise) => {
+                server.close((error) => {
+                    if (error) {
+                        rejectPromise(error);
+                        return;
+                    }
+                    resolvePromise();
+                });
+            });
+        }
+    };
+}
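The issuer serves only a Keycloak-style JWKS path, which is exactly what JWKS-based verification needs. A sketch that mints a token and then verifies it with the same `jose` library this module imports; the audience and claim values are illustrative:

```ts
import { createRemoteJWKSet, jwtVerify } from 'jose';
import { startOidcTestIssuer } from '@mereb/shared-packages/testing/oidc';

const issuer = await startOidcTestIssuer({ audience: 'svc-orders' }); // audience illustrative
const token = await issuer.issueToken({
  subject: 'user-42',
  additionalClaims: { scope: 'orders:read' } // claim names illustrative
});

// Verify against the in-process JWKS endpoint, as a service's JWT check would.
const jwks = createRemoteJWKSet(new URL(issuer.jwksUrl));
const { payload } = await jwtVerify(token, jwks, {
  issuer: issuer.issuer,
  audience: issuer.audience
});
console.log(payload.sub); // "user-42"

await issuer.close();
```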
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mereb/shared-packages",
-  "version": "0.0.
+  "version": "0.0.41",
   "type": "module",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -11,6 +11,18 @@
     ".": {
       "import": "./dist/index.js",
       "types": "./dist/index.d.ts"
+    },
+    "./testing/db": {
+      "import": "./dist/testing/db.js",
+      "types": "./dist/testing/db.d.ts"
+    },
+    "./testing/kafka": {
+      "import": "./dist/testing/kafka.js",
+      "types": "./dist/testing/kafka.d.ts"
+    },
+    "./testing/oidc": {
+      "import": "./dist/testing/oidc.js",
+      "types": "./dist/testing/oidc.d.ts"
     }
   },
   "dependencies": {
@@ -42,6 +54,7 @@
     "@types/node": "^20.12.7",
     "@typescript-eslint/eslint-plugin": "^8.18.1",
     "@typescript-eslint/parser": "^8.18.1",
+    "@vitest/coverage-v8": "^1.6.0",
     "eslint": "^9.26.0",
     "husky": "^9.1.7",
     "rimraf": "^5.0.5",
@@ -52,7 +65,7 @@
     "build": "tsc -b",
     "clean": "rimraf dist",
     "lint": "eslint \"src/**/*.{ts,tsx}\"",
-    "test": "vitest run",
+    "test": "vitest run --coverage",
     "typecheck": "tsc --noEmit",
     "version:bump": "node ./scripts/bump-version.mjs"
   }
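One consequence of the `exports` additions: only `.` and the three `testing/*` subpaths are resolvable from outside the package, so the rest of `dist/` stays private to consumers. A sketch (the blocked deep path below is hypothetical):

```ts
import '@mereb/shared-packages';                // "." -> ./dist/index.js
import '@mereb/shared-packages/testing/kafka';  // -> ./dist/testing/kafka.js
// import '@mereb/shared-packages/dist/cache/redis.js';
//   ^ fails with ERR_PACKAGE_PATH_NOT_EXPORTED: the deep path (hypothetical
//     here) is not listed in "exports", so Node refuses to resolve it.
```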