@mereb/shared-packages 0.0.26 → 0.0.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/auth/jwks.js CHANGED
@@ -1,28 +1,22 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.initJwks = initJwks;
- exports.verifyJwt = verifyJwt;
- exports.extractUserId = extractUserId;
- exports.parseAuthHeader = parseAuthHeader;
- const jose_1 = require("jose");
+ import { createRemoteJWKSet, jwtVerify } from 'jose';
  let jwks;
- async function initJwks(issuer) {
- jwks = (0, jose_1.createRemoteJWKSet)(new URL(`${issuer.replace(/\/$/, '')}/protocol/openid-connect/certs`));
+ export async function initJwks(issuer) {
+ jwks = createRemoteJWKSet(new URL(`${issuer.replace(/\/$/, '')}/protocol/openid-connect/certs`));
  }
- async function verifyJwt(token, { issuer, audience }) {
+ export async function verifyJwt(token, { issuer, audience }) {
  if (!jwks) {
  await initJwks(issuer);
  }
- const { payload } = await (0, jose_1.jwtVerify)(token, jwks, {
+ const { payload } = await jwtVerify(token, jwks, {
  issuer,
  audience
  });
  return payload;
  }
- function extractUserId(payload) {
+ export function extractUserId(payload) {
  return typeof payload.sub === 'string' ? payload.sub : undefined;
  }
- function parseAuthHeader(headers) {
+ export function parseAuthHeader(headers) {
  const auth = headers.authorization;
  if (!auth || !auth.startsWith('Bearer ')) {
  return undefined;
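
The auth surface itself is unchanged by the ESM rewrite. As a rough consumer sketch (the issuer/audience values and the request shape are illustrative, and it assumes parseAuthHeader returns the bearer token when the header is well-formed, since the tail of that function is truncated in this diff):

import { initJwks, verifyJwt, extractUserId, parseAuthHeader } from '@mereb/shared-packages';

// Illustrative Keycloak-style issuer and audience; initJwks appends
// '/protocol/openid-connect/certs' to build the JWKS URL.
const issuer = 'https://keycloak.example.com/realms/demo';
const audience = 'demo-api';

export async function authenticate(headers: { authorization?: string }) {
  await initJwks(issuer); // optional pre-warm; verifyJwt also initializes lazily
  const token = parseAuthHeader(headers);
  if (!token) {
    return undefined;
  }
  const payload = await verifyJwt(token, { issuer, audience });
  return extractUserId(payload); // the 'sub' claim, or undefined
}
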
package/dist/cache/redis.js CHANGED
@@ -1,13 +1,6 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getRedisClient = getRedisClient;
- exports.disconnectRedis = disconnectRedis;
- const client_1 = require("@redis/client");
- const pino_1 = __importDefault(require("pino"));
- const logger = (0, pino_1.default)({ name: 'redis' });
+ import { createClient } from '@redis/client';
+ import pino from 'pino';
+ const logger = pino({ name: 'redis' });
  const DEFAULT_CONNECT_TIMEOUT_MS = 5_000;
  let client;
  function withTimeout(promise, timeoutMs) {
@@ -40,10 +33,10 @@ async function connectClient(instance, timeoutMs) {
  throw error;
  }
  }
- async function getRedisClient(options) {
+ export async function getRedisClient(options) {
  if (!client) {
  const timeoutMs = options.connectTimeoutMs ?? DEFAULT_CONNECT_TIMEOUT_MS;
- client = (0, client_1.createClient)({ url: options.url });
+ client = createClient({ url: options.url });
  client.on('error', (err) => {
  logger.error({ err }, 'Redis connection error');
  });
@@ -57,7 +50,7 @@ async function getRedisClient(options) {
  }
  return client;
  }
- async function disconnectRedis() {
+ export async function disconnectRedis() {
  if (client) {
  await client.disconnect();
  client = undefined;
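
The Redis helper's public API is unchanged; only the module format differs. A minimal usage sketch, with an illustrative local URL (getRedisClient memoizes the client, and disconnectRedis clears it):

import { getRedisClient, disconnectRedis } from '@mereb/shared-packages';

async function cacheDemo() {
  // URL and timeout are illustrative; connectTimeoutMs defaults to 5 000 ms.
  const redis = await getRedisClient({ url: 'redis://localhost:6379', connectTimeoutMs: 2_000 });
  await redis.set('greeting', 'hello');
  const value = await redis.get('greeting');
  await disconnectRedis(); // drops the cached instance so the next call reconnects
  return value;
}
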
package/dist/config/env.js CHANGED
@@ -1,23 +1,12 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.loadEnv = loadEnv;
- exports.loadThenGetEnvs = loadThenGetEnvs;
- exports.getEnv = getEnv;
- exports.mustGetEnv = mustGetEnv;
- exports.getBooleanEnv = getBooleanEnv;
- exports.getNumberEnv = getNumberEnv;
- const dotenv_1 = __importDefault(require("dotenv"));
+ import dotenv from 'dotenv';
  let loaded = false;
- function loadEnv(path) {
+ export function loadEnv(path) {
  if (!loaded) {
- dotenv_1.default.config(path ? { path } : undefined);
+ dotenv.config(path ? { path } : undefined);
  loaded = true;
  }
  }
- function loadThenGetEnvs(options) {
+ export function loadThenGetEnvs(options) {
  loadEnv(options.path);
  const envs = {};
  for (const spec of options.envs) {
@@ -38,14 +27,14 @@ function loadThenGetEnvs(options) {
  }
  return envs;
  }
- function getEnv(key, fallback) {
+ export function getEnv(key, fallback) {
  const value = process.env[key] ?? fallback;
  if (value === undefined) {
  throw new Error(`Missing required environment variable ${key}`);
  }
  return value;
  }
- function mustGetEnv(keys) {
+ export function mustGetEnv(keys) {
  return keys.reduce((acc, key) => {
  const value = process.env[key];
  if (value === undefined) {
@@ -55,14 +44,14 @@ function mustGetEnv(keys) {
  return acc;
  }, {});
  }
- function getBooleanEnv(key, fallback = false) {
+ export function getBooleanEnv(key, fallback = false) {
  const value = process.env[key];
  if (value === undefined) {
  return fallback;
  }
  return ['true', '1', 'yes', 'on'].includes(value.toLowerCase());
  }
- function getNumberEnv(key, fallback) {
+ export function getNumberEnv(key, fallback) {
  const value = process.env[key];
  if (value === undefined) {
  if (fallback === undefined) {
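
These helpers keep their previous behavior (getNumberEnv is truncated above, but its signature is unchanged). A small sketch with hypothetical variable names:

import { loadEnv, getEnv, mustGetEnv, getBooleanEnv, getNumberEnv } from '@mereb/shared-packages';

loadEnv(); // reads .env once; later calls are no-ops

const dbUrl = getEnv('DATABASE_URL', 'postgres://localhost:5432/app'); // fallback avoids the throw
const required = mustGetEnv(['KAFKA_BROKER', 'REDIS_URL']);            // throws if any key is missing
const debugEnabled = getBooleanEnv('DEBUG');                           // 'true' | '1' | 'yes' | 'on' → true
const port = getNumberEnv('PORT', 3000);
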
package/dist/index.d.ts CHANGED
@@ -1,9 +1,9 @@
- export * from './auth/jwks';
- export * from './cache/redis';
- export * from './config/env';
- export * from './logger';
- export type { Logger } from './logger';
- export * from './messaging/kafka';
- export * from './observability/otel';
- export * from './media/s3';
+ export * from './auth/jwks.js';
+ export * from './cache/redis.js';
+ export * from './config/env.js';
+ export * from './logger.js';
+ export type { Logger } from 'pino';
+ export * from './messaging/kafka.js';
+ export * from './observability/otel.js';
+ export * from './media/s3.js';
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,eAAe,CAAC;AAC9B,cAAc,cAAc,CAAC;AAC7B,cAAc,UAAU,CAAC;AACzB,YAAY,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AACvC,cAAc,mBAAmB,CAAC;AAClC,cAAc,sBAAsB,CAAC;AACrC,cAAc,YAAY,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,kBAAkB,CAAC;AACjC,cAAc,iBAAiB,CAAC;AAChC,cAAc,aAAa,CAAC;AAC5B,YAAY,EAAE,MAAM,EAAE,MAAM,MAAM,CAAC;AACnC,cAAc,sBAAsB,CAAC;AACrC,cAAc,yBAAyB,CAAC;AACxC,cAAc,eAAe,CAAC"}
package/dist/index.js CHANGED
@@ -1,23 +1,7 @@
- "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- __exportStar(require("./auth/jwks"), exports);
- __exportStar(require("./cache/redis"), exports);
- __exportStar(require("./config/env"), exports);
- __exportStar(require("./logger"), exports);
- __exportStar(require("./messaging/kafka"), exports);
- __exportStar(require("./observability/otel"), exports);
- __exportStar(require("./media/s3"), exports);
+ export * from './auth/jwks.js';
+ export * from './cache/redis.js';
+ export * from './config/env.js';
+ export * from './logger.js';
+ export * from './messaging/kafka.js';
+ export * from './observability/otel.js';
+ export * from './media/s3.js';
package/dist/logger.d.ts CHANGED
@@ -1,4 +1,3 @@
  import pino from 'pino';
- export type { Logger } from 'pino';
  export declare function createLogger(name: string): pino.Logger;
  //# sourceMappingURL=logger.d.ts.map
package/dist/logger.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,YAAY,EAAE,MAAM,EAAE,MAAM,MAAM,CAAC;AAEnC,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC,MAAM,CAWtD"}
+ {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AA+BxB,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC,MAAM,CAkDtD"}
package/dist/logger.js CHANGED
@@ -1,17 +1,54 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.createLogger = createLogger;
- const pino_1 = __importDefault(require("pino"));
- function createLogger(name) {
- console.log('Creating logger with name:', name);
- return (0, pino_1.default)({
+ import pino from 'pino';
+ import { fileURLToPath } from 'node:url';
+ import { dirname, join } from 'node:path';
+ function resolveSplunkConfig(name) {
+ const token = process.env.SPLUNK_HEC_TOKEN;
+ if (!token)
+ return null;
+ const env = process.env.NODE_ENV ?? 'development';
+ const defaultIndex = process.env.SPLUNK_HEC_INDEX ??
+ (env === 'production' ? 'logs-prd' : env === 'staging' ? 'logs-stg' : 'logs-dev');
+ const url = process.env.SPLUNK_HEC_URL ??
+ 'http://splunk-prd.platform-prd.svc.cluster.local:8088/services/collector/event';
+ return {
+ url,
+ token,
+ index: defaultIndex,
+ source: name
+ };
+ }
+ export function createLogger(name) {
+ const transports = [];
+ const splunkConfig = resolveSplunkConfig(name);
+ const baseLevel = process.env.LOG_LEVEL ?? 'info';
+ const isProd = process.env.NODE_ENV === 'production';
+ if (isProd) {
+ transports.push({ stream: process.stdout });
+ }
+ else {
+ const pretty = pino.transport({
+ target: 'pino-pretty',
+ options: { translateTime: 'SYS:standard' }
+ });
+ transports.push({ stream: pretty });
+ }
+ if (splunkConfig) {
+ const transportPath = join(dirname(fileURLToPath(import.meta.url)), 'transports', 'splunk-transport.js');
+ const splunkStream = pino.transport({
+ target: transportPath,
+ level: 'info',
+ options: splunkConfig
+ });
+ transports.push({ stream: splunkStream, level: 'info' });
+ }
+ const destination = transports.length === 1
+ ? transports[0].stream
+ : pino.multistream(transports.map((t) => ({
+ stream: t.stream,
+ level: t.level ?? baseLevel
+ })));
+ return pino({
  name,
- level: process.env.LOG_LEVEL ?? 'info',
- transport: process.env.NODE_ENV === 'production'
- ? undefined
- : { target: 'pino-pretty', options: { translateTime: 'SYS:standard' } }
- });
+ level: baseLevel
+ }, destination);
  }
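
Functionally, createLogger now fans a single pino instance out over multiple streams: stdout in production, pino-pretty otherwise, plus a Splunk HEC stream (at info and above) whenever SPLUNK_HEC_TOKEN is present. A sketch of opting into the Splunk stream; the HEC values are placeholders, and setting them in code is only for illustration, since they would normally come from the deployment environment:

import { createLogger } from '@mereb/shared-packages';

// Illustrative configuration; only SPLUNK_HEC_TOKEN is required to enable the Splunk stream.
process.env.SPLUNK_HEC_TOKEN = '00000000-0000-0000-0000-000000000000';
process.env.SPLUNK_HEC_INDEX = 'logs-dev'; // otherwise derived from NODE_ENV
process.env.SPLUNK_HEC_URL = 'https://hec.example.internal:8088/services/collector/event';
process.env.LOG_LEVEL = 'debug'; // base level for the stdout/pretty stream

const logger = createLogger('payments-service');
logger.debug('goes to the pretty/stdout stream only');
logger.info({ orderId: 'abc-123' }, 'also forwarded to Splunk');
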
package/dist/media/s3.js CHANGED
@@ -1,12 +1,8 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.signMediaUrl = signMediaUrl;
- exports.signUploadKey = signUploadKey;
- function signMediaUrl(key) {
+ export function signMediaUrl(key) {
  const base = process.env.MEDIA_CDN_ORIGIN ?? 'https://cdn.example.com';
  return `${base.replace(/\/$/, '')}/${key}`;
  }
- function signUploadKey(ownerId, filename) {
+ export function signUploadKey(ownerId, filename) {
  const timestamp = Date.now();
  return `users/${ownerId}/${timestamp}-${filename}`;
  }
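
Same API as before; a short usage sketch (owner id and filename are illustrative):

import { signMediaUrl, signUploadKey } from '@mereb/shared-packages';

const key = signUploadKey('user-42', 'avatar.png'); // e.g. 'users/user-42/1718000000000-avatar.png'
const url = signMediaUrl(key);                      // MEDIA_CDN_ORIGIN (or the default origin) + '/' + key
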
package/dist/messaging/kafka.js CHANGED
@@ -1,32 +1,26 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getKafka = getKafka;
- exports.getProducer = getProducer;
- exports.createConsumer = createConsumer;
- exports.disconnectProducer = disconnectProducer;
- const kafkajs_1 = require("kafkajs");
+ import { Kafka } from 'kafkajs';
  let kafkaInstance;
  let producerInstance;
- function getKafka(config) {
+ export function getKafka(config) {
  if (!kafkaInstance) {
- kafkaInstance = new kafkajs_1.Kafka(config);
+ kafkaInstance = new Kafka(config);
  }
  return kafkaInstance;
  }
- async function getProducer(config) {
+ export async function getProducer(config) {
  if (!producerInstance) {
  producerInstance = getKafka(config).producer();
  await producerInstance.connect();
  }
  return producerInstance;
  }
- async function createConsumer(config, groupId) {
+ export async function createConsumer(config, groupId) {
  const kafka = getKafka(config);
  const consumer = kafka.consumer({ groupId });
  await consumer.connect();
  return consumer;
  }
- async function disconnectProducer() {
+ export async function disconnectProducer() {
  if (producerInstance) {
  await producerInstance.disconnect();
  producerInstance = undefined;
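
The Kafka helpers are unchanged apart from the ESM syntax. A consumer-side sketch, with illustrative broker, topic, and group id (getProducer memoizes a connected producer; createConsumer returns a fresh connected consumer on each call):

import { getProducer, createConsumer, disconnectProducer } from '@mereb/shared-packages';

const kafkaConfig = { clientId: 'orders-service', brokers: ['localhost:9092'] }; // illustrative

async function run() {
  const producer = await getProducer(kafkaConfig);
  await producer.send({ topic: 'orders', messages: [{ value: 'order-created' }] });

  const consumer = await createConsumer(kafkaConfig, 'orders-workers');
  await consumer.subscribe({ topics: ['orders'], fromBeginning: false });
  await consumer.run({
    eachMessage: async ({ message }) => {
      console.log(message.value?.toString());
    }
  });

  await disconnectProducer();
}
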
package/dist/observability/otel.js CHANGED
@@ -1,28 +1,25 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.initTelemetry = initTelemetry;
- const api_1 = require("@opentelemetry/api");
- const resources_1 = require("@opentelemetry/resources");
- const sdk_trace_base_1 = require("@opentelemetry/sdk-trace-base");
- const sdk_trace_node_1 = require("@opentelemetry/sdk-trace-node");
- const exporter_trace_otlp_grpc_1 = require("@opentelemetry/exporter-trace-otlp-grpc");
- const instrumentation_1 = require("@opentelemetry/instrumentation");
- api_1.diag.setLogger(new api_1.DiagConsoleLogger(), api_1.DiagLogLevel.INFO);
+ import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';
+ import { Resource } from '@opentelemetry/resources';
+ import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
+ import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node';
+ import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-grpc';
+ import { registerInstrumentations } from '@opentelemetry/instrumentation';
+ diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO);
  let initialized = false;
- function initTelemetry({ serviceName, otlpEndpoint, instrumentations = [] }) {
+ export function initTelemetry({ serviceName, otlpEndpoint, instrumentations = [] }) {
  if (initialized) {
  return;
  }
- const resource = new resources_1.Resource({
+ const resource = new Resource({
  'service.name': serviceName
  });
- const provider = new sdk_trace_node_1.NodeTracerProvider({ resource });
+ const provider = new NodeTracerProvider({ resource });
  if (otlpEndpoint) {
- provider.addSpanProcessor(new sdk_trace_base_1.BatchSpanProcessor(new exporter_trace_otlp_grpc_1.OTLPTraceExporter({ url: otlpEndpoint })));
+ provider.addSpanProcessor(new BatchSpanProcessor(new OTLPTraceExporter({ url: otlpEndpoint })));
  }
  provider.register();
  if (instrumentations.length) {
- (0, instrumentation_1.registerInstrumentations)({ instrumentations });
+ registerInstrumentations({ instrumentations });
  }
  initialized = true;
  }
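
initTelemetry still guards against double initialization and only wires an exporter when otlpEndpoint is provided. A sketch; the collector endpoint is illustrative, and the HTTP instrumentation is an optional extra dependency the consumer would install separately:

import { initTelemetry } from '@mereb/shared-packages';
import { HttpInstrumentation } from '@opentelemetry/instrumentation-http';

initTelemetry({
  serviceName: 'orders-service',
  otlpEndpoint: 'http://otel-collector:4317', // gRPC OTLP endpoint; omit to skip the span processor
  instrumentations: [new HttpInstrumentation()]
});
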
package/dist/transports/splunk-transport.d.ts ADDED
@@ -0,0 +1,12 @@
+ import build from 'pino-abstract-transport';
+ type SplunkTransportOptions = {
+ url: string;
+ token: string;
+ index: string;
+ source: string;
+ sourcetype?: string;
+ timeoutMs?: number;
+ };
+ export default function splunkTransport(opts: SplunkTransportOptions): Promise<import("stream").Transform & build.OnUnknown>;
+ export {};
+ //# sourceMappingURL=splunk-transport.d.ts.map
package/dist/transports/splunk-transport.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"splunk-transport.d.ts","sourceRoot":"","sources":["../../src/transports/splunk-transport.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,yBAAyB,CAAC;AAE5C,KAAK,sBAAsB,GAAG;IAC1B,GAAG,EAAE,MAAM,CAAC;IACZ,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;CACtB,CAAC;AAEF,wBAA8B,eAAe,CAAE,IAAI,EAAE,sBAAsB,yDAyC1E"}
package/dist/transports/splunk-transport.js ADDED
@@ -0,0 +1,40 @@
+ import build from 'pino-abstract-transport';
+ export default async function splunkTransport(opts) {
+ const url = opts.url;
+ const token = opts.token;
+ const index = opts.index;
+ const source = opts.source;
+ const sourcetype = opts.sourcetype ?? 'json';
+ const timeoutMs = opts.timeoutMs ?? 5000;
+ if (!url || !token || !index) {
+ throw new Error('Splunk transport requires url, token, and index');
+ }
+ return build(async (stream) => {
+ for await (const chunk of stream) {
+ let payload;
+ try {
+ payload = JSON.parse(chunk);
+ }
+ catch {
+ continue;
+ }
+ const controller = AbortSignal.timeout(timeoutMs);
+ fetch(url, {
+ method: 'POST',
+ headers: {
+ Authorization: `Splunk ${token}`,
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ event: payload,
+ index,
+ source,
+ sourcetype
+ }),
+ signal: controller
+ }).catch((err) => {
+ console.error('Failed to send log to Splunk HEC', err);
+ });
+ }
+ });
+ }
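
This transport is what createLogger loads through pino.transport(), which runs the file in a worker thread and forwards each serialized log line to the Splunk HTTP Event Collector with a fire-and-forget fetch. It can also be driven directly; the sketch below assumes the package allows deep imports into dist/ (no exports map is visible in this diff) and uses placeholder HEC credentials:

import pino from 'pino';
import { createRequire } from 'node:module';

// Resolve the compiled transport to an absolute path, mirroring what createLogger does internally.
const require = createRequire(import.meta.url);
const target = require.resolve('@mereb/shared-packages/dist/transports/splunk-transport.js');

const transport = pino.transport({
  target,
  options: {
    url: 'https://hec.example.internal:8088/services/collector/event', // placeholder HEC endpoint
    token: '00000000-0000-0000-0000-000000000000',                     // placeholder token
    index: 'logs-dev',
    source: 'standalone-example'
  }
});

const logger = pino(transport);
logger.info('hello Splunk');
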
package/package.json CHANGED
@@ -1,6 +1,7 @@
  {
  "name": "@mereb/shared-packages",
- "version": "0.0.26",
+ "version": "0.0.28",
+ "type": "module",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "files": [
@@ -33,12 +34,15 @@
  "jose": "^5.2.3",
  "kafkajs": "^2.2.4",
  "pino": "^9.2.0",
+ "pino-abstract-transport": "^1.1.0",
  "pino-pretty": "^10.3.1"
  },
  "devDependencies": {
- "husky": "^9.1.7",
  "@types/node": "^20.12.7",
+ "@typescript-eslint/eslint-plugin": "^8.18.1",
+ "@typescript-eslint/parser": "^8.18.1",
  "eslint": "^8.57.0",
+ "husky": "^9.1.7",
  "rimraf": "^5.0.5",
  "typescript": "^5.4.5",
  "vitest": "^1.6.0"