nestlens 0.1.1 → 0.2.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/__tests__/core/storage/memory.storage.spec.d.ts +2 -0
- package/dist/__tests__/core/storage/memory.storage.spec.d.ts.map +1 -0
- package/dist/__tests__/core/storage/memory.storage.spec.js +450 -0
- package/dist/__tests__/core/storage/memory.storage.spec.js.map +1 -0
- package/dist/__tests__/core/storage/redis.storage.spec.d.ts +10 -0
- package/dist/__tests__/core/storage/redis.storage.spec.d.ts.map +1 -0
- package/dist/__tests__/core/storage/redis.storage.spec.js +660 -0
- package/dist/__tests__/core/storage/redis.storage.spec.js.map +1 -0
- package/dist/__tests__/core/storage/storage.factory.spec.d.ts +2 -0
- package/dist/__tests__/core/storage/storage.factory.spec.d.ts.map +1 -0
- package/dist/__tests__/core/storage/storage.factory.spec.js +151 -0
- package/dist/__tests__/core/storage/storage.factory.spec.js.map +1 -0
- package/dist/core/storage/index.d.ts +2 -1
- package/dist/core/storage/index.d.ts.map +1 -1
- package/dist/core/storage/index.js +17 -1
- package/dist/core/storage/index.js.map +1 -1
- package/dist/core/storage/memory.storage.d.ts +59 -0
- package/dist/core/storage/memory.storage.d.ts.map +1 -0
- package/dist/core/storage/memory.storage.js +629 -0
- package/dist/core/storage/memory.storage.js.map +1 -0
- package/dist/core/storage/redis.storage.d.ts +77 -0
- package/dist/core/storage/redis.storage.d.ts.map +1 -0
- package/dist/core/storage/redis.storage.js +595 -0
- package/dist/core/storage/redis.storage.js.map +1 -0
- package/dist/core/storage/storage.factory.d.ts +28 -0
- package/dist/core/storage/storage.factory.d.ts.map +1 -0
- package/dist/core/storage/storage.factory.js +169 -0
- package/dist/core/storage/storage.factory.js.map +1 -0
- package/dist/index.d.ts +3 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -1
- package/dist/index.js.map +1 -1
- package/dist/nestlens.config.d.ts +54 -0
- package/dist/nestlens.config.d.ts.map +1 -1
- package/dist/nestlens.config.js +16 -2
- package/dist/nestlens.config.js.map +1 -1
- package/dist/nestlens.module.js +3 -3
- package/dist/nestlens.module.js.map +1 -1
- package/package.json +19 -4
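
Everything below is compiled `dist/` output; the new storage layer's TypeScript sources are not part of the published package. As a quick orientation before the raw diff, here is a minimal sketch of how the new `RedisStorage` class might be used directly. The import path and the exact `Entry` shape are assumptions (the diff only shows compiled declarations); the constructor options, defaults, and method names come from the declarations and compiled code below.

import { RedisStorage } from 'nestlens'; // assumed re-export; the dist/index changes suggest it, but the path is not shown in this diff

async function demo(): Promise<void> {
    // RedisStorageConfig accepts either `url` or discrete connection options.
    const storage = new RedisStorage({
        host: 'localhost',      // default in the compiled code
        port: 6379,             // default in the compiled code
        keyPrefix: 'nestlens:', // default prefix applied to every key
    });

    await storage.initialize(); // lazily requires ioredis and connects

    // The full Entry type lives in ../../types and is not shown in this diff,
    // so the object below is illustrative only.
    const saved = await storage.save({
        type: 'log',
        requestId: 'req-123',
        payload: { level: 'warn', message: 'cache miss' },
    } as any);

    console.log(saved.id, saved.createdAt);
    await storage.close();
}
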
package/dist/core/storage/redis.storage.d.ts
@@ -0,0 +1,77 @@
+import { OnModuleDestroy } from '@nestjs/common';
+import { Entry, EntryFilter, EntryStats, EntryType, CursorPaginationParams, CursorPaginatedResponse, StorageStats, MonitoredTag, TagWithCount } from '../../types';
+import { StorageInterface } from './storage.interface';
+import { RedisStorageConfig } from '../../nestlens.config';
+/**
+ * Redis storage implementation for NestLens.
+ * Requires ioredis to be installed.
+ * Ideal for production environments with horizontal scaling.
+ *
+ * Redis Key Structure:
+ * - {prefix}entries:{id} - Hash storing entry data
+ * - {prefix}entries:all - Sorted set of all entry IDs (score = timestamp)
+ * - {prefix}entries:type:{type} - Sorted set of entry IDs by type
+ * - {prefix}entries:request:{requestId} - Set of entry IDs for a request
+ * - {prefix}entries:sequence - Counter for entry IDs
+ * - {prefix}tags:{entryId} - Set of tags for an entry
+ * - {prefix}tags:index:{tag} - Set of entry IDs with this tag
+ * - {prefix}tags:counts - Hash of tag -> count
+ * - {prefix}monitored - Hash of monitored tags
+ * - {prefix}monitored:sequence - Counter for monitored tag IDs
+ * - {prefix}family:{hash} - Set of entry IDs with this family hash
+ */
+export declare class RedisStorage implements StorageInterface, OnModuleDestroy {
+    private readonly logger;
+    private client;
+    private readonly keyPrefix;
+    private readonly config;
+    constructor(config?: RedisStorageConfig);
+    /**
+     * Builds a Redis key with the configured prefix
+     */
+    private key;
+    /**
+     * Lazily loads ioredis and creates a client
+     */
+    private loadRedisClient;
+    private getClient;
+    initialize(): Promise<void>;
+    save(entry: Entry): Promise<Entry>;
+    saveBatch(entries: Entry[]): Promise<Entry[]>;
+    find(filter: EntryFilter): Promise<Entry[]>;
+    findWithCursor(type: EntryType | undefined, params: CursorPaginationParams): Promise<CursorPaginatedResponse<Entry>>;
+    findById(id: number): Promise<Entry | null>;
+    count(type?: EntryType): Promise<number>;
+    clear(): Promise<void>;
+    close(): Promise<void>;
+    getLatestSequence(type?: EntryType): Promise<number | null>;
+    hasEntriesAfter(sequence: number, type?: EntryType): Promise<number>;
+    getStats(): Promise<EntryStats>;
+    getStorageStats(): Promise<StorageStats>;
+    prune(before: Date): Promise<number>;
+    pruneByType(type: EntryType, before: Date): Promise<number>;
+    addTags(entryId: number, tags: string[]): Promise<void>;
+    removeTags(entryId: number, tags: string[]): Promise<void>;
+    getEntryTags(entryId: number): Promise<string[]>;
+    getAllTags(): Promise<TagWithCount[]>;
+    findByTags(tags: string[], logic?: 'AND' | 'OR', limit?: number): Promise<Entry[]>;
+    addMonitoredTag(tag: string): Promise<MonitoredTag>;
+    removeMonitoredTag(tag: string): Promise<void>;
+    getMonitoredTags(): Promise<MonitoredTag[]>;
+    resolveEntry(id: number): Promise<void>;
+    unresolveEntry(id: number): Promise<void>;
+    updateFamilyHash(id: number, familyHash: string): Promise<void>;
+    findByFamilyHash(familyHash: string, limit?: number): Promise<Entry[]>;
+    getGroupedByFamilyHash(type?: EntryType, limit?: number): Promise<{
+        familyHash: string;
+        count: number;
+        latestEntry: Entry;
+    }[]>;
+    onModuleDestroy(): void;
+    private fetchEntriesByIds;
+    private hashToEntry;
+    private hydrateEntriesWithTags;
+    private deleteEntry;
+    private applyAdvancedFilters;
+}
+//# sourceMappingURL=redis.storage.d.ts.map
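
To make the documented key structure concrete, the following sketch (hypothetical id and requestId) lists the keys a single `save()` call writes with the default `nestlens:` prefix, mirroring the private `key()` helper visible in the compiled implementation further down:

// Re-creation of the private key() helper, for illustration only.
const keyPrefix = 'nestlens:';
const key = (...parts: string[]): string => keyPrefix + parts.join(':');

key('entries', 'sequence');          // INCR  -> allocates the next id, e.g. 42
key('entries', '42');                // HSET  -> id, type, requestId, payload, createdAt, familyHash, resolvedAt
key('entries', 'all');               // ZADD  -> score = Date.now(), member = '42'
key('entries', 'type', 'request');   // ZADD  -> same score, per-type index
key('entries', 'request', 'req-1');  // SADD  -> only written when the entry has a requestId
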
package/dist/core/storage/redis.storage.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"redis.storage.d.ts","sourceRoot":"","sources":["../../../src/core/storage/redis.storage.ts"],"names":[],"mappings":"AAAA,OAAO,EAAsB,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAErE,OAAO,EACL,KAAK,EACL,WAAW,EACX,UAAU,EACV,SAAS,EACT,sBAAsB,EACtB,uBAAuB,EACvB,YAAY,EACZ,YAAY,EACZ,YAAY,EACb,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAE3D;;;;;;;;;;;;;;;;;GAiBG;AACH,qBACa,YAAa,YAAW,gBAAgB,EAAE,eAAe;IACpE,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAiC;IACxD,OAAO,CAAC,MAAM,CAAsB;IACpC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAS;IACnC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAqB;gBAEhC,MAAM,GAAE,kBAAuB;IAK3C;;OAEG;IACH,OAAO,CAAC,GAAG;IAIX;;OAEG;YACW,eAAe;IAsB7B,OAAO,CAAC,SAAS;IAOX,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAO3B,IAAI,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC;IAqClC,SAAS,CAAC,OAAO,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IA0C7C,IAAI,CAAC,MAAM,EAAE,WAAW,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IAmC3C,cAAc,CAClB,IAAI,EAAE,SAAS,GAAG,SAAS,EAC3B,MAAM,EAAE,sBAAsB,GAC7B,OAAO,CAAC,uBAAuB,CAAC,KAAK,CAAC,CAAC;IAsDpC,QAAQ,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC;IAa3C,KAAK,CAAC,IAAI,CAAC,EAAE,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC;IAMxC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAStB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAUtB,iBAAiB,CAAC,IAAI,CAAC,EAAE,SAAS,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IAQ3D,eAAe,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC;IAMpE,QAAQ,IAAI,OAAO,CAAC,UAAU,CAAC;IAkC/B,eAAe,IAAI,OAAO,CAAC,YAAY,CAAC;IA+CxC,KAAK,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC;IAepC,WAAW,CAAC,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC;IAqB3D,OAAO,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAavD,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAU1D,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAMhD,UAAU,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;IAUrC,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,KAAK,GAAE,KAAK,GAAG,IAAW,EAAE,KAAK,SAAK,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IAsBpF,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC;IAmBnD,kBAAkB,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAK9C,gBAAgB,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;IAY3C,YAAY,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IASvC,cAAc,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAOzC,gBAAgB,CAAC,EAAE,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAM/D,gBAAgB,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,SAAK,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IAWlE,sBAAsB,CAC1B,IAAI,CAAC,EAAE,SAAS,EAChB,KAAK,SAAK,GACT,OAAO,CAAC;QAAE,UAAU,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAC;QAAC,WAAW,EAAE,KAAK,CAAA;KAAE,EAAE,CAAC;IAqCvE,eAAe,IAAI,IAAI;YAQT,iBAAiB;IAyB/B,OAAO,CAAC,WAAW;YAgBL,sBAAsB;YAYtB,WAAW;IA6BzB,OAAO,CAAC,oBAAoB;CA0B7B"}

package/dist/core/storage/redis.storage.js
@@ -0,0 +1,595 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
+    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+    return c > 3 && r && Object.defineProperty(target, key, r), r;
+};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __metadata = (this && this.__metadata) || function (k, v) {
+    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
+};
+var RedisStorage_1;
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RedisStorage = void 0;
+const common_1 = require("@nestjs/common");
+/**
+ * Redis storage implementation for NestLens.
+ * Requires ioredis to be installed.
+ * Ideal for production environments with horizontal scaling.
+ *
+ * Redis Key Structure:
+ * - {prefix}entries:{id} - Hash storing entry data
+ * - {prefix}entries:all - Sorted set of all entry IDs (score = timestamp)
+ * - {prefix}entries:type:{type} - Sorted set of entry IDs by type
+ * - {prefix}entries:request:{requestId} - Set of entry IDs for a request
+ * - {prefix}entries:sequence - Counter for entry IDs
+ * - {prefix}tags:{entryId} - Set of tags for an entry
+ * - {prefix}tags:index:{tag} - Set of entry IDs with this tag
+ * - {prefix}tags:counts - Hash of tag -> count
+ * - {prefix}monitored - Hash of monitored tags
+ * - {prefix}monitored:sequence - Counter for monitored tag IDs
+ * - {prefix}family:{hash} - Set of entry IDs with this family hash
+ */
+let RedisStorage = RedisStorage_1 = class RedisStorage {
+    constructor(config = {}) {
+        this.logger = new common_1.Logger(RedisStorage_1.name);
+        this.client = null;
+        this.config = config;
+        this.keyPrefix = config.keyPrefix ?? 'nestlens:';
+    }
+    /**
+     * Builds a Redis key with the configured prefix
+     */
+    key(...parts) {
+        return this.keyPrefix + parts.join(':');
+    }
+    /**
+     * Lazily loads ioredis and creates a client
+     */
+    async loadRedisClient() {
+        try {
+            // Dynamic import - ioredis is an optional peer dependency
+            const { default: RedisClient } = await Promise.resolve().then(() => __importStar(require('ioredis')));
+            if (this.config.url) {
+                return new RedisClient(this.config.url);
+            }
+            return new RedisClient({
+                host: this.config.host ?? 'localhost',
+                port: this.config.port ?? 6379,
+                password: this.config.password,
+                db: this.config.db ?? 0,
+            });
+        }
+        catch (error) {
+            throw new Error('ioredis is required for Redis storage. Install it with: npm install ioredis');
+        }
+    }
+    getClient() {
+        if (!this.client) {
+            throw new Error('Redis client not initialized. Call initialize() first.');
+        }
+        return this.client;
+    }
+    async initialize() {
+        this.client = await this.loadRedisClient();
+        this.logger.log('Redis storage initialized');
+    }
+    // ==================== Core CRUD Operations ====================
+    async save(entry) {
+        const client = this.getClient();
+        const id = await client.incr(this.key('entries', 'sequence'));
+        const createdAt = new Date().toISOString();
+        const timestamp = Date.now();
+        const savedEntry = {
+            ...entry,
+            id,
+            createdAt,
+        };
+        // Store entry as hash
+        await client.hset(this.key('entries', String(id)), 'id', String(id), 'type', entry.type, 'requestId', entry.requestId ?? '', 'payload', JSON.stringify(entry.payload), 'createdAt', createdAt, 'familyHash', entry.familyHash ?? '', 'resolvedAt', entry.resolvedAt ?? '');
+        // Add to sorted sets for indexing
+        await client.zadd(this.key('entries', 'all'), timestamp, String(id));
+        await client.zadd(this.key('entries', 'type', entry.type), timestamp, String(id));
+        // Add to request index if applicable
+        if (entry.requestId) {
+            await client.sadd(this.key('entries', 'request', entry.requestId), String(id));
+        }
+        return savedEntry;
+    }
+    async saveBatch(entries) {
+        if (entries.length === 0)
+            return [];
+        const client = this.getClient();
+        const pipeline = client.pipeline();
+        const results = [];
+        // Pre-fetch IDs
+        const startId = await client.incrby(this.key('entries', 'sequence'), entries.length);
+        const timestamp = Date.now();
+        for (let i = 0; i < entries.length; i++) {
+            const entry = entries[i];
+            const id = startId - entries.length + 1 + i;
+            const createdAt = new Date().toISOString();
+            const savedEntry = { ...entry, id, createdAt };
+            results.push(savedEntry);
+            pipeline.hset(this.key('entries', String(id)), 'id', String(id), 'type', entry.type, 'requestId', entry.requestId ?? '', 'payload', JSON.stringify(entry.payload), 'createdAt', createdAt, 'familyHash', entry.familyHash ?? '', 'resolvedAt', entry.resolvedAt ?? '');
+            pipeline.zadd(this.key('entries', 'all'), timestamp + i, String(id));
+            pipeline.zadd(this.key('entries', 'type', entry.type), timestamp + i, String(id));
+            if (entry.requestId) {
+                pipeline.sadd(this.key('entries', 'request', entry.requestId), String(id));
+            }
+        }
+        await pipeline.exec();
+        return results;
+    }
+    async find(filter) {
+        const client = this.getClient();
+        let ids;
+        if (filter.requestId) {
+            ids = await client.smembers(this.key('entries', 'request', filter.requestId));
+        }
+        else if (filter.type) {
+            const start = filter.offset ?? 0;
+            const end = start + (filter.limit ?? 100) - 1;
+            ids = await client.zrevrange(this.key('entries', 'type', filter.type), start, end);
+        }
+        else {
+            const start = filter.offset ?? 0;
+            const end = start + (filter.limit ?? 100) - 1;
+            ids = await client.zrevrange(this.key('entries', 'all'), start, end);
+        }
+        if (ids.length === 0)
+            return [];
+        const entries = await this.fetchEntriesByIds(ids);
+        // Apply date filters
+        let filtered = entries;
+        if (filter.from) {
+            const fromTime = filter.from.getTime();
+            filtered = filtered.filter((e) => new Date(e.createdAt).getTime() >= fromTime);
+        }
+        if (filter.to) {
+            const toTime = filter.to.getTime();
+            filtered = filtered.filter((e) => new Date(e.createdAt).getTime() <= toTime);
+        }
+        return this.hydrateEntriesWithTags(filtered);
+    }
+    async findWithCursor(type, params) {
+        const client = this.getClient();
+        const limit = params.limit ?? 50;
+        const indexKey = type ? this.key('entries', 'type', type) : this.key('entries', 'all');
+        let ids;
+        if (params.beforeSequence !== undefined) {
+            ids = await client.zrevrangebyscore(indexKey, `(${params.beforeSequence}`, '-inf', 'LIMIT', '0', String(limit + 1));
+        }
+        else if (params.afterSequence !== undefined) {
+            ids = await client.zrangebyscore(indexKey, `(${params.afterSequence}`, '+inf', 'LIMIT', '0', String(limit + 1));
+        }
+        else {
+            ids = await client.zrevrange(indexKey, 0, limit);
+        }
+        const hasMore = ids.length > limit;
+        if (hasMore)
+            ids = ids.slice(0, limit);
+        if (params.afterSequence !== undefined) {
+            ids.reverse();
+        }
+        let entries = await this.fetchEntriesByIds(ids);
+        // Apply advanced filters
+        if (params.filters) {
+            entries = this.applyAdvancedFilters(entries, params.filters);
+        }
+        const hydratedEntries = await this.hydrateEntriesWithTags(entries);
+        const total = await client.zcard(indexKey);
+        return {
+            data: hydratedEntries,
+            meta: {
+                hasMore,
+                oldestSequence: hydratedEntries.length > 0 ? hydratedEntries[hydratedEntries.length - 1].id : null,
+                newestSequence: hydratedEntries.length > 0 ? hydratedEntries[0].id : null,
+                total,
+            },
+        };
+    }
+    async findById(id) {
+        const client = this.getClient();
+        const hash = await client.hgetall(this.key('entries', String(id)));
+        if (!hash || !hash.id)
+            return null;
+        const entry = this.hashToEntry(hash);
+        if (!entry)
+            return null;
+        const [hydrated] = await this.hydrateEntriesWithTags([entry]);
+        return hydrated;
+    }
+    async count(type) {
+        const client = this.getClient();
+        const key = type ? this.key('entries', 'type', type) : this.key('entries', 'all');
+        return client.zcard(key);
+    }
+    async clear() {
+        const client = this.getClient();
+        const keys = await client.keys(this.keyPrefix + '*');
+        if (keys.length > 0) {
+            await client.del(...keys);
+        }
+        this.logger.log('Storage cleared');
+    }
+    async close() {
+        if (this.client) {
+            await this.client.quit();
+            this.client = null;
+        }
+        this.logger.log('Redis storage closed');
+    }
+    // ==================== Statistics ====================
+    async getLatestSequence(type) {
+        const client = this.getClient();
+        const key = type ? this.key('entries', 'type', type) : this.key('entries', 'all');
+        const result = await client.zrevrange(key, 0, 0);
+        return result.length > 0 ? parseInt(result[0], 10) : null;
+    }
+    async hasEntriesAfter(sequence, type) {
+        const client = this.getClient();
+        const key = type ? this.key('entries', 'type', type) : this.key('entries', 'all');
+        return client.zcount(key, `(${sequence}`, '+inf');
+    }
+    async getStats() {
+        const client = this.getClient();
+        // Get counts by type
+        const types = [
+            'request', 'query', 'exception', 'log', 'cache', 'event', 'job',
+            'schedule', 'mail', 'http-client', 'redis', 'model', 'notification',
+            'view', 'command', 'gate', 'batch', 'dump',
+        ];
+        const byType = {};
+        let total = 0;
+        for (const type of types) {
+            const count = await client.zcard(this.key('entries', 'type', type));
+            if (count > 0) {
+                byType[type] = count;
+                total += count;
+            }
+        }
+        // For avgResponseTime and slowQueries, we'd need to iterate over entries
+        // which is expensive in Redis. Return undefined for now.
+        return {
+            total,
+            byType,
+            avgResponseTime: undefined,
+            slowQueries: 0,
+            exceptions: byType.exception || 0,
+            unresolvedExceptions: 0,
+        };
+    }
+    async getStorageStats() {
+        const client = this.getClient();
+        const types = [
+            'request', 'query', 'exception', 'log', 'cache', 'event', 'job',
+            'schedule', 'mail', 'http-client', 'redis', 'model', 'notification',
+            'view', 'command', 'gate', 'batch', 'dump',
+        ];
+        const byType = {};
+        let total = 0;
+        for (const type of types) {
+            const count = await client.zcard(this.key('entries', 'type', type));
+            if (count > 0) {
+                byType[type] = count;
+                total += count;
+            }
+        }
+        // Get oldest and newest entries
+        const oldest = await client.zrange(this.key('entries', 'all'), 0, 0);
+        const newest = await client.zrevrange(this.key('entries', 'all'), 0, 0);
+        let oldestEntry = null;
+        let newestEntry = null;
+        if (oldest.length > 0) {
+            const hash = await client.hget(this.key('entries', oldest[0]), 'createdAt');
+            oldestEntry = hash ?? null;
+        }
+        if (newest.length > 0) {
+            const hash = await client.hget(this.key('entries', newest[0]), 'createdAt');
+            newestEntry = hash ?? null;
+        }
+        return {
+            total,
+            byType,
+            oldestEntry,
+            newestEntry,
+            databaseSize: undefined, // Redis doesn't expose this easily
+        };
+    }
+    // ==================== Pruning ====================
+    async prune(before) {
+        const client = this.getClient();
+        const maxScore = before.getTime();
+        const ids = await client.zrangebyscore(this.key('entries', 'all'), '-inf', maxScore);
+        if (ids.length === 0)
+            return 0;
+        for (const id of ids) {
+            await this.deleteEntry(parseInt(id, 10));
+        }
+        this.logger.log(`Pruned ${ids.length} entries older than ${before.toISOString()}`);
+        return ids.length;
+    }
+    async pruneByType(type, before) {
+        const client = this.getClient();
+        const maxScore = before.getTime();
+        const ids = await client.zrangebyscore(this.key('entries', 'type', type), '-inf', maxScore);
+        if (ids.length === 0)
+            return 0;
+        for (const id of ids) {
+            await this.deleteEntry(parseInt(id, 10));
+        }
+        return ids.length;
+    }
+    // ==================== Tag Methods ====================
+    async addTags(entryId, tags) {
+        const client = this.getClient();
+        const pipeline = client.pipeline();
+        for (const tag of tags) {
+            pipeline.sadd(this.key('tags', String(entryId)), tag);
+            pipeline.sadd(this.key('tags', 'index', tag), String(entryId));
+            pipeline.hincrby(this.key('tags', 'counts'), tag, 1);
+        }
+        await pipeline.exec();
+    }
+    async removeTags(entryId, tags) {
+        const client = this.getClient();
+        for (const tag of tags) {
+            await client.srem(this.key('tags', String(entryId)), tag);
+            await client.srem(this.key('tags', 'index', tag), String(entryId));
+            await client.hincrby(this.key('tags', 'counts'), tag, -1);
+        }
+    }
+    async getEntryTags(entryId) {
+        const client = this.getClient();
+        const tags = await client.smembers(this.key('tags', String(entryId)));
+        return tags.sort();
+    }
+    async getAllTags() {
+        const client = this.getClient();
+        const counts = await client.hgetall(this.key('tags', 'counts'));
+        return Object.entries(counts)
+            .map(([tag, count]) => ({ tag, count: parseInt(count, 10) }))
+            .filter((t) => t.count > 0)
+            .sort((a, b) => b.count - a.count || a.tag.localeCompare(b.tag));
+    }
+    async findByTags(tags, logic = 'OR', limit = 50) {
+        if (tags.length === 0)
+            return [];
+        const client = this.getClient();
+        const tagKeys = tags.map((t) => this.key('tags', 'index', t));
+        let ids;
+        if (logic === 'AND') {
+            ids = await client.sinter(...tagKeys);
+        }
+        else {
+            ids = await client.sunion(...tagKeys);
+        }
+        if (ids.length === 0)
+            return [];
+        const entries = await this.fetchEntriesByIds(ids.slice(0, limit));
+        const sorted = entries.sort((a, b) => b.id - a.id);
+        return this.hydrateEntriesWithTags(sorted);
+    }
+    // ==================== Monitored Tags ====================
+    async addMonitoredTag(tag) {
+        const client = this.getClient();
+        const existing = await client.hget(this.key('monitored'), tag);
+        if (existing) {
+            return JSON.parse(existing);
+        }
+        const id = await client.incr(this.key('monitored', 'sequence'));
+        const monitored = {
+            id,
+            tag,
+            createdAt: new Date().toISOString(),
+        };
+        await client.hset(this.key('monitored'), tag, JSON.stringify(monitored));
+        return monitored;
+    }
+    async removeMonitoredTag(tag) {
+        const client = this.getClient();
+        await client.hdel(this.key('monitored'), tag);
+    }
+    async getMonitoredTags() {
+        const client = this.getClient();
+        const all = await client.hgetall(this.key('monitored'));
+        return Object.values(all)
+            .filter((v) => v && v !== '')
+            .map((v) => JSON.parse(v))
+            .sort((a, b) => a.tag.localeCompare(b.tag));
+    }
+    // ==================== Resolution ====================
+    async resolveEntry(id) {
+        const client = this.getClient();
+        await client.hset(this.key('entries', String(id)), 'resolvedAt', new Date().toISOString());
+    }
+    async unresolveEntry(id) {
+        const client = this.getClient();
+        await client.hset(this.key('entries', String(id)), 'resolvedAt', '');
+    }
+    // ==================== Family Hash ====================
+    async updateFamilyHash(id, familyHash) {
+        const client = this.getClient();
+        await client.hset(this.key('entries', String(id)), 'familyHash', familyHash);
+        await client.sadd(this.key('family', familyHash), String(id));
+    }
+    async findByFamilyHash(familyHash, limit = 50) {
+        const client = this.getClient();
+        const ids = await client.smembers(this.key('family', familyHash));
+        if (ids.length === 0)
+            return [];
+        const entries = await this.fetchEntriesByIds(ids);
+        const sorted = entries.sort((a, b) => b.id - a.id).slice(0, limit);
+        return this.hydrateEntriesWithTags(sorted);
+    }
+    async getGroupedByFamilyHash(type, limit = 50) {
+        const client = this.getClient();
+        // Get all family hash keys
+        const familyKeys = await client.keys(this.key('family', '*'));
+        const groups = [];
+        for (const key of familyKeys) {
+            const familyHash = key.replace(this.key('family', ''), '');
+            const ids = await client.smembers(key);
+            if (ids.length === 0)
+                continue;
+            // Get entries for this family
+            const entries = await this.fetchEntriesByIds(ids);
+            const filtered = type ? entries.filter((e) => e.type === type) : entries;
+            if (filtered.length === 0)
+                continue;
+            // Sort and get latest
+            filtered.sort((a, b) => b.id - a.id);
+            const [latestEntry] = await this.hydrateEntriesWithTags([filtered[0]]);
+            groups.push({
+                familyHash,
+                count: filtered.length,
+                latestEntry,
+            });
+        }
+        return groups
+            .sort((a, b) => b.count - a.count || b.latestEntry.id - a.latestEntry.id)
+            .slice(0, limit);
+    }
+    // ==================== Lifecycle ====================
+    onModuleDestroy() {
+        this.close().catch((err) => {
+            this.logger.error('Error closing Redis connection', err);
+        });
+    }
+    // ==================== Private Helpers ====================
+    async fetchEntriesByIds(ids) {
+        if (ids.length === 0)
+            return [];
+        const client = this.getClient();
+        const pipeline = client.pipeline();
+        for (const id of ids) {
+            pipeline.hgetall(this.key('entries', id));
+        }
+        const results = await pipeline.exec();
+        const entries = [];
+        for (const [err, data] of results ?? []) {
+            if (err || !data || typeof data !== 'object')
+                continue;
+            const hash = data;
+            if (!hash.id)
+                continue;
+            const entry = this.hashToEntry(hash);
+            if (entry)
+                entries.push(entry);
+        }
+        return entries;
+    }
+    hashToEntry(hash) {
+        try {
+            return {
+                id: parseInt(hash.id, 10),
+                type: hash.type,
+                requestId: hash.requestId || undefined,
+                payload: JSON.parse(hash.payload || '{}'),
+                createdAt: hash.createdAt,
+                familyHash: hash.familyHash || undefined,
+                resolvedAt: hash.resolvedAt || undefined,
+            };
+        }
+        catch {
+            return null;
+        }
+    }
+    async hydrateEntriesWithTags(entries) {
+        const client = this.getClient();
+        const result = [];
+        for (const entry of entries) {
+            const tags = await client.smembers(this.key('tags', String(entry.id)));
+            result.push({ ...entry, tags: tags.sort() });
+        }
+        return result;
+    }
+    async deleteEntry(id) {
+        const client = this.getClient();
+        // Get entry to find its type
+        const hash = await client.hgetall(this.key('entries', String(id)));
+        if (!hash || !hash.type)
+            return;
+        // Remove from indexes
+        await client.del(this.key('entries', String(id)));
+        await client.zrem(this.key('entries', 'all'), String(id));
+        await client.zrem(this.key('entries', 'type', hash.type), String(id));
+        if (hash.requestId) {
+            await client.srem(this.key('entries', 'request', hash.requestId), String(id));
+        }
+        if (hash.familyHash) {
+            await client.srem(this.key('family', hash.familyHash), String(id));
+        }
+        // Remove tags
+        const tags = await client.smembers(this.key('tags', String(id)));
+        for (const tag of tags) {
+            await client.srem(this.key('tags', 'index', tag), String(id));
+            await client.hincrby(this.key('tags', 'counts'), tag, -1);
+        }
+        await client.del(this.key('tags', String(id)));
+    }
+    applyAdvancedFilters(entries, filters) {
+        if (!filters)
+            return entries;
+        return entries.filter((entry) => {
+            const payload = entry.payload;
+            // Apply the same filter logic as MemoryStorage
+            if (filters.levels?.length && entry.type === 'log') {
+                if (!filters.levels.includes(payload.level))
+                    return false;
+            }
+            if (filters.search) {
+                const payloadStr = JSON.stringify(payload).toLowerCase();
+                if (!payloadStr.includes(filters.search.toLowerCase()))
+                    return false;
+            }
+            if (filters.resolved !== undefined) {
+                const isResolved = !!entry.resolvedAt;
+                if (isResolved !== filters.resolved)
+                    return false;
+            }
+            // Additional filters can be added as needed
+            return true;
+        });
+    }
+};
+exports.RedisStorage = RedisStorage;
+exports.RedisStorage = RedisStorage = RedisStorage_1 = __decorate([
+    (0, common_1.Injectable)(),
+    __metadata("design:paramtypes", [Object])
+], RedisStorage);
+//# sourceMappingURL=redis.storage.js.map
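
Finally, a short usage sketch for the tag APIs introduced above; the entry id and tag names are hypothetical, the import path is assumed as in the first sketch, and the method signatures are taken from the declaration file earlier in this diff:

import { RedisStorage } from 'nestlens'; // assumed re-export, as in the first sketch

async function tagDemo(storage: RedisStorage): Promise<void> {
    // Attach tags to an existing entry (id 42 is hypothetical).
    await storage.addTags(42, ['checkout', 'slow']);

    // All tags with usage counts, sorted by count (descending), then by name.
    const tags = await storage.getAllTags();

    // Entries carrying both tags (AND) or either tag (OR), newest first.
    const both = await storage.findByTags(['checkout', 'slow'], 'AND', 20);
    const either = await storage.findByTags(['checkout', 'slow'], 'OR', 20);

    console.log(tags, both.length, either.length);
}
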