@0xobelisk/graphql-server 1.2.0-pre.100

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. package/.turbo/turbo-build.log +8 -0
  2. package/DUAL_POOL_CONFIG.md +188 -0
  3. package/Dockerfile +35 -0
  4. package/LICENSE +92 -0
  5. package/README.md +487 -0
  6. package/dist/cli.d.ts +3 -0
  7. package/dist/cli.d.ts.map +1 -0
  8. package/dist/cli.js +206 -0
  9. package/dist/cli.js.map +1 -0
  10. package/dist/config/subscription-config.d.ts +80 -0
  11. package/dist/config/subscription-config.d.ts.map +1 -0
  12. package/dist/config/subscription-config.js +158 -0
  13. package/dist/config/subscription-config.js.map +1 -0
  14. package/dist/index.d.ts +1 -0
  15. package/dist/index.d.ts.map +1 -0
  16. package/dist/index.js +11 -0
  17. package/dist/index.js.map +1 -0
  18. package/dist/plugins/all-fields-filter-plugin.d.ts +4 -0
  19. package/dist/plugins/all-fields-filter-plugin.d.ts.map +1 -0
  20. package/dist/plugins/all-fields-filter-plugin.js +132 -0
  21. package/dist/plugins/all-fields-filter-plugin.js.map +1 -0
  22. package/dist/plugins/database-introspector.d.ts +23 -0
  23. package/dist/plugins/database-introspector.d.ts.map +1 -0
  24. package/dist/plugins/database-introspector.js +96 -0
  25. package/dist/plugins/database-introspector.js.map +1 -0
  26. package/dist/plugins/enhanced-playground.d.ts +9 -0
  27. package/dist/plugins/enhanced-playground.d.ts.map +1 -0
  28. package/dist/plugins/enhanced-playground.js +113 -0
  29. package/dist/plugins/enhanced-playground.js.map +1 -0
  30. package/dist/plugins/enhanced-server-manager.d.ts +29 -0
  31. package/dist/plugins/enhanced-server-manager.d.ts.map +1 -0
  32. package/dist/plugins/enhanced-server-manager.js +262 -0
  33. package/dist/plugins/enhanced-server-manager.js.map +1 -0
  34. package/dist/plugins/index.d.ts +9 -0
  35. package/dist/plugins/index.d.ts.map +1 -0
  36. package/dist/plugins/index.js +26 -0
  37. package/dist/plugins/index.js.map +1 -0
  38. package/dist/plugins/postgraphile-config.d.ts +94 -0
  39. package/dist/plugins/postgraphile-config.d.ts.map +1 -0
  40. package/dist/plugins/postgraphile-config.js +138 -0
  41. package/dist/plugins/postgraphile-config.js.map +1 -0
  42. package/dist/plugins/query-filter.d.ts +4 -0
  43. package/dist/plugins/query-filter.d.ts.map +1 -0
  44. package/dist/plugins/query-filter.js +42 -0
  45. package/dist/plugins/query-filter.js.map +1 -0
  46. package/dist/plugins/simple-naming.d.ts +4 -0
  47. package/dist/plugins/simple-naming.d.ts.map +1 -0
  48. package/dist/plugins/simple-naming.js +79 -0
  49. package/dist/plugins/simple-naming.js.map +1 -0
  50. package/dist/plugins/welcome-page.d.ts +11 -0
  51. package/dist/plugins/welcome-page.d.ts.map +1 -0
  52. package/dist/plugins/welcome-page.js +203 -0
  53. package/dist/plugins/welcome-page.js.map +1 -0
  54. package/dist/server.d.ts +21 -0
  55. package/dist/server.d.ts.map +1 -0
  56. package/dist/server.js +265 -0
  57. package/dist/server.js.map +1 -0
  58. package/dist/universal-subscriptions.d.ts +32 -0
  59. package/dist/universal-subscriptions.d.ts.map +1 -0
  60. package/dist/universal-subscriptions.js +318 -0
  61. package/dist/universal-subscriptions.js.map +1 -0
  62. package/dist/utils/logger/index.d.ts +80 -0
  63. package/dist/utils/logger/index.d.ts.map +1 -0
  64. package/dist/utils/logger/index.js +230 -0
  65. package/dist/utils/logger/index.js.map +1 -0
  66. package/docker-compose.yml +46 -0
  67. package/eslint.config.mjs +3 -0
  68. package/package.json +78 -0
  69. package/src/cli.ts +232 -0
  70. package/src/config/subscription-config.ts +243 -0
  71. package/src/index.ts +11 -0
  72. package/src/plugins/README.md +138 -0
  73. package/src/plugins/all-fields-filter-plugin.ts +158 -0
  74. package/src/plugins/database-introspector.ts +126 -0
  75. package/src/plugins/enhanced-playground.ts +121 -0
  76. package/src/plugins/enhanced-server-manager.ts +314 -0
  77. package/src/plugins/index.ts +9 -0
  78. package/src/plugins/postgraphile-config.ts +182 -0
  79. package/src/plugins/query-filter.ts +50 -0
  80. package/src/plugins/simple-naming.ts +105 -0
  81. package/src/plugins/welcome-page.ts +218 -0
  82. package/src/server.ts +324 -0
  83. package/src/universal-subscriptions.ts +397 -0
  84. package/src/utils/logger/README.md +209 -0
  85. package/src/utils/logger/index.ts +275 -0
  86. package/sui-indexer-schema.graphql +3691 -0
  87. package/tsconfig.json +28 -0
@@ -0,0 +1,397 @@
1
+ import { makeExtendSchemaPlugin, gql } from 'postgraphile';
2
+ import { subscriptionLogger } from './utils/logger';
3
+
4
// Database table information interface.
// Describes one discovered `store_*` table; assembled by getTableInfo() from
// information_schema plus a count(*)/size probe.
interface TableInfo {
  tableName: string; // logical name: physical name with the `store_` prefix stripped
  fullTableName: string; // physical table name in the database (`store_` + tableName)
  columns: ColumnInfo[]; // column metadata, in ordinal_position order
  primaryKeys: string[]; // PK column names; may fall back to `table_fields` keys when no PK constraint exists
  statistics?: {
    rowCount: number; // count(*) at discovery time — a snapshot, not live
    totalSize: string; // pg_size_pretty(pg_total_relation_size(...))
    tableSize: string; // pg_size_pretty(pg_relation_size(...))
  };
  generatedAt?: string; // ISO-8601 timestamp of when this info was generated
}
17
+
18
// Metadata for a single column, mapped from information_schema.columns
// (see the row mapping in getTableInfo()).
interface ColumnInfo {
  columnName: string; // information_schema column_name
  dataType: string; // information_schema data_type (e.g. 'integer', 'text')
  isNullable: boolean; // true when is_nullable = 'YES'
  defaultValue?: any; // column_default as reported by information_schema (raw SQL expression text)
  maxLength?: number; // character_maximum_length (character types only)
  precision?: number; // numeric_precision (numeric types only)
  scale?: number; // numeric_scale (numeric types only)
}
27
+
28
// Cached table information.
// Module-level cache keyed by logical table name (without the `store_` prefix);
// populated once by discoverStoreTables() or seeded from pre-generated data in
// createUniversalSubscriptionsPlugin(), then reused for the process lifetime.
let cachedTables: Record<string, TableInfo> = {};
// True once cachedTables has been filled; guards against re-introspection.
let schemaGenerated = false;
31
+
32
+ /**
33
+ * Dynamically retrieve schema information for all store tables
34
+ */
35
+ async function discoverStoreTables(pgClient: any): Promise<Record<string, TableInfo>> {
36
+ if (schemaGenerated && Object.keys(cachedTables).length > 0) {
37
+ return cachedTables;
38
+ }
39
+
40
+ try {
41
+ subscriptionLogger.info('Starting discovery of database store table structure...');
42
+
43
+ // 1. Get all store_* tables
44
+ const tablesResult = await pgClient.query(`
45
+ SELECT table_name
46
+ FROM information_schema.tables
47
+ WHERE table_schema = 'public'
48
+ AND table_name LIKE 'store_%'
49
+ ORDER BY table_name
50
+ `);
51
+
52
+ const tables: Record<string, TableInfo> = {};
53
+
54
+ // 2. Get detailed information for each table
55
+ for (const tableRow of tablesResult.rows) {
56
+ const fullTableName = tableRow.table_name;
57
+ const tableName = fullTableName.replace(/^store_/, '');
58
+
59
+ const tableInfo = await getTableInfo(pgClient, fullTableName);
60
+ tables[tableName] = tableInfo;
61
+ }
62
+
63
+ cachedTables = tables;
64
+ schemaGenerated = true;
65
+
66
+ subscriptionLogger.info(
67
+ `Discovered ${Object.keys(tables).length} store tables: ${Object.keys(tables).join(', ')}`
68
+ );
69
+ return tables;
70
+ } catch (error) {
71
+ subscriptionLogger.error('Failed to discover store tables', error);
72
+ return {};
73
+ }
74
+ }
75
+
76
+ /**
77
+ * Generate pre-built table information - called at server startup
78
+ */
79
+ export async function generateStoreTablesInfo(pgPool: any): Promise<Record<string, TableInfo>> {
80
+ const pgClient = await pgPool.connect();
81
+ try {
82
+ const tables = await discoverStoreTables(pgClient);
83
+ subscriptionLogger.info(
84
+ `Pre-generated schema information for ${Object.keys(tables).length} tables`
85
+ );
86
+ return tables;
87
+ } finally {
88
+ pgClient.release();
89
+ }
90
+ }
91
+
92
+ /**
93
+ * Simplified tools plugin - only provides basic query functionality, let PostGraphile's built-in listen subscriptions work normally
94
+ */
95
+ export function createUniversalSubscriptionsPlugin(preGeneratedTables?: Record<string, TableInfo>) {
96
+ return makeExtendSchemaPlugin((_build) => {
97
+ subscriptionLogger.info(
98
+ 'Enabling simplified tools plugin - only keeping basic query functionality'
99
+ );
100
+
101
+ // Use pre-generated table information if available
102
+ if (preGeneratedTables && Object.keys(preGeneratedTables).length > 0) {
103
+ cachedTables = preGeneratedTables;
104
+ schemaGenerated = true;
105
+ }
106
+
107
+ const tableNames = Object.keys(cachedTables);
108
+ subscriptionLogger.info(`Discovered store tables: ${tableNames.join(', ')}`);
109
+
110
+ return {
111
+ typeDefs: gql`
112
+ extend type Query {
113
+ """
114
+ Get Schema information for all store tables
115
+ """
116
+ storeSchema: JSON
117
+
118
+ """
119
+ Query data from specified store table
120
+ """
121
+ storeData(table: String!): JSON
122
+
123
+ """
124
+ Get list of all available store table names
125
+ """
126
+ availableStoreTables: [String!]!
127
+ }
128
+
129
+ # Removed custom subscription types, now only use PostGraphile's built-in listen subscriptions
130
+ `,
131
+
132
+ resolvers: {
133
+ Query: {
134
+ storeSchema: async (root: any, args: any, context: any, _info: any) => {
135
+ const { pgClient } = context;
136
+ try {
137
+ const tables = await discoverStoreTables(pgClient);
138
+ return {
139
+ tables,
140
+ generatedAt: new Date().toISOString()
141
+ };
142
+ } catch (error) {
143
+ return {
144
+ error: (error as Error).message,
145
+ tables: {}
146
+ };
147
+ }
148
+ },
149
+
150
+ storeData: async (root: any, args: any, context: any, _info: any) => {
151
+ return await executeTableQuery(context, args.table);
152
+ },
153
+
154
+ availableStoreTables: async (root: any, args: any, context: any, _info: any) => {
155
+ const { pgClient } = context;
156
+ try {
157
+ const tables = await discoverStoreTables(pgClient);
158
+ return Object.keys(tables);
159
+ } catch (error) {
160
+ subscriptionLogger.error('Failed to get available table list', error);
161
+ return [];
162
+ }
163
+ }
164
+ }
165
+ }
166
+ };
167
+ });
168
+ }
169
+
170
// Default plugin export (for backward compatibility).
// Built without pre-generated table info, so resolvers fall back to on-demand
// discovery via the per-request pgClient on first use.
export const UniversalSubscriptionsPlugin = createUniversalSubscriptionsPlugin();
172
+
173
+ // =========================
174
+ // Database query functions
175
+ // =========================
176
+
177
+ /**
178
+ * Get detailed table information (columns, primary keys, data statistics, etc.)
179
+ */
180
+ async function getTableInfo(pgClient: any, fullTableName: string): Promise<TableInfo> {
181
+ const tableName = fullTableName.replace(/^store_/, '');
182
+
183
+ // 1. Get column information
184
+ const columnsResult = await pgClient.query(
185
+ `
186
+ SELECT
187
+ column_name,
188
+ data_type,
189
+ is_nullable,
190
+ column_default,
191
+ character_maximum_length,
192
+ numeric_precision,
193
+ numeric_scale
194
+ FROM information_schema.columns
195
+ WHERE table_name = $1
196
+ ORDER BY ordinal_position
197
+ `,
198
+ [fullTableName]
199
+ );
200
+
201
+ // 2. Get primary key information
202
+ const primaryKeysResult = await pgClient.query(
203
+ `
204
+ SELECT column_name
205
+ FROM information_schema.table_constraints tc
206
+ JOIN information_schema.key_column_usage kcu
207
+ ON tc.constraint_name = kcu.constraint_name
208
+ WHERE tc.table_name = $1
209
+ AND tc.constraint_type = 'PRIMARY KEY'
210
+ ORDER BY kcu.ordinal_position
211
+ `,
212
+ [fullTableName]
213
+ );
214
+
215
+ // 3. Try to get primary key information from table_fields table (if exists)
216
+ let tableFieldsKeys: string[] = [];
217
+ try {
218
+ const tableFieldsResult = await pgClient.query(
219
+ `
220
+ SELECT field_name
221
+ FROM table_fields
222
+ WHERE table_name = $1 AND is_key = true
223
+ ORDER BY field_name
224
+ `,
225
+ [tableName]
226
+ );
227
+ tableFieldsKeys = tableFieldsResult.rows.map((row: any) => row.field_name);
228
+ } catch (_e) {
229
+ // table_fields table may not exist, ignore error
230
+ }
231
+
232
+ // 4. Get data statistics
233
+ const statsResult = await pgClient.query(`
234
+ SELECT count(*) as row_count
235
+ FROM ${fullTableName}
236
+ `);
237
+
238
+ // 5. Get table size information
239
+ const sizeResult = await pgClient.query(
240
+ `
241
+ SELECT
242
+ pg_size_pretty(pg_total_relation_size($1)) as total_size,
243
+ pg_size_pretty(pg_relation_size($1)) as table_size
244
+ `,
245
+ [fullTableName]
246
+ );
247
+
248
+ const columns: ColumnInfo[] = columnsResult.rows.map((row: any) => ({
249
+ columnName: row.column_name,
250
+ dataType: row.data_type,
251
+ isNullable: row.is_nullable === 'YES',
252
+ defaultValue: row.column_default,
253
+ maxLength: row.character_maximum_length,
254
+ precision: row.numeric_precision,
255
+ scale: row.numeric_scale
256
+ }));
257
+
258
+ const primaryKeys = primaryKeysResult.rows.map((row: any) => row.column_name);
259
+
260
+ return {
261
+ tableName,
262
+ fullTableName,
263
+ columns,
264
+ primaryKeys: primaryKeys.length > 0 ? primaryKeys : tableFieldsKeys,
265
+ statistics: {
266
+ rowCount: parseInt(statsResult.rows[0]?.row_count || '0'),
267
+ totalSize: sizeResult.rows[0]?.total_size || 'unknown',
268
+ tableSize: sizeResult.rows[0]?.table_size || 'unknown'
269
+ },
270
+ generatedAt: new Date().toISOString()
271
+ };
272
+ }
273
+
274
+ /**
275
+ * Dynamically execute table queries
276
+ */
277
+ async function executeTableQuery(context: any, tableName: string): Promise<any> {
278
+ const { pgClient } = context;
279
+ const fullTableName = `store_${tableName}`;
280
+
281
+ try {
282
+ subscriptionLogger.debug(`Executing table query: ${fullTableName}`);
283
+
284
+ // 1. Get table information
285
+ const tableInfo = cachedTables[tableName] || (await getTableInfo(pgClient, fullTableName));
286
+
287
+ if (tableInfo.columns.length === 0) {
288
+ return {
289
+ nodes: [],
290
+ totalCount: 0,
291
+ tableName,
292
+ generatedAt: new Date().toISOString()
293
+ };
294
+ }
295
+
296
+ // 2. Build dynamic nodeId expression
297
+ const nodeIdExpression = buildNodeIdExpression(tableInfo);
298
+
299
+ // 3. Build query fields
300
+ const columnFields = tableInfo.columns
301
+ .map((col) => `'${col.columnName}', ${col.columnName}`)
302
+ .join(', ');
303
+
304
+ // 4. Build WHERE condition
305
+ const whereCondition = buildWhereCondition(tableInfo);
306
+
307
+ // 5. Execute query
308
+ const sql = `
309
+ SELECT
310
+ COALESCE(
311
+ json_agg(
312
+ json_build_object(
313
+ 'nodeId', ${nodeIdExpression},
314
+ ${columnFields}
315
+ )
316
+ ),
317
+ '[]'::json
318
+ ) as nodes,
319
+ count(*) as total_count
320
+ FROM ${fullTableName}
321
+ WHERE ${whereCondition}
322
+ `;
323
+
324
+ subscriptionLogger.debug(`Executing SQL: ${sql}`);
325
+ const result = await pgClient.query(sql);
326
+
327
+ const row = result.rows[0];
328
+ const data = {
329
+ nodes: row?.nodes || [],
330
+ totalCount: parseInt(row?.total_count || '0'),
331
+ tableName,
332
+ generatedAt: new Date().toISOString()
333
+ };
334
+
335
+ subscriptionLogger.debug(`Query result: ${fullTableName} found ${data.totalCount} records`);
336
+ return data;
337
+ } catch (error) {
338
+ subscriptionLogger.error(`Failed to query ${fullTableName}`, error);
339
+ return {
340
+ nodes: [],
341
+ totalCount: 0,
342
+ tableName,
343
+ generatedAt: new Date().toISOString(),
344
+ error: (error as Error).message
345
+ };
346
+ }
347
+ }
348
+
349
+ /**
350
+ * Dynamically build NodeId expression
351
+ */
352
+ function buildNodeIdExpression(tableInfo: TableInfo): string {
353
+ const { tableName, primaryKeys, columns } = tableInfo;
354
+
355
+ if (primaryKeys.length > 0) {
356
+ // Use primary keys to build nodeId
357
+ const keyExpression = primaryKeys
358
+ .map((key) => `COALESCE(${key}::text, 'null')`)
359
+ .join(" || ':' || ");
360
+ return `encode(('${tableName}:' || ${keyExpression})::bytea, 'base64')`;
361
+ }
362
+
363
+ // If no primary key, use first column
364
+ const firstColumn = columns[0]?.columnName || 'unknown';
365
+ return `encode(('${tableName}:' || COALESCE(${firstColumn}::text, 'unknown'))::bytea, 'base64')`;
366
+ }
367
+
368
+ /**
369
+ * Dynamically build WHERE condition - completely generic, no hardcoded field names
370
+ */
371
+ function buildWhereCondition(tableInfo: TableInfo): string {
372
+ const { primaryKeys, columns } = tableInfo;
373
+
374
+ // 1. Prioritize primary key fields as filter conditions (most reliable)
375
+ if (primaryKeys.length > 0) {
376
+ const conditions = primaryKeys.map((key) => `${key} IS NOT NULL`);
377
+ return conditions.join(' AND ');
378
+ }
379
+
380
+ // 2. If no primary key, find first non-null field (reduce empty data)
381
+ const nonNullableColumns = columns.filter((col) => !col.isNullable);
382
+ if (nonNullableColumns.length > 0) {
383
+ return `${nonNullableColumns[0].columnName} IS NOT NULL`;
384
+ }
385
+
386
+ // 3. If all fields can be null, use first field for basic filtering
387
+ if (columns.length > 0) {
388
+ return `${columns[0].columnName} IS NOT NULL`;
389
+ }
390
+
391
+ // 4. Final fallback - return all rows (no filtering)
392
+ return 'true';
393
+ }
394
+
395
+ // Removed getLatestInsertedData function, now only use listen subscriptions
396
+
397
+ // Removed getLatestInsertedDataSince function, now only use simple listen subscriptions
@@ -0,0 +1,209 @@
1
+ # Dubhe Logger System
2
+
3
+ A high-performance logging system based on Pino, with an object-oriented design that makes it easy to understand and extend.
4
+
5
+ ## Features
6
+
7
+ - 🚀 High-performance logging based on Pino
8
+ - 📝 Support for structured logging
9
+ - 🎨 Colorful pretty printing in development environment
10
+ - 📁 File logging support in production environment
11
+ - 🔧 Flexible configuration options
12
+ - 📦 Component-based log management
13
+ - 🛡️ TypeScript type safety
14
+
15
+ ## Basic Usage
16
+
17
+ ### 1. Using Predefined Component Loggers
18
+
19
+ ```typescript
20
+ import { dbLogger, serverLogger, systemLogger } from './utils/logger';
21
+
22
+ // Database operation logs
23
+ dbLogger.info('Database connection successful', { host: 'localhost', port: 5432 });
24
+ dbLogger.error('Query failed', new Error('Connection timeout'), { query: 'SELECT * FROM users' });
25
+
26
+ // Server logs
27
+ serverLogger.info('Server started', { port: 4000, env: 'development' });
28
+
29
+ // System logs
30
+ systemLogger.warn('High memory usage', { usage: '85%' });
31
+ ```
32
+
33
+ ### 2. Creating Custom Component Loggers
34
+
35
+ ```typescript
36
+ import { createComponentLogger } from './utils/logger';
37
+
38
+ const apiLogger = createComponentLogger('api');
39
+ const cacheLogger = createComponentLogger('cache');
40
+
41
+ apiLogger.info('API request', { method: 'GET', path: '/users', userId: 123 });
42
+ cacheLogger.debug('Cache hit', { key: 'user:123', ttl: 3600 });
43
+ ```
44
+
45
+ ### 3. Using Logger Class to Create Custom Instances
46
+
47
+ ```typescript
48
+ import { Logger } from './utils/logger';
49
+
50
+ // Create logger with custom configuration
51
+ const customLogger = new Logger({
52
+ level: 'debug',
53
+ service: 'my-service',
54
+ enableFileLogging: true,
55
+ logsDir: './custom-logs',
56
+ enablePrettyPrint: false
57
+ });
58
+
59
+ const myLogger = customLogger.createComponentLogger('my-component');
60
+ myLogger.info('Custom log message');
61
+ ```
62
+
63
+ ## Configuration Options
64
+
65
+ ```typescript
66
+ interface LoggerConfig {
67
+ level?: string; // Log level (debug|info|warn|error)
68
+ service?: string; // Service name
69
+ component?: string; // Component name
70
+ enableFileLogging?: boolean; // Enable file logging
71
+ logsDir?: string; // Log file directory
72
+ enablePrettyPrint?: boolean; // Enable colorful output
73
+ }
74
+ ```
75
+
76
+ ## Utility Functions
77
+
78
+ ### Performance Logging
79
+
80
+ ```typescript
81
+ import { logPerformance } from './utils/logger';
82
+
83
+ const startTime = Date.now();
84
+ // ... perform operations
85
+ logPerformance('Database query', startTime, { table: 'users', rows: 1000 });
86
+ ```
87
+
88
+ ### Database Operation Logging
89
+
90
+ ```typescript
91
+ import { logDatabaseOperation } from './utils/logger';
92
+
93
+ logDatabaseOperation('SELECT', 'users', { limit: 10, offset: 0 });
94
+ ```
95
+
96
+ ### WebSocket Event Logging
97
+
98
+ ```typescript
99
+ import { logWebSocketEvent } from './utils/logger';
100
+
101
+ logWebSocketEvent('client_connected', 5, { clientId: 'abc123' });
102
+ ```
103
+
104
+ ### GraphQL Query Logging
105
+
106
+ ```typescript
107
+ import { logGraphQLQuery } from './utils/logger';
108
+
109
+ logGraphQLQuery('query', 'query GetUsers { users { id name } }', { limit: 10 });
110
+ ```
111
+
112
+ ## Predefined Component Loggers
113
+
114
+ | Logger | Component | Purpose |
115
+ | -------------------- | ------------ | ---------------------- |
116
+ | `dbLogger` | database | Database operations |
117
+ | `serverLogger` | server | Server related |
118
+ | `wsLogger` | websocket | WebSocket connections |
119
+ | `gqlLogger` | graphql | GraphQL queries |
120
+ | `subscriptionLogger` | subscription | Subscription features |
121
+ | `systemLogger` | system | System level |
122
+ | `authLogger` | auth | Authentication |
123
+ | `perfLogger` | performance | Performance monitoring |
124
+
125
+ ## Environment Variables
126
+
127
+ - `LOG_LEVEL`: Set log level (debug|info|warn|error)
128
+ - `NODE_ENV`: Sets the environment mode; pretty printing is enabled in development
129
+
130
+ ## Log Format
131
+
132
+ ### Development Environment (Pretty Print)
133
+
134
+ ```
135
+ 2024-01-15 10:30:45 [INFO] dubhe-graphql-server [database]: Database connection successful {"host": "localhost", "port": 5432}
136
+ ```
137
+
138
+ ### Production Environment (JSON)
139
+
140
+ ```json
141
+ {
142
+ "level": 30,
143
+ "time": "2024-01-15T10:30:45.123Z",
144
+ "service": "dubhe-graphql-server",
145
+ "component": "database",
146
+ "msg": "Database connection successful",
147
+ "host": "localhost",
148
+ "port": 5432
149
+ }
150
+ ```
151
+
152
+ ## Advanced Usage
153
+
154
+ ### Extending Logger Class
155
+
156
+ ```typescript
157
+ import { Logger, LoggerConfig } from './utils/logger';
158
+
159
+ class CustomLogger extends Logger {
160
+ constructor(config: LoggerConfig) {
161
+ super(config);
162
+ }
163
+
164
+ // Add custom methods
165
+ public audit(action: string, userId: string, meta?: any) {
166
+ const auditLogger = this.createComponentLogger('audit');
167
+ auditLogger.info(`User action: ${action}`, {
168
+ userId,
169
+ timestamp: new Date().toISOString(),
170
+ ...meta
171
+ });
172
+ }
173
+ }
174
+
175
+ const logger = new CustomLogger({ service: 'audit-service' });
176
+ logger.audit('login', 'user123', { ip: '192.168.1.1' });
177
+ ```
178
+
179
+ ### Getting Raw Pino Instance
180
+
181
+ ```typescript
182
+ import { Logger } from './utils/logger';
183
+
184
+ const logger = new Logger();
185
+ const pinoInstance = logger.getPinoInstance();
186
+
187
+ // Use Pino API directly
188
+ pinoInstance.info({ customField: 'value' }, 'Using Pino directly');
189
+ ```
190
+
191
+ ## Migration Guide
192
+
193
+ Migrating from winston to the new Logger system:
194
+
195
+ ### Before (Winston)
196
+
197
+ ```typescript
198
+ import logger from './logger';
199
+ logger.info('Message', { meta: 'data' });
200
+ ```
201
+
202
+ ### Now (Pino + Class)
203
+
204
+ ```typescript
205
+ import { systemLogger } from './utils/logger';
206
+ systemLogger.info('Message', { meta: 'data' });
207
+ ```
208
+
209
+ Most APIs remain compatible; you only need to update the import paths.