@memberjunction/server 3.1.0 → 3.2.0
This diff shows the changes between the two published versions of the package as they appear in the public registry.
- package/dist/agents/skip-sdk.d.ts.map +1 -1
- package/dist/agents/skip-sdk.js +26 -1
- package/dist/agents/skip-sdk.js.map +1 -1
- package/dist/generated/generated.d.ts +384 -57
- package/dist/generated/generated.d.ts.map +1 -1
- package/dist/generated/generated.js +12066 -9955
- package/dist/generated/generated.js.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -0
- package/dist/index.js.map +1 -1
- package/dist/resolvers/FileResolver.d.ts +130 -1
- package/dist/resolvers/FileResolver.d.ts.map +1 -1
- package/dist/resolvers/FileResolver.js +784 -9
- package/dist/resolvers/FileResolver.js.map +1 -1
- package/dist/resolvers/RunAIAgentResolver.d.ts.map +1 -1
- package/dist/resolvers/RunAIAgentResolver.js +51 -30
- package/dist/resolvers/RunAIAgentResolver.js.map +1 -1
- package/dist/resolvers/SqlLoggingConfigResolver.d.ts +10 -0
- package/dist/resolvers/SqlLoggingConfigResolver.d.ts.map +1 -1
- package/dist/resolvers/SqlLoggingConfigResolver.js +72 -7
- package/dist/resolvers/SqlLoggingConfigResolver.js.map +1 -1
- package/dist/util.d.ts.map +1 -1
- package/dist/util.js +36 -14
- package/dist/util.js.map +1 -1
- package/package.json +45 -44
- package/src/agents/skip-sdk.ts +31 -1
- package/src/generated/generated.ts +1558 -215
- package/src/index.ts +8 -0
- package/src/resolvers/FileResolver.ts +701 -29
- package/src/resolvers/RunAIAgentResolver.ts +56 -46
- package/src/resolvers/SqlLoggingConfigResolver.ts +86 -13
- package/src/util.ts +47 -17
package/src/resolvers/RunAIAgentResolver.ts
CHANGED

@@ -1,7 +1,7 @@
 import { Resolver, Mutation, Query, Arg, Ctx, ObjectType, Field, PubSub, PubSubEngine, Subscription, Root, ResolverFilterData, ID, Int } from 'type-graphql';
 import { AppContext, UserPayload } from '../types.js';
 import { DatabaseProviderBase, LogError, LogStatus, Metadata, RunView, UserInfo } from '@memberjunction/core';
-import { ConversationDetailEntity, ConversationDetailAttachmentEntity
+import { ConversationDetailEntity, ConversationDetailAttachmentEntity } from '@memberjunction/core-entities';
 import { AgentRunner } from '@memberjunction/ai-agents';
 import { AIAgentEntityExtended, AIAgentRunEntityExtended, ExecuteAgentResult, ConversationUtility, AttachmentData } from '@memberjunction/ai-core-plus';
 import { AIEngine } from '@memberjunction/aiengine';

@@ -12,6 +12,7 @@ import { RequireSystemUser } from '../directives/RequireSystemUser.js';
 import { GetReadWriteProvider } from '../util.js';
 import { SafeJSONParse } from '@memberjunction/global';
 import { getAttachmentService } from '@memberjunction/aiengine';
+import { NotificationEngine } from '@memberjunction/notifications';
 
 @ObjectType()
 export class AIAgentRunResult {
@@ -650,55 +651,64 @@ export class RunAIAgentResolver extends ResolverBase {
        throw new Error(`Failed to load conversation detail ${conversationDetailId}`);
      }
 
-      //
-      const
-
-      contextUser
-      );
-
-      notification.UserID = contextUser.ID;
-      notification.Title = `${agentName} completed your request`;
+      // Build conversation URL for email/SMS templates
+      const baseUrl = process.env.APP_BASE_URL || 'http://localhost:4201';
+      const conversationUrl = `${baseUrl}/conversations/${detail.ConversationID}?artifact=${artifactInfo.artifactId}`;
 
      // Craft message based on versioning
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      const message = artifactInfo.versionNumber > 1
+        ? `${agentName} has finished processing and created version ${artifactInfo.versionNumber}`
+        : `${agentName} has finished processing and created a new artifact`;
+
+      // Use unified notification engine (Config called to ensure loaded)
+      const notificationEngine = NotificationEngine.Instance;
+      await notificationEngine.Config(false, contextUser);
+      const result = await notificationEngine.SendNotification({
+        userId: contextUser.ID,
+        typeNameOrId: 'Agent Completion',
+        title: `${agentName} completed your request`,
+        message: message,
+        resourceConfiguration: {
+          type: 'conversation',
+          conversationId: detail.ConversationID,
+          messageId: conversationDetailId,
+          artifactId: artifactInfo.artifactId,
+          versionId: artifactInfo.versionId,
+          versionNumber: artifactInfo.versionNumber
+        },
+        templateData: {
+          agentName: agentName,
+          artifactTitle: artifactInfo.artifactId,
+          conversationUrl: conversationUrl,
+          versionNumber: artifactInfo.versionNumber > 1 ? artifactInfo.versionNumber : undefined
+        }
+      }, contextUser);
+
+      if (result.success && result.inAppNotificationId) {
+        const channels = [];
+        if (result.deliveryChannels.inApp) channels.push('InApp');
+        if (result.deliveryChannels.email) channels.push('Email');
+        if (result.deliveryChannels.sms) channels.push('SMS');
+        const channelList = channels.length > 0 ? channels.join(', ') : 'None';
+        LogStatus(`📬 Notification sent via ${channelList} (ID: ${result.inAppNotificationId})`);
+
+        // Publish real-time notification event so client updates immediately
+        pubSub.publish(PUSH_STATUS_UPDATES_TOPIC, {
+          userPayload: JSON.stringify(userPayload),
+          message: JSON.stringify({
+            type: 'notification',
+            notificationId: result.inAppNotificationId,
+            action: 'create',
+            title: `${agentName} completed your request`,
+            message: message
+          })
+        });
+
+        LogStatus(`📡 Published notification event to client`);
+      } else if (!result.success) {
+        LogError(`Notification failed: ${result.errors?.join(', ')}`);
      }
 
-      LogStatus(`📬 Created notification ${notification.ID} for user ${contextUser.ID}`);
-
-      // Publish real-time notification event so client updates immediately
-      pubSub.publish(PUSH_STATUS_UPDATES_TOPIC, {
-        userPayload: JSON.stringify(userPayload),
-        message: JSON.stringify({
-          type: 'notification',
-          notificationId: notification.ID,
-          action: 'create',
-          title: notification.Title,
-          message: notification.Message
-        })
-      });
-
-      LogStatus(`📡 Published notification event to client`);
-
    } catch (error) {
      LogError(`Failed to create completion notification: ${(error as Error).message}`);
      // Don't throw - notification failure shouldn't fail the agent run
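The replacement above moves agent-completion notifications from a hand-built notification entity to the unified NotificationEngine. Below is a minimal sketch of that call pattern in isolation, assuming only the API surface visible in the diff (Instance, Config, SendNotification, and the result's success/errors fields); notifyAgentCompletion is a hypothetical helper, not part of the package.

// Sketch only - reuses the NotificationEngine calls shown in the diff above.
// notifyAgentCompletion is a hypothetical wrapper, not an export of @memberjunction/server.
import { NotificationEngine } from '@memberjunction/notifications';
import { UserInfo } from '@memberjunction/core';

export async function notifyAgentCompletion(agentName: string, conversationId: string, contextUser: UserInfo): Promise<void> {
  const engine = NotificationEngine.Instance;
  await engine.Config(false, contextUser); // ensure the engine's metadata is loaded before sending

  const result = await engine.SendNotification({
    userId: contextUser.ID,
    typeNameOrId: 'Agent Completion',
    title: `${agentName} completed your request`,
    message: `${agentName} has finished processing`,
    resourceConfiguration: { type: 'conversation', conversationId },
    templateData: { agentName }
  }, contextUser);

  if (!result.success) {
    console.warn(`Notification failed: ${result.errors?.join(', ')}`);
  }
}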
package/src/resolvers/SqlLoggingConfigResolver.ts
CHANGED

@@ -48,6 +48,22 @@ export class SqlLoggingOptions {
   @Field(() => String, { nullable: true })
   filterByUserId?: string;
 
+  /** Array of regex/wildcard patterns to filter SQL statements */
+  @Field(() => [String], { nullable: true })
+  filterPatterns?: string[];
+
+  /** How to apply patterns: 'include' or 'exclude' (default: exclude) */
+  @Field(() => String, { nullable: true })
+  filterType?: 'include' | 'exclude';
+
+  /** Whether to output verbose debug information to console */
+  @Field(() => Boolean, { nullable: true })
+  verboseOutput?: boolean;
+
+  /** Default schema name for Flyway migration placeholder replacement */
+  @Field(() => String, { nullable: true })
+  defaultSchemaName?: string;
+
   /** Human-readable name for the logging session */
   @Field(() => String, { nullable: true })
   sessionName?: string;
@@ -118,6 +134,22 @@ export class SqlLoggingOptionsInput {
   @Field(() => String, { nullable: true })
   filterByUserId?: string;
 
+  /** Array of regex/wildcard patterns to filter SQL statements */
+  @Field(() => [String], { nullable: true })
+  filterPatterns?: string[];
+
+  /** How to apply patterns: 'include' or 'exclude' (default: exclude) */
+  @Field(() => String, { nullable: true })
+  filterType?: 'include' | 'exclude';
+
+  /** Whether to output verbose debug information to console */
+  @Field(() => Boolean, { nullable: true })
+  verboseOutput?: boolean;
+
+  /** Default schema name for Flyway migration placeholder replacement */
+  @Field(() => String, { nullable: true })
+  defaultSchemaName?: string;
+
   /** Human-readable name for the logging session */
   @Field(() => String, { nullable: true })
   sessionName?: string;
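Both SqlLoggingOptions and SqlLoggingOptionsInput gain the same filterPatterns / filterType pair. The provider-side matching logic is not part of this diff; the hypothetical helper below only illustrates the include/exclude semantics described in the field comments (matching statements are kept for 'include' and dropped for 'exclude', with 'exclude' as the default).

// Hypothetical illustration of include/exclude pattern filtering - not the
// actual SQLServerDataProvider implementation.
function shouldLogStatement(
  sql: string,
  filterPatterns?: (string | RegExp)[],
  filterType: 'include' | 'exclude' = 'exclude'
): boolean {
  if (!filterPatterns || filterPatterns.length === 0) return true; // no patterns: log everything

  const matches = filterPatterns.some((p) => {
    const re = p instanceof RegExp ? p : new RegExp(p, 'i'); // treat strings as case-insensitive regex
    return re.test(sql);
  });

  // 'include': log only matching statements; 'exclude': drop matching statements
  return filterType === 'include' ? matches : !matches;
}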
@@ -201,6 +233,9 @@ export class SqlLoggingConfig {
 export class SqlLoggingConfigResolver extends ResolverBase {
   /** Default prefix for auto-generated SQL log filenames */
   private static readonly LOG_FILE_PREFIX = 'sql-log-';
+
+  /** Track active session timeouts for proper cleanup when sessions are manually stopped */
+  private static sessionTimeouts = new Map<string, NodeJS.Timeout>();
 
   /**
    * Validates that the current user has Owner-level privileges required for SQL logging operations.
@@ -343,7 +378,7 @@ export class SqlLoggingConfigResolver extends ResolverBase {
       filePath: session.filePath,
       startTime: session.startTime,
       statementCount: session.statementCount,
-      options: session.options,
+      options: this.convertOptionsToGraphQL(session.options),
       sessionName: session.options.sessionName,
       filterByUserId: session.options.filterByUserId
     }));
@@ -409,7 +444,7 @@ export class SqlLoggingConfigResolver extends ResolverBase {
     // Prepare file path
     const allowedDir = path.resolve(config.sqlLogging.allowedLogDirectory ?? './logs/sql');
     await this.ensureDirectoryExists(allowedDir);
-
+
     const fileName = input.fileName || `${SqlLoggingConfigResolver.LOG_FILE_PREFIX}${new Date().toISOString().replace(/[:.]/g, '-')}.sql`;
     const filePath = path.join(allowedDir, fileName);
 
@@ -432,15 +467,20 @@ export class SqlLoggingConfigResolver extends ResolverBase {
     // Create the logging session
     const session = await provider.CreateSqlLogger(filePath, sessionOptions);
 
-    // Set up auto-cleanup after timeout
+    // Set up auto-cleanup after timeout with proper tracking
     if (config.sqlLogging.sessionTimeout > 0) {
-      setTimeout(async () => {
+      const timeoutId = setTimeout(async () => {
         try {
           await session.dispose();
+          SqlLoggingConfigResolver.sessionTimeouts.delete(session.id);
         } catch (e) {
-          // Session might already be disposed
+          // Session might already be disposed - log for debugging
+          console.warn(`Auto-cleanup failed for SQL logging session ${session.id}:`, e);
         }
       }, config.sqlLogging.sessionTimeout);
+
+      // Track the timeout so we can cancel it if session is manually stopped
+      SqlLoggingConfigResolver.sessionTimeouts.set(session.id, timeoutId);
     }
 
     return {
@@ -448,9 +488,9 @@ export class SqlLoggingConfigResolver extends ResolverBase {
       filePath: session.filePath,
       startTime: session.startTime,
       statementCount: session.statementCount,
-      options: session.options,
+      options: this.convertOptionsToGraphQL(session.options),
       sessionName: session.options.sessionName,
-      filterByUserId: session.options.filterByUserId
+      filterByUserId: session.options.filterByUserId,
     };
   }
 
@@ -482,15 +522,21 @@ export class SqlLoggingConfigResolver extends ResolverBase {
   ): Promise<boolean> {
     await this.checkOwnerAccess(context);
     const provider = GetReadOnlyProvider(context.providers, {allowFallbackToReadWrite: true}) as SQLServerDataProvider;
-
-    //
-    const
-
-
+
+    // Use the public method to get and dispose the session
+    const session = provider.GetSqlLoggingSessionById(sessionId);
+
     if (!session) {
       throw new Error(`SQL logging session ${sessionId} not found`);
     }
 
+    // Clear any scheduled timeout for this session
+    const timeoutId = SqlLoggingConfigResolver.sessionTimeouts.get(sessionId);
+    if (timeoutId) {
+      clearTimeout(timeoutId);
+      SqlLoggingConfigResolver.sessionTimeouts.delete(sessionId);
+    }
+
     await session.dispose();
     return true;
   }
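Together with the auto-cleanup change earlier in this file, the static sessionTimeouts map gives every logging session exactly one pending timer: registered when the session starts, cleared here when it is stopped manually, and removed when it fires. A condensed sketch of the pattern, with illustrative names:

// Condensed sketch of the timeout-tracking pattern used above; names are illustrative.
const sessionTimeouts = new Map<string, NodeJS.Timeout>();

function scheduleAutoCleanup(sessionId: string, dispose: () => Promise<void>, timeoutMs: number): void {
  const timeoutId = setTimeout(async () => {
    try {
      await dispose();
    } finally {
      sessionTimeouts.delete(sessionId); // timer fired - nothing left to cancel
    }
  }, timeoutMs);
  sessionTimeouts.set(sessionId, timeoutId); // remember the timer so a manual stop can cancel it
}

function cancelAutoCleanup(sessionId: string): void {
  const timeoutId = sessionTimeouts.get(sessionId);
  if (timeoutId) {
    clearTimeout(timeoutId); // session stopped manually - avoid a second dispose later
    sessionTimeouts.delete(sessionId);
  }
}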
@@ -610,7 +656,7 @@ export class SqlLoggingConfigResolver extends ResolverBase {
     // Find the session
     const provider = GetReadOnlyProvider(context.providers, {allowFallbackToReadWrite: true}) as SQLServerDataProvider;
     const sessions = provider.GetActiveSqlLoggingSessions();
-    const session = sessions.find(s => s.id === sessionId);
+    const session = sessions.find((s) => s.id === sessionId);
 
     if (!session) {
       throw new Error(`SQL logging session ${sessionId} not found`);
@@ -689,4 +735,31 @@ export class SqlLoggingConfigResolver extends ResolverBase {
       await fs.mkdir(dir, { recursive: true });
     }
   }
+
+  /**
+   * Converts SqlLoggingOptions from the provider to the GraphQL-compatible type.
+   * The provider's filterPatterns can contain RegExp objects, but GraphQL only supports strings.
+   *
+   * @param options - Options from SQLServerDataProvider
+   * @returns GraphQL-compatible SqlLoggingOptions
+   * @private
+   */
+  private convertOptionsToGraphQL(options: import('@memberjunction/sqlserver-dataprovider').SqlLoggingOptions): SqlLoggingOptions {
+    return {
+      formatAsMigration: options.formatAsMigration,
+      description: options.description,
+      statementTypes: options.statementTypes,
+      batchSeparator: options.batchSeparator,
+      prettyPrint: options.prettyPrint,
+      logRecordChangeMetadata: options.logRecordChangeMetadata,
+      retainEmptyLogFiles: options.retainEmptyLogFiles,
+      filterByUserId: options.filterByUserId,
+      sessionName: options.sessionName,
+      verboseOutput: options.verboseOutput,
+      defaultSchemaName: options.defaultSchemaName,
+      // Convert RegExp objects to their string representation
+      filterPatterns: options.filterPatterns?.map((p) => (p instanceof RegExp ? p.toString() : String(p))),
+      filterType: options.filterType,
+    };
+  }
 }
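convertOptionsToGraphQL stringifies RegExp patterns because GraphQL has no pattern scalar, and RegExp.prototype.toString() keeps the enclosing slashes and flags. A hypothetical client-side round-trip (not part of the package API):

// Hypothetical round-trip for a serialized pattern returned by the resolver.
const serialized = /^INSERT\s/i.toString(); // "/^INSERT\s/i"
const parts = serialized.match(/^\/(.*)\/([a-z]*)$/s); // split regex body and flags
const restored = parts ? new RegExp(parts[1], parts[2]) : new RegExp(serialized);
console.log(restored.test('INSERT INTO SomeTable VALUES (1)')); // true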
package/src/util.ts
CHANGED

@@ -60,7 +60,27 @@ export async function sendPostRequest(url: string, payload: any, useCompression:
   const req = request(options, (res) => {
     const gunzip = createGunzip();
     const stream = res.headers['content-encoding'] === 'gzip' ? res.pipe(gunzip) : res;
-
+    let streamEnded = false;
+
+    const handleStreamEnd = () => {
+      if (streamEnded) return; // Prevent double-resolution
+      streamEnded = true;
+
+      // Attempt to parse any remaining data in buffer in case it's a complete JSON object
+      if (buffer.trim()) {
+        try {
+          const jsonObject = JSON.parse(buffer.trim());
+          jsonObjects.push(jsonObject);
+          streamCallback?.(jsonObject);
+        } catch (e) {
+          const err = z.object({ message: z.string() }).safeParse(e);
+          // Handle JSON parse error for the last chunk
+          console.warn(`Error in postRequest().stream(end) while parsing JSON object: ${err.success ? err.data.message : e}`);
+        }
+      }
+      resolve(jsonObjects);
+    };
+
     stream.on('data', (chunk) => {
       buffer += chunk;
       let boundary;
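handleStreamEnd centralizes end-of-stream handling so that 'end', the new 'close', and 'error' handlers (wired up in the next hunk) all funnel through one guarded path and the promise settles at most once. A stripped-down sketch of the same newline-delimited JSON pattern, with illustrative names and a plain '\n' delimiter (the full boundary logic is not shown in this diff):

// Stripped-down sketch of the guarded stream-termination pattern; names are illustrative.
import { Readable } from 'stream';

function collectNdjson(stream: Readable): Promise<unknown[]> {
  return new Promise((resolve, reject) => {
    const objects: unknown[] = [];
    let buffer = '';
    let ended = false;

    const finish = () => {
      if (ended) return; // 'end' and 'close' can both fire - settle only once
      ended = true;
      if (buffer.trim()) {
        try { objects.push(JSON.parse(buffer.trim())); } catch { /* partial trailing chunk */ }
      }
      resolve(objects);
    };

    stream.on('data', (chunk) => {
      buffer += chunk;
      let boundary;
      while ((boundary = buffer.indexOf('\n')) !== -1) {
        const line = buffer.slice(0, boundary).trim();
        buffer = buffer.slice(boundary + 1);
        if (line) {
          try { objects.push(JSON.parse(line)); } catch { /* skip malformed line */ }
        }
      }
    });

    stream.on('end', finish);
    stream.on('close', finish); // premature close still resolves with what was parsed
    stream.on('error', (e) => {
      if (!ended) {
        ended = true;
        reject(e);
      }
    });
  });
}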
@@ -78,28 +98,38 @@ export async function sendPostRequest(url: string, payload: any, useCompression:
        }
      }
    });
-
-    stream.on('end',
-
-
-
-
-
-
-
-
-
-
+
+    stream.on('end', handleStreamEnd);
+
+    // Handle premature connection close (e.g., server crashes mid-response)
+    stream.on('close', () => {
+      if (!streamEnded) {
+        console.warn(`Stream closed prematurely for ${url}`);
+        handleStreamEnd();
+      }
+    });
+
+    // Handle stream errors (decompression failures, etc.)
+    stream.on('error', (e) => {
+      if (!streamEnded) {
+        console.error(`Stream error for ${url}:`, e);
+        reject(new Error(`Stream error: ${e.message}`));
      }
-      resolve(jsonObjects);
    });
  });
 
  req.on('error', (e) => {
    const err = z.object({ message: z.string() }).safeParse(e);
-
-
+    const errorMessage = err.success ? err.data.message : String(e);
+    console.error(`Error in sendPostRequest().req.on(error) for ${hostname}:${port}${pathname}: ${errorMessage}`);
+
+    // Create a more informative error for the rejection
+    const contextualError = new Error(`HTTP request failed to ${url}: ${errorMessage}`);
+    // Preserve the original error as the cause
+    if (e instanceof Error) {
+      (contextualError as any).cause = e;
+    }
+    reject(contextualError);
  });
 
  req.write(data);
|