@memberjunction/server 5.27.1 → 5.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/config.d.ts +151 -0
  2. package/dist/config.d.ts.map +1 -1
  3. package/dist/config.js +15 -0
  4. package/dist/config.js.map +1 -1
  5. package/dist/generated/generated.d.ts +959 -5
  6. package/dist/generated/generated.d.ts.map +1 -1
  7. package/dist/generated/generated.js +4639 -280
  8. package/dist/generated/generated.js.map +1 -1
  9. package/dist/generic/ResolverBase.d.ts +14 -0
  10. package/dist/generic/ResolverBase.d.ts.map +1 -1
  11. package/dist/generic/ResolverBase.js +37 -3
  12. package/dist/generic/ResolverBase.js.map +1 -1
  13. package/dist/generic/RestoreContextInput.d.ts +27 -0
  14. package/dist/generic/RestoreContextInput.d.ts.map +1 -0
  15. package/dist/generic/RestoreContextInput.js +39 -0
  16. package/dist/generic/RestoreContextInput.js.map +1 -0
  17. package/dist/index.d.ts +2 -0
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/index.js +21 -4
  20. package/dist/index.js.map +1 -1
  21. package/dist/resolvers/FeedbackResolver.d.ts +150 -0
  22. package/dist/resolvers/FeedbackResolver.d.ts.map +1 -0
  23. package/dist/resolvers/FeedbackResolver.js +876 -0
  24. package/dist/resolvers/FeedbackResolver.js.map +1 -0
  25. package/dist/resolvers/FileResolver.d.ts +27 -0
  26. package/dist/resolvers/FileResolver.d.ts.map +1 -1
  27. package/dist/resolvers/FileResolver.js +32 -3
  28. package/dist/resolvers/FileResolver.js.map +1 -1
  29. package/dist/resolvers/IntegrationDiscoveryResolver.d.ts +18 -1
  30. package/dist/resolvers/IntegrationDiscoveryResolver.d.ts.map +1 -1
  31. package/dist/resolvers/IntegrationDiscoveryResolver.js +247 -22
  32. package/dist/resolvers/IntegrationDiscoveryResolver.js.map +1 -1
  33. package/dist/resolvers/MCPResolver.d.ts +77 -0
  34. package/dist/resolvers/MCPResolver.d.ts.map +1 -1
  35. package/dist/resolvers/MCPResolver.js +300 -1
  36. package/dist/resolvers/MCPResolver.js.map +1 -1
  37. package/dist/resolvers/RunAIAgentResolver.d.ts.map +1 -1
  38. package/dist/resolvers/RunAIAgentResolver.js +87 -32
  39. package/dist/resolvers/RunAIAgentResolver.js.map +1 -1
  40. package/package.json +68 -66
  41. package/src/config.ts +19 -0
  42. package/src/generated/generated.ts +3430 -281
  43. package/src/generic/ResolverBase.ts +41 -4
  44. package/src/generic/RestoreContextInput.ts +32 -0
  45. package/src/index.ts +22 -5
  46. package/src/resolvers/FeedbackResolver.ts +940 -0
  47. package/src/resolvers/FileResolver.ts +33 -4
  48. package/src/resolvers/IntegrationDiscoveryResolver.ts +224 -20
  49. package/src/resolvers/MCPResolver.ts +297 -1
  50. package/src/resolvers/RunAIAgentResolver.ts +89 -32
@@ -368,9 +368,21 @@ export class FileResolver extends FileResolverBase {
368
368
  return { account, provider };
369
369
  }
370
370
 
371
+ /**
372
+ * Legacy file upload path — used by the `<mj-files-file-upload>` Angular component.
373
+ * Creates a File entity record in the database AND generates a pre-authenticated upload URL.
374
+ * The client then PUTs the file binary directly to that URL.
375
+ *
376
+ * Driver initialization: uses `buildUserContext()` (no storage account). The driver
377
+ * initializes from environment variables (e.g. STORAGE_AZURE_ACCOUNT_NAME, STORAGE_DROPBOX_ACCESS_TOKEN).
378
+ *
379
+ * Input: `ProviderID` identifies which storage provider to use.
380
+ * Returns: `{ File, UploadUrl, NameExists }` — the persisted File record, upload URL, and duplicate check.
381
+ *
382
+ * @see CreatePreAuthUploadUrl for the enterprise storage-account-based path (used by File Browser).
383
+ */
371
384
  @Mutation(() => CreateFilePayload)
372
385
  async CreateFile(@Arg('input', () => CreateMJFileInput) input: CreateMJFileInput, @Ctx() context: AppContext, @PubSub() pubSub: PubSubEngine) {
373
- // Check to see if there's already an object with that name
374
386
  const provider = GetReadOnlyProvider(context.providers, { allowFallbackToReadWrite: true });
375
387
  const user = this.GetUserFromPayload(context.userPayload);
376
388
  const fileEntity = await provider.GetEntityObject<MJFileEntity>('MJ: Files', user);
@@ -389,12 +401,15 @@ export class FileResolver extends FileResolverBase {
389
401
 
390
402
  // Create the upload URL and get the record updates (provider key, content type, etc)
391
403
  const userContext = this.buildUserContext(context);
392
- const { updatedInput, UploadUrl } = await createUploadUrl(providerEntity, fileEntity, userContext);
404
+ const { updatedInput, UploadUrl } = await createUploadUrl(providerEntity, fileEntity as unknown as { ID: string; Name: string; ProviderID: string; ContentType?: string; ProviderKey?: string }, userContext);
393
405
 
394
406
  // Save the file record with the updated input
395
407
  const mapper = new FieldMapper();
396
- fileEntity.SetMany(mapper.ReverseMapFields({ ...updatedInput }), true, true);
397
- await fileEntity.Save();
408
+ fileEntity.SetMany(mapper.ReverseMapFields({ ...updatedInput }), true, false);
409
+ const saved = await fileEntity.Save();
410
+ if (!saved) {
411
+ console.error('[CreateFile] File save failed:', fileEntity.LatestResult?.CompleteMessage);
412
+ }
398
413
  const File = mapper.MapFields({ ...fileEntity.GetAll() });
399
414
 
400
415
  return { File, UploadUrl, NameExists };
@@ -534,6 +549,20 @@ export class FileResolver extends FileResolverBase {
534
549
  return downloadUrl;
535
550
  }
536
551
 
552
+ /**
553
+ * Enterprise file upload path — used by the File Browser UI.
554
+ * Generates a pre-authenticated upload URL only (does NOT create a File entity record).
555
+ * The client handles the upload directly to the storage provider via the returned URL.
556
+ *
557
+ * Driver initialization: uses `buildExtendedUserContext()` with a FileStorageAccount entity.
558
+ * Credentials are loaded from the Credential Engine (encrypted in the database), with
559
+ * automatic token refresh for OAuth providers like Dropbox and Box.com.
560
+ *
561
+ * Input: `AccountID` identifies which storage account (and its linked provider/credentials) to use.
562
+ * Returns: `{ UploadUrl, ProviderKey }` — the pre-authenticated URL and optional provider key.
563
+ *
564
+ * @see CreateFile for the legacy path that also creates a File entity record.
565
+ */
537
566
  @Mutation(() => CreatePreAuthUploadUrlPayload)
538
567
  async CreatePreAuthUploadUrl(@Arg('input', () => CreatePreAuthUploadUrlInput) input: CreatePreAuthUploadUrlInput, @Ctx() context: AppContext) {
539
568
  const md = GetReadOnlyProvider(context.providers, { allowFallbackToReadWrite: true });
@@ -1,5 +1,5 @@
1
1
  import { Resolver, Query, Mutation, Arg, Ctx, ObjectType, Field, InputType } from "type-graphql";
2
- import { CompositeKey, Metadata, RunView, UserInfo, LogError } from "@memberjunction/core";
2
+ import { CompositeKey, LocalCacheManager, Metadata, RunView, UserInfo, LogError } from "@memberjunction/core";
3
3
  import { CronExpressionHelper } from "@memberjunction/scheduling-engine";
4
4
  import {
5
5
  MJCompanyIntegrationEntity,
@@ -24,7 +24,8 @@ import {
24
24
  ConnectionTestResult,
25
25
  IntegrationEngine,
26
26
  IntegrationSyncOptions,
27
- SourceSchemaInfo
27
+ SourceSchemaInfo,
28
+ IntegrationSchemaSync
28
29
  } from "@memberjunction/integration-engine";
29
30
  import {
30
31
  SchemaBuilder,
@@ -87,6 +88,7 @@ class ApplyAllInput {
87
88
  @Field(() => Boolean, { nullable: true, defaultValue: true, description: 'If false, skips the sync step after schema + entity maps are created' }) StartSync?: boolean;
88
89
  @Field(() => Boolean, { nullable: true, defaultValue: false, description: 'If true, ignores watermarks and does a full re-fetch' }) FullSync?: boolean;
89
90
  @Field({ nullable: true, defaultValue: 'created', description: 'Sync scope: "created" = only newly created entity maps, "all" = all maps for the connector' }) SyncScope?: string;
91
+ @Field({ nullable: true, defaultValue: 'Pull', description: 'SyncDirection applied to all created entity maps: Pull | Push | Bidirectional. Defaults to Pull.' }) DefaultSyncDirection?: string;
90
92
  }
91
93
 
92
94
  @ObjectType()
@@ -137,6 +139,7 @@ class ApplyAllBatchConnectorInput {
137
139
  /** Optional per-connector schedule. Applied on success. */
138
140
  @Field({ nullable: true }) CronExpression?: string;
139
141
  @Field({ nullable: true }) ScheduleTimezone?: string;
142
+ @Field({ nullable: true, defaultValue: 'Pull', description: 'SyncDirection applied to all created entity maps for this connector: Pull | Push | Bidirectional. Defaults to Pull.' }) DefaultSyncDirection?: string;
140
143
  }
141
144
 
142
145
  @InputType()
@@ -145,6 +148,8 @@ class ApplyAllBatchInput {
145
148
  @Field(() => Boolean, { nullable: true, defaultValue: true, description: 'If false, skips sync after schema + entity maps' }) StartSync?: boolean;
146
149
  @Field(() => Boolean, { nullable: true, defaultValue: false, description: 'If true, ignores watermarks and does a full re-fetch' }) FullSync?: boolean;
147
150
  @Field({ nullable: true, defaultValue: 'created', description: 'Sync scope: "created" = only newly created entity maps, "all" = all maps for the connector' }) SyncScope?: string;
151
+ @Field({ nullable: true, description: 'Override sync direction for the initial sync: Pull | Push | Bidirectional. Defaults to entity map SyncDirection.' }) SyncDirection?: string;
152
+ @Field({ nullable: true, description: 'Override sync direction stored in the created schedule: Pull | Push | Bidirectional.' }) ScheduleSyncDirection?: string;
148
153
  }
149
154
 
150
155
  @ObjectType()
@@ -508,6 +513,14 @@ class StartSyncOutput {
508
513
  @Field({ nullable: true }) RunID?: string;
509
514
  }
510
515
 
516
+ @ObjectType()
517
+ class WriteRecordOutput {
518
+ @Field() Success: boolean;
519
+ @Field() Message: string;
520
+ @Field({ nullable: true }) ExternalID?: string;
521
+ @Field({ nullable: true }) StatusCode?: number;
522
+ }
523
+
511
524
  @InputType()
512
525
  class CreateScheduleInput {
513
526
  @Field() CompanyIntegrationID: string;
@@ -515,6 +528,8 @@ class CreateScheduleInput {
515
528
  @Field() CronExpression: string;
516
529
  @Field({ nullable: true }) Timezone?: string;
517
530
  @Field({ nullable: true }) Description?: string;
531
+ @Field({ nullable: true }) SyncDirection?: string;
532
+ @Field({ nullable: true }) FullSync?: boolean;
518
533
  }
519
534
 
520
535
  @ObjectType()
@@ -1008,15 +1023,23 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
1008
1023
  // but the connector's GetIntegrationObjects() always has them.
1009
1024
  const connectorDescriptions = this.buildDescriptionLookup(connector);
1010
1025
 
1011
- return objects.map(obj => {
1026
+ const results: TargetTableConfig[] = [];
1027
+ for (const obj of objects) {
1012
1028
  const sourceObj = sourceSchema.Objects.find(o => o.ExternalName.toLowerCase() === obj.SourceObjectName.toLowerCase());
1013
1029
  const objDescriptions = connectorDescriptions.get(obj.SourceObjectName.toLowerCase());
1014
1030
 
1031
+ // If the object wasn't discovered in IntrospectSchema (e.g. API error), skip it
1032
+ // rather than generating a broken table with no columns and a fallback PK.
1033
+ if (!sourceObj) {
1034
+ LogError(`[buildTargetConfigs] Skipping "${obj.SourceObjectName}" — not found in source schema (IntrospectSchema may have failed for this object)`);
1035
+ continue;
1036
+ }
1037
+
1015
1038
  // Filter fields if caller specified a subset
1016
1039
  const selectedFieldSet = obj.Fields?.length
1017
1040
  ? new Set(obj.Fields.map(f => f.toLowerCase()))
1018
1041
  : null;
1019
- const sourceFields = (sourceObj?.Fields ?? []).filter(f =>
1042
+ const sourceFields = sourceObj.Fields.filter(f =>
1020
1043
  !selectedFieldSet || selectedFieldSet.has(f.Name.toLowerCase()) || f.IsPrimaryKey
1021
1044
  );
1022
1045
 
@@ -1032,21 +1055,37 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
1032
1055
  Description: f.Description ?? objDescriptions?.fields.get(f.Name.toLowerCase()),
1033
1056
  }));
1034
1057
 
1035
- const primaryKeyFields = (sourceObj?.Fields ?? [])
1058
+ const primaryKeyFields = sourceObj.Fields
1036
1059
  .filter(f => f.IsPrimaryKey)
1037
1060
  .map(f => f.Name.replace(/[^A-Za-z0-9_]/g, '_'));
1038
1061
 
1039
- return {
1062
+ // If no columns were discovered, skip rather than generating a broken table
1063
+ // (DDL with UNIQUE ([ID]) on a non-existent column will always fail).
1064
+ if (columns.length === 0 && primaryKeyFields.length === 0) {
1065
+ LogError(`[buildTargetConfigs] Skipping "${obj.SourceObjectName}" — 0 fields discovered (live API likely failed and no DB-cached fields available)`);
1066
+ continue;
1067
+ }
1068
+
1069
+ // If columns exist but no PK was found, log diagnostic info and skip rather than
1070
+ // generating broken DDL with UNIQUE ([ID]) on a non-existent column.
1071
+ if (primaryKeyFields.length === 0 && columns.length > 0) {
1072
+ const fieldNames = sourceObj.Fields.map(f => `${f.Name}(pk=${f.IsPrimaryKey})`).join(', ');
1073
+ LogError(`[buildTargetConfigs] Skipping "${obj.SourceObjectName}" — ${columns.length} columns but NO primary key field found. Fields: [${fieldNames}]`);
1074
+ continue;
1075
+ }
1076
+
1077
+ results.push({
1040
1078
  SourceObjectName: obj.SourceObjectName,
1041
1079
  SchemaName: obj.SchemaName,
1042
1080
  TableName: obj.TableName,
1043
1081
  EntityName: obj.EntityName,
1044
- Description: sourceObj?.Description ?? objDescriptions?.objectDescription,
1082
+ Description: sourceObj.Description ?? objDescriptions?.objectDescription,
1045
1083
  Columns: columns,
1046
- PrimaryKeyFields: primaryKeyFields.length > 0 ? primaryKeyFields : ['ID'],
1084
+ PrimaryKeyFields: primaryKeyFields,
1047
1085
  SoftForeignKeys: []
1048
- };
1049
- });
1086
+ });
1087
+ }
1088
+ return results;
1050
1089
  }
1051
1090
 
1052
1091
  /** Builds a lookup of object name → { objectDescription, fields: fieldName → description } from the connector's static metadata. */
@@ -1818,9 +1857,73 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
1818
1857
  const { connector, companyIntegration } = await this.resolveConnector(input.CompanyIntegrationID, user);
1819
1858
  const schemaName = this.deriveSchemaName(companyIntegration.Integration);
1820
1859
 
1821
- // Step 2: Resolve object IDs to names, build inputs with per-object Fields
1860
+ // Step 1b: Ensure IntegrationEngine cache is populated so IntrospectSchema's
1861
+ // DB fallback (GetCachedObject/GetCachedFields) can find IntegrationObject records
1862
+ await IntegrationEngine.Instance.Config(false, user);
1863
+
1864
+ // Step 2: Introspect source schema and persist discovered objects/fields
1822
1865
  const sourceSchema = await (connector.IntrospectSchema.bind(connector) as
1823
1866
  (ci: unknown, u: unknown) => Promise<SourceSchemaInfo>)(companyIntegration, user);
1867
+
1868
+ // Step 2b: Persist discovered objects/fields to IntegrationObject/IntegrationObjectField.
1869
+ // Static records (IsCustom=false) are preserved; new/custom records get IsCustom=true.
1870
+ // This ensures custom objects are available for future sync runs, action generation, etc.
1871
+ try {
1872
+ const persistResult = await IntegrationSchemaSync.PersistDiscoveredSchema({
1873
+ IntegrationID: companyIntegration.IntegrationID,
1874
+ SourceSchema: sourceSchema,
1875
+ ContextUser: user,
1876
+ });
1877
+ if (persistResult.ObjectsCreated > 0 || persistResult.FieldsCreated > 0) {
1878
+ console.log(
1879
+ `[IntegrationApplyAll] Persisted discovered schema: ` +
1880
+ `${persistResult.ObjectsCreated} new objects, ${persistResult.FieldsCreated} new fields, ` +
1881
+ `${persistResult.ObjectsUpdated} updated objects, ${persistResult.FieldsUpdated} updated fields`
1882
+ );
1883
+ }
1884
+
1885
+ // Step 2c: Generate CRUD actions for newly discovered custom objects.
1886
+ // Uses the same ActionMetadataGenerator as the offline CLI, persisted via BaseEntity.Save().
1887
+ if (persistResult.ObjectsCreated > 0) {
1888
+ try {
1889
+ const engineObjects = IntegrationEngine.Instance
1890
+ .GetIntegrationObjectsByIntegrationID(companyIntegration.IntegrationID);
1891
+ const customObjects = sourceSchema.Objects
1892
+ .filter(o => !engineObjects
1893
+ .some(ex => ex.Name.toLowerCase() === o.ExternalName.toLowerCase() && !ex.IsCustom))
1894
+ .map(o => ({
1895
+ Name: o.ExternalName,
1896
+ DisplayName: o.ExternalLabel || o.ExternalName,
1897
+ Description: o.Description,
1898
+ SupportsWrite: false,
1899
+ Fields: o.Fields.map(f => ({
1900
+ Name: f.Name,
1901
+ DisplayName: f.Label || f.Name,
1902
+ Description: f.Description || '',
1903
+ Type: f.SourceType || 'string',
1904
+ IsRequired: f.IsRequired,
1905
+ IsReadOnly: false,
1906
+ IsPrimaryKey: f.IsPrimaryKey,
1907
+ })),
1908
+ }));
1909
+ await IntegrationSchemaSync.GenerateActionsForCustomObjects({
1910
+ IntegrationName: companyIntegration.Integration,
1911
+ CustomObjects: customObjects,
1912
+ SupportsSearch: connector.SupportsSearch,
1913
+ SupportsListing: connector.SupportsListing,
1914
+ ContextUser: user,
1915
+ });
1916
+ } catch (actionErr) {
1917
+ const msg = actionErr instanceof Error ? actionErr.message : String(actionErr);
1918
+ console.warn(`[IntegrationApplyAll] Action generation warning (non-fatal): ${msg}`);
1919
+ }
1920
+ }
1921
+ } catch (persistErr) {
1922
+ // Non-fatal: schema persistence failure should not block table creation
1923
+ const msg = persistErr instanceof Error ? persistErr.message : String(persistErr);
1924
+ console.warn(`[IntegrationApplyAll] Schema persistence warning (non-fatal): ${msg}`);
1925
+ }
1926
+
1824
1927
  const objectIDs = input.SourceObjects.map(so => so.SourceObjectID);
1825
1928
  const resolvedNames = await this.resolveSourceObjectNames(objectIDs, undefined, sourceSchema, companyIntegration.IntegrationID, user);
1826
1929
 
@@ -1896,7 +1999,8 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
1896
1999
  if (skipRestart) {
1897
2000
  await Metadata.Provider.Refresh();
1898
2001
  const entityMapsCreated = await this.createEntityAndFieldMaps(
1899
- input.CompanyIntegrationID, objects, connector, companyIntegration, schemaName, user
2002
+ input.CompanyIntegrationID, objects, connector, companyIntegration, schemaName, user,
2003
+ input.DefaultSyncDirection ?? 'Pull'
1900
2004
  );
1901
2005
  const createdMapIDs = entityMapsCreated.map(em => em.EntityMapID).filter(Boolean);
1902
2006
  const scopedMapIDs = input.SyncScope === 'all' ? undefined : createdMapIDs;
@@ -1978,14 +2082,15 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
1978
2082
  connector: BaseIntegrationConnector,
1979
2083
  companyIntegration: MJCompanyIntegrationEntity,
1980
2084
  schemaName: string,
1981
- user: UserInfo
2085
+ user: UserInfo,
2086
+ defaultSyncDirection: string = 'Pull'
1982
2087
  ): Promise<ApplyAllEntityMapCreated[]> {
1983
2088
  const md = new Metadata();
1984
2089
  const results: ApplyAllEntityMapCreated[] = [];
1985
2090
 
1986
2091
  for (const obj of objects) {
1987
2092
  const entityMapResult = await this.createSingleEntityMap(
1988
- companyIntegrationID, obj, connector, companyIntegration, schemaName, user, md
2093
+ companyIntegrationID, obj, connector, companyIntegration, schemaName, user, md, defaultSyncDirection
1989
2094
  );
1990
2095
  if (entityMapResult) {
1991
2096
  results.push(entityMapResult);
@@ -2002,7 +2107,8 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
2002
2107
  companyIntegration: MJCompanyIntegrationEntity,
2003
2108
  schemaName: string,
2004
2109
  user: UserInfo,
2005
- md: Metadata
2110
+ md: Metadata,
2111
+ defaultSyncDirection: string = 'Pull'
2006
2112
  ): Promise<ApplyAllEntityMapCreated | null> {
2007
2113
  // Find the entity by schema + table name
2008
2114
  const entityInfo = md.Entities.find(
@@ -2020,8 +2126,8 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
2020
2126
  em.CompanyIntegrationID = companyIntegrationID;
2021
2127
  em.ExternalObjectName = obj.SourceObjectName;
2022
2128
  em.EntityID = entityInfo.ID;
2023
- em.SyncDirection = 'Pull';
2024
- em.Priority = 0;
2129
+ em.SyncDirection = isValidSyncDirection(defaultSyncDirection) ? defaultSyncDirection : 'Pull';
2130
+ em.Priority = obj.SourceObjectName.startsWith('assoc_') ? 10 : 0;
2025
2131
  em.Status = 'Active';
2026
2132
  em.SyncEnabled = true;
2027
2133
 
@@ -2183,15 +2289,17 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
2183
2289
  @Arg("webhookURL", { nullable: true }) webhookURL: string,
2184
2290
  @Arg("fullSync", () => Boolean, { defaultValue: false, description: 'If true, ignores watermarks and re-fetches all records from the source' }) fullSync: boolean,
2185
2291
  @Arg("entityMapIDs", () => [String], { nullable: true, description: 'Optional: sync only these entity maps. If omitted, syncs all maps for the connector.' }) entityMapIDs: string[],
2292
+ @Arg("syncDirection", () => String, { nullable: true, description: 'Override sync direction: Pull | Push | Bidirectional. If omitted, each entity map\'s own SyncDirection is used.' }) syncDirection: 'Pull' | 'Push' | 'Bidirectional' | undefined,
2186
2293
  @Ctx() ctx: AppContext
2187
2294
  ): Promise<StartSyncOutput> {
2188
2295
  try {
2189
2296
  const user = this.getAuthenticatedUser(ctx);
2190
2297
  await IntegrationEngine.Instance.Config(false, user);
2191
2298
 
2192
- const syncOptions: { FullSync?: boolean; EntityMapIDs?: string[] } = {};
2299
+ const syncOptions: { FullSync?: boolean; EntityMapIDs?: string[]; SyncDirection?: 'Pull' | 'Push' | 'Bidirectional' } = {};
2193
2300
  if (fullSync) syncOptions.FullSync = true;
2194
2301
  if (entityMapIDs?.length) syncOptions.EntityMapIDs = entityMapIDs;
2302
+ if (syncDirection) syncOptions.SyncDirection = syncDirection;
2195
2303
 
2196
2304
  // Fire and forget — progress is tracked inside IntegrationEngine
2197
2305
  const syncPromise = IntegrationEngine.Instance.RunSync(
@@ -2280,6 +2388,85 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
2280
2388
  }
2281
2389
  }
2282
2390
 
2391
+ /**
2392
+ * Writes a single record to an external system via the integration connector.
2393
+ * Supports create, update, and delete operations.
2394
+ */
2395
+ @Mutation(() => WriteRecordOutput)
2396
+ async IntegrationWriteRecord(
2397
+ @Arg("companyIntegrationID") companyIntegrationID: string,
2398
+ @Arg("objectName") objectName: string,
2399
+ @Arg("operation", () => String, { description: 'create, update, or delete' }) operation: string,
2400
+ @Arg("externalID", { nullable: true, description: 'Required for update/delete' }) externalID: string,
2401
+ @Arg("attributes", () => String, { nullable: true, description: 'JSON object of field values for create/update' }) attributesJson: string,
2402
+ @Ctx() ctx: AppContext
2403
+ ): Promise<WriteRecordOutput> {
2404
+ try {
2405
+ const user = this.getAuthenticatedUser(ctx);
2406
+ await IntegrationEngine.Instance.Config(false, user);
2407
+
2408
+ const rv = new RunView();
2409
+ const ciResult = await rv.RunView<MJCompanyIntegrationEntity>({
2410
+ EntityName: 'MJ: Company Integrations',
2411
+ ExtraFilter: `ID='${companyIntegrationID}'`,
2412
+ MaxRows: 1,
2413
+ ResultType: 'entity_object',
2414
+ }, user);
2415
+
2416
+ if (!ciResult.Success || ciResult.Results.length === 0) {
2417
+ return { Success: false, Message: `Company Integration not found: ${companyIntegrationID}` };
2418
+ }
2419
+
2420
+ const companyIntegration = ciResult.Results[0];
2421
+
2422
+ // Load the Integration entity to get the ClassName for connector resolution
2423
+ const integResult = await rv.RunView<MJIntegrationEntity>({
2424
+ EntityName: 'Integrations',
2425
+ ExtraFilter: `ID='${companyIntegration.IntegrationID}'`,
2426
+ MaxRows: 1,
2427
+ ResultType: 'entity_object',
2428
+ }, user);
2429
+ if (!integResult.Success || integResult.Results.length === 0) {
2430
+ return { Success: false, Message: `Integration not found: ${companyIntegration.IntegrationID}` };
2431
+ }
2432
+ const connector = ConnectorFactory.Resolve(integResult.Results[0]);
2433
+
2434
+ const attributes = attributesJson ? JSON.parse(attributesJson) as Record<string, unknown> : {};
2435
+ const crudBase = { CompanyIntegration: companyIntegration, ObjectName: objectName, ContextUser: user };
2436
+
2437
+ let result: { Success: boolean; ExternalID?: string; ErrorMessage?: string; StatusCode: number };
2438
+
2439
+ switch (operation.toLowerCase()) {
2440
+ case 'create':
2441
+ if (!connector.SupportsCreate) return { Success: false, Message: 'Connector does not support create' };
2442
+ result = await connector.CreateRecord({ ...crudBase, Attributes: attributes });
2443
+ break;
2444
+ case 'update':
2445
+ if (!connector.SupportsUpdate) return { Success: false, Message: 'Connector does not support update' };
2446
+ if (!externalID) return { Success: false, Message: 'externalID is required for update' };
2447
+ result = await connector.UpdateRecord({ ...crudBase, ExternalID: externalID, Attributes: attributes });
2448
+ break;
2449
+ case 'delete':
2450
+ if (!connector.SupportsDelete) return { Success: false, Message: 'Connector does not support delete' };
2451
+ if (!externalID) return { Success: false, Message: 'externalID is required for delete' };
2452
+ result = await connector.DeleteRecord({ ...crudBase, ExternalID: externalID });
2453
+ break;
2454
+ default:
2455
+ return { Success: false, Message: `Invalid operation: ${operation}. Must be create, update, or delete` };
2456
+ }
2457
+
2458
+ return {
2459
+ Success: result.Success,
2460
+ Message: result.Success ? `${operation} succeeded` : (result.ErrorMessage ?? `${operation} failed`),
2461
+ ExternalID: result.ExternalID,
2462
+ StatusCode: result.StatusCode,
2463
+ };
2464
+ } catch (e) {
2465
+ LogError(`IntegrationWriteRecord error: ${e}`);
2466
+ return { Success: false, Message: this.formatError(e) };
2467
+ }
2468
+ }
2469
+
2283
2470
  // ── SCHEDULE ────────────────────────────────────────────────────────
2284
2471
 
2285
2472
  @Mutation(() => CreateScheduleOutput)
@@ -2314,7 +2501,10 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
2314
2501
  job.Timezone = input.Timezone || 'UTC';
2315
2502
  job.Status = 'Active';
2316
2503
  job.OwnerUserID = user.ID;
2317
- job.Configuration = JSON.stringify({ CompanyIntegrationID: input.CompanyIntegrationID });
2504
+ const jobConfig: Record<string, unknown> = { CompanyIntegrationID: input.CompanyIntegrationID };
2505
+ if (input.SyncDirection) jobConfig.SyncDirection = input.SyncDirection;
2506
+ if (input.FullSync) jobConfig.FullSync = input.FullSync;
2507
+ job.Configuration = JSON.stringify(jobConfig);
2318
2508
  job.NextRunAt = CronExpressionHelper.GetNextRunTime(input.CronExpression, input.Timezone || 'UTC');
2319
2509
 
2320
2510
  if (!await job.Save()) return { Success: false, Message: 'Failed to create schedule' };
@@ -2884,6 +3074,17 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
2884
3074
  const user = this.getAuthenticatedUser(ctx);
2885
3075
  const validatedPlatform = this.validatePlatform(platform);
2886
3076
 
3077
+ // Bust RunView caches for integration metadata BEFORE Config(true).
3078
+ // mj sync push writes records via stored procedures which do NOT fire
3079
+ // BaseEntity change events, so the RunView cache is never auto-invalidated.
3080
+ // Explicitly clearing these entries ensures Config(true) re-queries the DB.
3081
+ await LocalCacheManager.Instance.InvalidateEntityCaches('MJ: Integration Objects');
3082
+ await LocalCacheManager.Instance.InvalidateEntityCaches('MJ: Integration Object Fields');
3083
+
3084
+ // Force-refresh integration metadata cache so IntrospectSchema
3085
+ // picks up any IntegrationObject/Field changes made via mj sync push
3086
+ await IntegrationEngine.Instance.Config(true, user);
3087
+
2887
3088
  // Phase 1: Build schema for each connector in parallel
2888
3089
  const buildResults = await Promise.allSettled(
2889
3090
  input.Connectors.map(async (connInput) => {
@@ -2933,6 +3134,8 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
2933
3134
  StartSync: input.StartSync,
2934
3135
  FullSync: input.FullSync ?? false,
2935
3136
  SyncScope: input.SyncScope ?? 'created',
3137
+ SyncDirection: input.SyncDirection,
3138
+ ScheduleSyncDirection: input.ScheduleSyncDirection,
2936
3139
  CreatedAt: new Date().toISOString(),
2937
3140
  };
2938
3141
  rsuInput.PostRestartFiles = [
@@ -3028,7 +3231,8 @@ export class IntegrationDiscoveryResolver extends ResolverBase {
3028
3231
  await Metadata.Provider.Refresh();
3029
3232
  const entityMapsCreated = await this.createEntityAndFieldMaps(
3030
3233
  build.connInput.CompanyIntegrationID, build.objects, build.connector,
3031
- build.companyIntegration, build.schemaName, user
3234
+ build.companyIntegration, build.schemaName, user,
3235
+ build.connInput.DefaultSyncDirection ?? 'Pull'
3032
3236
  );
3033
3237
  connResult.EntityMapsCreated = entityMapsCreated;
3034
3238