@salesforce/lds-runtime-mobile 1.208.1 → 1.210.0

This diff compares the publicly available contents of the two package versions as published to their respective registries. It is provided for informational purposes only.
package/dist/main.js CHANGED
@@ -16,7 +16,7 @@ import { setupInstrumentation, instrumentAdapter as instrumentAdapter$1, instrum
16
16
  import { HttpStatusCode, StoreKeySet, serializeStructuredKey, Reader, deepFreeze, emitAdapterEvent, createCustomAdapterEventEmitter, StoreKeyMap, isFileReference, Environment, Luvio, InMemoryStore } from '@luvio/engine';
17
17
  import excludeStaleRecordsGate from '@salesforce/gate/lds.graphqlEvalExcludeStaleRecords';
18
18
  import { parseAndVisit, Kind, visit, execute, buildSchema, isObjectType, defaultFieldResolver } from '@luvio/graphql-parser';
19
- import { getRecordId18, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion } from '@salesforce/lds-adapters-uiapi';
19
+ import { getRecordId18, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion, getRecordsAdapterFactory } from '@salesforce/lds-adapters-uiapi';
20
20
  import caseSensitiveUserId from '@salesforce/user/Id';
21
21
  import { idleDetector, getInstrumentation } from 'o11y/client';
22
22
  import ldsUseShortUrlGate from '@salesforce/gate/lds.useShortUrl';
@@ -14437,6 +14437,7 @@ const CREATE_CONTENT_DOCUMENT_AND_VERSION_DRAFT_SYNTHESIZE_ERROR = 'create-conte
14437
14437
  const PRIMING_TOTAL_SESSION_COUNT = 'priming-total-session-count';
14438
14438
  const PRIMING_TOTAL_ERROR_COUNT = 'priming-total-error-count';
14439
14439
  const PRIMING_TOTAL_PRIMED_COUNT = 'priming-total-primed-count';
14440
+ const PRIMING_TOTAL_CONFLICT_COUNT = 'priming-total-conflict-count';
14440
14441
  // logs
14441
14442
  const GRAPHQL_QUERY_PARSE_ERROR = 'gql-query-parse-error';
14442
14443
  const GRAPHQL_SQL_EVAL_PRECONDITION_ERROR = 'gql-sql-pre-eval-error';
@@ -14548,6 +14549,11 @@ function reportPrimingError(errorType, recordCount) {
14548
14549
  function reportPrimingSuccess(recordCount) {
14549
14550
  ldsMobileInstrumentation.incrementCounter(PRIMING_TOTAL_PRIMED_COUNT, recordCount, undefined);
14550
14551
  }
14552
+ function reportPrimingConflict(resolutionType, recordCount) {
14553
+ ldsMobileInstrumentation.incrementCounter(PRIMING_TOTAL_CONFLICT_COUNT, recordCount, undefined, {
14554
+ resolutionType,
14555
+ });
14556
+ }
14551
14557
 
14552
14558
  /**
14553
14559
  * HOF (high-order-function) that instruments any async operation. If the operation
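Together these hunks add a `priming-total-conflict-count` metric and a `reportPrimingConflict` reporter that tags each increment with the conflict's resolution type. A minimal sketch of the call it ends up receiving from the `conflict` event handler added in `instrumentPrimingSession` further down (the signature comes from the `.d.ts` change in this diff; the payload values are made up):

```ts
// Signature as declared in the updated instrumentation .d.ts in this diff.
declare function reportPrimingConflict(resolutionType: string, recordCount: number): void;

// Hypothetical 'conflict' event payload.
const conflictEvent = { ids: ['001xx000003DGb2AAG', '001xx000003DGb3AAG'], resolution: 'priming-merge' };

// Increments 'priming-total-conflict-count' by 2, tagged { resolutionType: 'priming-merge' }.
reportPrimingConflict(conflictEvent.resolution, conflictEvent.ids.length);
```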
@@ -15549,6 +15555,245 @@ function generateTypedBatches(work, batchSize) {
15549
15555
  return batches;
15550
15556
  }
15551
15557
 
15558
+ function getMissingElementsFromSuperset(superset, subset) {
15559
+ return subset.filter((val) => !superset.includes(val));
15560
+ }
15561
+ function findReferenceFieldForSpanningField(fieldName, objectInfo) {
15562
+ const fieldNames = Object.keys(objectInfo.fields);
15563
+ for (const objectInfoFieldName of fieldNames) {
15564
+ const field = objectInfo.fields[objectInfoFieldName];
15565
+ if (field.reference === true && field.relationshipName === fieldName) {
15566
+ return objectInfoFieldName;
15567
+ }
15568
+ }
15569
+ }
15570
+ function buildFieldUnionArray(existingRecord, incomingRecord, objectInfo) {
15571
+ const allFields = Array.from(new Set([...Object.keys(existingRecord.fields), ...Object.keys(incomingRecord.fields)]));
15572
+ const fieldUnion = [];
15573
+ allFields.forEach((fieldName) => {
15574
+ const objectInfoField = objectInfo.fields[fieldName];
15575
+ if (objectInfoField === undefined) {
15576
+ // find the reference field for the spanning field
15577
+ const referenceField = findReferenceFieldForSpanningField(fieldName, objectInfo);
15578
+ if (referenceField !== undefined) {
15579
+ fieldUnion.push(`${fieldName}.Id`);
15580
+ }
15581
+ }
15582
+ else {
15583
+ fieldUnion.push(fieldName);
15584
+ }
15585
+ });
15586
+ return fieldUnion;
15587
+ }
15588
+ /**
15589
+ * Merges (if possible) an incoming record from a priming session with an existing record in the durable store.
15590
+ *
15591
+ * IMPORTANT NOTE: this is not a suitable function to use for general merging of two DurableRecordRepresentation since it
15592
+ * makes the assumption that the incoming record ONLY contains scalar field values and no spanning records. The same is not
15593
+ * necessarily true for the existing record as it may have been populated in the cache outside of a priming session.
15594
+ * This function should not be moved out of the priming module!
15595
+ *
15596
+ * @param existingRecord Existing record in the durable store
15597
+ * @param incomingRecord Incoming record from the priming session
15598
+ * @param objectInfo Object info for the incoming record type
15599
+ * @returns Merge result describing the success or failure of the merge operation
15600
+ */
15601
+ function mergeRecord(existingRecord, incomingRecord, objectInfo) {
15602
+ // cache already contains everything incoming has
15603
+ if (existingRecord.weakEtag >= incomingRecord.weakEtag &&
15604
+ getMissingElementsFromSuperset(Object.keys(existingRecord.fields), Object.keys(incomingRecord.fields)).length === 0) {
15605
+ return {
15606
+ ok: true,
15607
+ code: 'success',
15608
+ needsWrite: false,
15609
+ record: existingRecord,
15610
+ };
15611
+ }
15612
+ // don't touch records that contain drafts
15613
+ if (existingRecord.drafts !== undefined) {
15614
+ return {
15615
+ ok: false,
15616
+ code: 'conflict-drafts',
15617
+ hasDraft: true,
15618
+ fieldUnion: buildFieldUnionArray(existingRecord, incomingRecord, objectInfo),
15619
+ };
15620
+ }
15621
+ // Check if incoming record's Etag is equal to the existing one
15622
+ if (existingRecord.weakEtag === incomingRecord.weakEtag) {
15623
+ // If so, merge the fields and return the updated record
15624
+ return {
15625
+ ok: true,
15626
+ needsWrite: true,
15627
+ code: 'success',
15628
+ record: {
15629
+ ...existingRecord,
15630
+ fields: {
15631
+ ...existingRecord.fields,
15632
+ ...incomingRecord.fields,
15633
+ },
15634
+ links: {
15635
+ ...existingRecord.links,
15636
+ ...incomingRecord.links,
15637
+ },
15638
+ },
15639
+ };
15640
+ }
15641
+ else if (incomingRecord.weakEtag > existingRecord.weakEtag &&
15642
+ getMissingElementsFromSuperset(Object.keys(incomingRecord.fields), Object.keys(existingRecord.fields)).length === 0) {
15643
+ // If incoming record's Etag is higher and contains all the fields, overwrite the record
15644
+ // NOTE: if existing record contains spanning records, this condition will never hit since incoming won't have those fields
15645
+ return { ok: true, code: 'success', needsWrite: true, record: incomingRecord };
15646
+ }
15647
+ else {
15648
+ const missingFields = getMissingElementsFromSuperset(Object.keys(incomingRecord.fields), Object.keys(existingRecord.fields));
15649
+ // if the only missing fields are spanning fields and their corresponding lookup fields match, we can merge
15650
+ // since none of the changed fields are part of the incoming record
15651
+ if (missingFields.every((field) => {
15652
+ const referenceFieldName = findReferenceFieldForSpanningField(field, objectInfo);
15653
+ if (referenceFieldName !== undefined) {
15654
+ return (incomingRecord.fields[referenceFieldName].value ===
15655
+ existingRecord.fields[referenceFieldName].value);
15656
+ }
15657
+ else {
15658
+ return false;
15659
+ }
15660
+ })) {
15661
+ return {
15662
+ ok: true,
15663
+ needsWrite: true,
15664
+ code: 'success',
15665
+ record: {
15666
+ // we span the existing record to maintain spanning references
15667
+ ...incomingRecord,
15668
+ fields: {
15669
+ ...existingRecord.fields,
15670
+ ...incomingRecord.fields,
15671
+ },
15672
+ links: {
15673
+ ...existingRecord.links,
15674
+ ...incomingRecord.links,
15675
+ },
15676
+ },
15677
+ };
15678
+ }
15679
+ // If Etags do not match and the incoming record does not contain all fields, re-request the record
15680
+ return {
15681
+ ok: false,
15682
+ code: 'conflict-missing-fields',
15683
+ fieldUnion: buildFieldUnionArray(existingRecord, incomingRecord, objectInfo),
15684
+ hasDraft: false,
15685
+ };
15686
+ }
15687
+ }
15688
+
15689
+ const CONFLICT_POOL_SIZE = 5;
15690
+ /**
15691
+ * A pool of workers that resolve conflicts between incoming records and records in the store.
15692
+ */
15693
+ class ConflictPool {
15694
+ constructor(store, objectInfoLoader) {
15695
+ this.store = store;
15696
+ this.objectInfoLoader = objectInfoLoader;
15697
+ this.pool = new AsyncWorkerPool(CONFLICT_POOL_SIZE);
15698
+ }
15699
+ enqueueConflictedRecords(records, abortController) {
15700
+ return this.pool.push({
15701
+ workFn: () => this.resolveConflicts(records, abortController),
15702
+ });
15703
+ }
15704
+ async resolveConflicts(incomingRecords, abortController) {
15705
+ const result = {
15706
+ additionalWork: { type: 'record-fields', records: {} },
15707
+ recordsToWrite: [],
15708
+ resolvedRecords: [],
15709
+ recordsNeedingRefetch: new Map(),
15710
+ errors: [],
15711
+ };
15712
+ const ids = [];
15713
+ const trackedFieldsByType = new Map();
15714
+ const apiNames = new Set();
15715
+ incomingRecords.forEach((record) => {
15716
+ ids.push(record.id);
15717
+ apiNames.add(record.apiName);
15718
+ });
15719
+ const existingRecords = await this.store.readRecords(ids);
15720
+ if (abortController.aborted) {
15721
+ return result;
15722
+ }
15723
+ const objectInfos = await this.objectInfoLoader.getObjectInfos(Array.from(apiNames));
15724
+ if (abortController.aborted) {
15725
+ return result;
15726
+ }
15727
+ const existingRecordsById = new Map(existingRecords.map((record) => [record.record.id, record]));
15728
+ for (const incomingRecord of incomingRecords) {
15729
+ const existingDurableRecordRepresentation = existingRecordsById.get(incomingRecord.id);
15730
+ const objectInfo = objectInfos[incomingRecord.apiName];
15731
+ if (existingDurableRecordRepresentation === undefined) {
15732
+ // this shouldn't happen but if it does, we should write the incoming record since there's nothing to merge
15733
+ result.recordsToWrite.push(incomingRecord);
15734
+ continue;
15735
+ }
15736
+ if (objectInfo === undefined) {
15737
+ // object infos are a prerequisite for priming so if we don't have one, we can't do anything
15738
+ result.errors.push({ id: incomingRecord.id, reason: 'object-info-missing' });
15739
+ continue;
15740
+ }
15741
+ const existingRecord = existingDurableRecordRepresentation.record;
15742
+ const mergedRecordResult = mergeRecord(existingRecord, incomingRecord, objectInfo);
15743
+ if (mergedRecordResult.ok) {
15744
+ if (mergedRecordResult.needsWrite) {
15745
+ result.recordsToWrite.push(mergedRecordResult.record);
15746
+ }
15747
+ else {
15748
+ result.resolvedRecords.push(mergedRecordResult.record.id);
15749
+ }
15750
+ continue;
15751
+ }
15752
+ else {
15753
+ const { code } = mergedRecordResult;
15754
+ const isConflict = code === 'conflict-drafts' ||
15755
+ code === 'conflict-spanning-record' ||
15756
+ code === 'conflict-missing-fields';
15757
+ if (isConflict) {
15758
+ let trackedFields = trackedFieldsByType.get(incomingRecord.apiName);
15759
+ if (trackedFields === undefined) {
15760
+ trackedFields = new Set();
15761
+ trackedFieldsByType.set(incomingRecord.apiName, trackedFields);
15762
+ }
15763
+ mergedRecordResult.fieldUnion.forEach((field) => trackedFields.add(field));
15764
+ if (code === 'conflict-missing-fields') {
15765
+ const additionalWorkForType = result.additionalWork.records[incomingRecord.apiName];
15766
+ if (additionalWorkForType === undefined) {
15767
+ result.additionalWork.records[incomingRecord.apiName] = {
15768
+ ids: [incomingRecord.id],
15769
+ fields: Array.from(trackedFields),
15770
+ };
15771
+ }
15772
+ else {
15773
+ additionalWorkForType.ids.push(incomingRecord.id);
15774
+ additionalWorkForType.fields = Array.from(trackedFields);
15775
+ }
15776
+ }
15777
+ else if (code === 'conflict-drafts' || code === 'conflict-spanning-record') {
15778
+ const recordByType = result.recordsNeedingRefetch.get(incomingRecord.apiName);
15779
+ if (recordByType === undefined) {
15780
+ result.recordsNeedingRefetch.set(incomingRecord.apiName, {
15781
+ ids: [incomingRecord.id],
15782
+ fields: Array.from(trackedFields),
15783
+ });
15784
+ }
15785
+ else {
15786
+ recordByType.ids.push(incomingRecord.id);
15787
+ recordByType.fields = Array.from(trackedFields);
15788
+ }
15789
+ }
15790
+ }
15791
+ }
15792
+ }
15793
+ return result;
15794
+ }
15795
+ }
15796
+
15552
15797
  const DEFAULT_BATCH_SIZE = 500;
15553
15798
  const DEFAULT_CONCURRENCY = 6;
15554
15799
  const DEFAULT_GQL_QUERY_BATCH_SIZE = 5;
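The `mergeRecord` helper added above is the core of the new conflict handling: it keeps the cached record when it already supersedes the incoming one, refuses to touch drafts, unions fields on matching `weakEtag`s, and otherwise either overwrites the record or reports a conflict code. A hypothetical illustration of the shapes involved and the branch a few typical incoming records would hit (the types and sample values below are stand-ins, not exports of this package):

```ts
// Minimal stand-ins for the durable record and object info shapes consumed by mergeRecord.
type FieldValue = { value: string | null };
type PrimedRecord = {
    id: string;
    apiName: string;
    weakEtag: number;
    fields: Record<string, FieldValue>;
    links: Record<string, unknown>;
    drafts?: unknown;
};

// Object info where AccountId is the lookup field backing the spanning field "Account".
const objectInfo = {
    fields: {
        Name: { reference: false, relationshipName: null },
        StageName: { reference: false, relationshipName: null },
        AccountId: { reference: true, relationshipName: 'Account' },
    },
};

// Record already in the durable store.
const existing: PrimedRecord = {
    id: '006xx000001a2bC',
    apiName: 'Opportunity',
    weakEtag: 2,
    fields: { Name: { value: 'Acme' }, AccountId: { value: '001xx000003DGb2' } },
    links: {},
};

// Incoming record with the same weakEtag and a new field: fields and links are unioned,
// the merge succeeds and needsWrite is true.
const sameEtagExtraField: PrimedRecord = { ...existing, fields: { StageName: { value: 'Closed Won' } } };

// Incoming record with a newer weakEtag but missing a scalar field the cache has (AccountId):
// 'conflict-missing-fields', so it is re-enqueued as 'record-fields' priming work with the field union.
const newerButPartial: PrimedRecord = { ...existing, weakEtag: 3, fields: { Name: { value: 'Acme Corp' } } };

// If the *existing* record carries drafts, the merge short-circuits to 'conflict-drafts'
// and the record is refetched through LDS instead of being overwritten.
const existingWithDraft: PrimedRecord = { ...existing, drafts: {} };
```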
@@ -15562,8 +15807,10 @@ class PrimingSession extends EventEmitter {
15562
15807
  this.recordLoader = config.recordLoader;
15563
15808
  this.recordIngestor = config.recordIngestor;
15564
15809
  this.objectInfoLoader = config.objectInfoLoader;
15810
+ this.ldsRecordRefresher = config.ldsRecordRefresher;
15565
15811
  this.networkWorkerPool = new AsyncWorkerPool(this.concurrency);
15566
15812
  this.useBatchGQL = ldsPrimingGraphqlBatch.isOpen({ fallback: false });
15813
+ this.conflictPool = new ConflictPool(config.store, this.objectInfoLoader);
15567
15814
  }
15568
15815
  // function that enqueues priming work
15569
15816
  async enqueue(work) {
@@ -15688,7 +15935,9 @@ class PrimingSession extends EventEmitter {
15688
15935
  const { records } = result;
15689
15936
  const beforeWrite = Date.now();
15690
15937
  // dispatch the write but DO NOT wait on it to unblock the network pool
15691
- this.recordIngestor.insertRecords(records).then(({ written, conflicted, errors }) => {
15938
+ this.recordIngestor
15939
+ .insertRecords(records, false)
15940
+ .then(({ written, conflicted, errors }) => {
15692
15941
  this.emit('batch-written', {
15693
15942
  written,
15694
15943
  conflicted,
@@ -15711,16 +15960,69 @@ class PrimingSession extends EventEmitter {
15711
15960
  if (written.length > 0) {
15712
15961
  this.emit('primed', Array.from(written));
15713
15962
  }
15714
- // TODO [W-12436213]: implement conflict resolution
15963
+ // if any records could not be written to the store because there were conflicts, handle the conflicts
15964
+ if (conflicted.length > 0) {
15965
+ this.handleWriteConflicts(records, conflicted, abortController);
15966
+ }
15967
+ });
15968
+ }
15969
+ async handleWriteConflicts(records, conflicted, abortController) {
15970
+ const result = await this.conflictPool.enqueueConflictedRecords(records.filter((x) => conflicted.includes(x.id)), abortController);
15971
+ if (abortController.aborted) {
15972
+ return;
15973
+ }
15974
+ if (Object.keys(result.additionalWork.records).length > 0) {
15975
+ this.emit('conflict', {
15976
+ ids: Object.values(result.additionalWork.records).flatMap((record) => record.ids),
15977
+ resolution: 'priming-refresh',
15978
+ });
15979
+ this.enqueue(result.additionalWork);
15980
+ }
15981
+ if (result.resolvedRecords.length > 0) {
15982
+ this.emit('conflict', {
15983
+ ids: result.resolvedRecords,
15984
+ resolution: 'priming-merge',
15985
+ });
15986
+ this.emit('primed', result.resolvedRecords);
15987
+ }
15988
+ if (result.recordsToWrite.length > 0) {
15989
+ const { written, errors, conflicted } = await this.recordIngestor.insertRecords(result.recordsToWrite, true);
15990
+ if (written.length > 0) {
15991
+ const ids = Array.from(written);
15992
+ this.emit('conflict', { ids, resolution: 'priming-merge' });
15993
+ this.emit('primed', ids);
15994
+ }
15995
+ if (errors.length > 0) {
15996
+ errors.forEach(({ ids, message }) => {
15997
+ this.emit('error', {
15998
+ ids,
15999
+ code: 'unknown',
16000
+ message: message,
16001
+ });
16002
+ });
16003
+ }
15715
16004
  if (conflicted.length > 0) {
15716
- // for now emit conlicts as errors
15717
16005
  this.emit('error', {
15718
- ids: Array.from(conflicted),
16006
+ ids: conflicted,
15719
16007
  code: 'unknown',
15720
- message: 'conflict when persisting record',
16008
+ message: 'unexpected write conflict',
15721
16009
  });
15722
16010
  }
15723
- });
16011
+ }
16012
+ if (result.recordsNeedingRefetch.size > 0) {
16013
+ const { loaded, errored } = await this.ldsRecordRefresher.loadRecords(result.recordsNeedingRefetch);
16014
+ if (loaded.length > 0) {
16015
+ this.emit('conflict', { resolution: 'lds-refresh', ids: loaded });
16016
+ this.emit('primed', loaded);
16017
+ }
16018
+ if (errored.length > 0) {
16019
+ this.emit('error', {
16020
+ ids: errored,
16021
+ code: 'unknown',
16022
+ message: `could not resolve conflicts`,
16023
+ });
16024
+ }
16025
+ }
15724
16026
  }
15725
16027
  async fetchMetadata(batches) {
15726
16028
  const apiNames = Array.from(batches.reduce((acc, x) => {
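`handleWriteConflicts` above routes each conflicted record to one of three resolutions: re-enqueue as additional priming work (`priming-refresh`), merge and rewrite locally (`priming-merge`), or refetch through the LDS getRecords adapter (`lds-refresh`), and surfaces all of them through `conflict`, `primed`, and `error` events. A minimal consumer-side sketch of listening for those events (the structural type below is an assumption standing in for `PrimingSession`, which extends `EventEmitter`):

```ts
// Structural stand-in for the event surface used by the hunk above.
type PrimingSessionEvents = {
    on(event: 'conflict', cb: (p: { ids: string[]; resolution: string }) => void): void;
    on(event: 'primed', cb: (ids: string[]) => void): void;
    on(event: 'error', cb: (p: { ids: string[]; code: string; message: string }) => void): void;
};

function observePriming(session: PrimingSessionEvents): void {
    session.on('conflict', ({ ids, resolution }) => {
        // resolution values emitted in this diff: 'priming-merge', 'priming-refresh', 'lds-refresh'
        console.log(`${ids.length} record(s) resolved via ${resolution}`);
    });
    session.on('primed', (ids) => console.log(`${ids.length} record(s) primed`));
    session.on('error', ({ ids, code, message }) => {
        console.warn(`priming error (${code}) for ${ids.length} record(s): ${message}`);
    });
}
```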
@@ -15961,6 +16263,9 @@ function instrumentPrimingSession(session) {
15961
16263
  session.on('primed', ({ length }) => {
15962
16264
  reportPrimingSuccess(length);
15963
16265
  });
16266
+ session.on('conflict', ({ ids, resolution }) => {
16267
+ reportPrimingConflict(resolution, ids.length);
16268
+ });
15964
16269
  return session;
15965
16270
  }
15966
16271
 
@@ -16142,19 +16447,72 @@ function batchArray(arr, batchSize = BATCH_SIZE) {
16142
16447
  return batches;
16143
16448
  }
16144
16449
 
16450
+ /**
16451
+ * A polyfill for Promise.allSettled
16452
+ * @param promises An array of promises
16453
+ * @returns the result of all the promises when they either fulfill or reject
16454
+ */
16455
+ function allSettled(promises) {
16456
+ let wrappedPromises = promises.map((p) => Promise.resolve(p).then((value) => ({ status: 'fulfilled', value }), (reason) => ({ status: 'rejected', reason })));
16457
+ return Promise.all(wrappedPromises);
16458
+ }
16459
+
16460
+ class LdsPrimingRecordRefresher {
16461
+ constructor(getRecordsAdapter) {
16462
+ this.getRecordsAdapter = getRecordsAdapter;
16463
+ }
16464
+ async loadRecords(records) {
16465
+ const requestedRecords = new Set();
16466
+ const promises = Array.from(records).flatMap(([_apiName, value]) => {
16467
+ value.ids.forEach((id) => requestedRecords.add(id));
16468
+ return Promise.resolve(this.getRecordsAdapter({
16469
+ records: [
16470
+ {
16471
+ recordIds: value.ids,
16472
+ optionalFields: value.fields.map((f) => `${_apiName}.${f}`),
16473
+ },
16474
+ ],
16475
+ }));
16476
+ });
16477
+ const promiseResults = await allSettled(promises);
16478
+ const loaded = [];
16479
+ promiseResults.forEach((promiseResult) => {
16480
+ if (promiseResult.status === 'fulfilled') {
16481
+ const batchResultRepresenatation = promiseResult.value;
16482
+ if (batchResultRepresenatation &&
16483
+ batchResultRepresenatation.state === 'Fulfilled') {
16484
+ batchResultRepresenatation.data.results.forEach((result) => {
16485
+ if (result.statusCode === 200) {
16486
+ const id = result.result.id;
16487
+ loaded.push(id);
16488
+ requestedRecords.delete(id);
16489
+ }
16490
+ });
16491
+ }
16492
+ }
16493
+ });
16494
+ // errored contains all the requestedRecords that weren't loaded
16495
+ const errored = Array.from(requestedRecords);
16496
+ return { loaded, errored };
16497
+ }
16498
+ }
16499
+
16145
16500
  function primingSessionFactory(config) {
16146
16501
  const { store, objectInfoService, getLuvio } = config;
16147
16502
  const networkAdapter = new NimbusPrimingNetworkAdapter();
16148
16503
  const recordLoader = new RecordLoaderGraphQL(networkAdapter);
16149
- const recordIngestor = new RecordIngestor(new SqlitePrimingStore(getLuvio, store), getLuvio);
16504
+ const primingStore = new SqlitePrimingStore(getLuvio, store);
16505
+ const recordIngestor = new RecordIngestor(primingStore, getLuvio);
16150
16506
  const session = new PrimingSession({
16151
16507
  recordLoader,
16152
16508
  recordIngestor,
16509
+ store: primingStore,
16153
16510
  objectInfoLoader: {
16154
16511
  getObjectInfos: objectInfoService.getObjectInfos.bind(objectInfoService),
16155
16512
  },
16156
16513
  concurrency: config.concurrency,
16157
16514
  batchSize: config.batchSize,
16515
+ ldsRecordRefresher: new LdsPrimingRecordRefresher(config.getRecords),
16158
16516
  });
16159
16517
  return instrumentPrimingSession(session);
16160
16518
  }
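`LdsPrimingRecordRefresher` above turns each `apiName → { ids, fields }` entry of `recordsNeedingRefetch` into a `getRecords` adapter call, qualifying every tracked field with the object API name, and gathers the results with the local `Promise.allSettled` polyfill so one rejected batch does not discard the rest. A small sketch of the request config it builds for one map entry (sample ids and field names are hypothetical):

```ts
// One entry of the recordsNeedingRefetch map handed to loadRecords above.
const entry: [string, { ids: string[]; fields: string[] }] = [
    'Account',
    { ids: ['001xx000003DGb2AAG'], fields: ['Name', 'Owner.Id'] },
];

const [apiName, value] = entry;

// Config passed to the getRecords adapter, mirroring the flatMap in loadRecords.
const getRecordsConfig = {
    records: [
        {
            recordIds: value.ids,
            optionalFields: value.fields.map((f) => `${apiName}.${f}`), // ['Account.Name', 'Account.Owner.Id']
        },
    ],
};
```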
@@ -16168,6 +16526,7 @@ let lazyEnvironment;
16168
16526
  let lazyBaseDurableStore;
16169
16527
  let lazyNetworkAdapter;
16170
16528
  let lazyObjectInfoService;
16529
+ let lazyGetRecords;
16171
16530
  /**
16172
16531
  * This returns the LDS on Mobile Runtime singleton object.
16173
16532
  */
@@ -16238,6 +16597,7 @@ function getRuntime() {
16238
16597
  lazyLuvio = new Luvio(lazyEnvironment, {
16239
16598
  instrument: instrumentLuvio,
16240
16599
  });
16600
+ lazyGetRecords = getRecordsAdapterFactory(lazyLuvio);
16241
16601
  // Currently instruments store runtime perf
16242
16602
  setupMobileInstrumentation(lazyLuvio, store);
16243
16603
  // If the inspection nimbus plugin is configured, inspection is enabled otherwise this is a no-op
@@ -16292,6 +16652,7 @@ function getRuntime() {
16292
16652
  getLuvio: () => lazyLuvio,
16293
16653
  concurrency: config.concurrency,
16294
16654
  batchSize: config.batchSize,
16655
+ getRecords: lazyGetRecords,
16295
16656
  });
16296
16657
  },
16297
16658
  };
@@ -16310,4 +16671,4 @@ register({
16310
16671
  });
16311
16672
 
16312
16673
  export { getRuntime, registerReportObserver, reportGraphqlQueryParseError };
16313
- // version: 1.208.1-8f4c4550e
16674
+ // version: 1.210.0-b2655462f
@@ -14,3 +14,4 @@ export declare function reportDraftAwareContentVersionSynthesizeCalls(mimeType:
14
14
  export declare function reportPrimingSessionCreated(): void;
15
15
  export declare function reportPrimingError(errorType: string, recordCount: number): void;
16
16
  export declare function reportPrimingSuccess(recordCount: number): void;
17
+ export declare function reportPrimingConflict(resolutionType: string, recordCount: number): void;
@@ -1,12 +1,14 @@
1
1
  import { PrimingSession } from '@salesforce/lds-priming';
2
- import type { Luvio } from '@luvio/engine';
2
+ import type { Adapter, Luvio } from '@luvio/engine';
3
3
  import type { NimbusSqliteStore } from '@salesforce/lds-store-nimbus';
4
4
  import type { ObjectInfoService } from '../main';
5
+ import type { BatchRepresentation, GetRecordsConfig } from '@salesforce/lds-adapters-uiapi';
5
6
  export interface PrimingSessionFactoryConfig {
6
7
  store: NimbusSqliteStore;
7
8
  objectInfoService: ObjectInfoService;
8
9
  getLuvio: () => Luvio;
9
10
  concurrency?: number;
10
11
  batchSize?: number;
12
+ getRecords: Adapter<GetRecordsConfig, BatchRepresentation>;
11
13
  }
12
14
  export declare function primingSessionFactory(config: PrimingSessionFactoryConfig): PrimingSession;
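`PrimingSessionFactoryConfig` now requires a `getRecords` adapter, which `getRuntime()` supplies by calling `getRecordsAdapterFactory(lazyLuvio)` once the Luvio instance exists. A minimal sketch of producing that field (only imports visible in this diff are assumed; the helper name `buildGetRecords` is made up):

```ts
import { getRecordsAdapterFactory } from '@salesforce/lds-adapters-uiapi';
import type { Adapter, Luvio } from '@luvio/engine';
import type { BatchRepresentation, GetRecordsConfig } from '@salesforce/lds-adapters-uiapi';

// Builds the value expected by the new PrimingSessionFactoryConfig.getRecords field.
function buildGetRecords(luvio: Luvio): Adapter<GetRecordsConfig, BatchRepresentation> {
    return getRecordsAdapterFactory(luvio);
}
```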
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@salesforce/lds-runtime-mobile",
3
- "version": "1.208.1",
3
+ "version": "1.210.0",
4
4
  "license": "SEE LICENSE IN LICENSE.txt",
5
5
  "description": "LDS runtime for mobile/hybrid environments.",
6
6
  "main": "dist/main.js",
@@ -58,17 +58,17 @@
58
58
  {
59
59
  "path": "./dist/main.js",
60
60
  "maxSize": {
61
- "none": "700 kB",
61
+ "none": "800 kB",
62
62
  "min": "300 kB",
63
- "compressed": "110 kB"
63
+ "compressed": "150 kB"
64
64
  }
65
65
  },
66
66
  {
67
67
  "path": "./sfdc/main.js",
68
68
  "maxSize": {
69
- "none": "700 kB",
69
+ "none": "800 kB",
70
70
  "min": "300 kB",
71
- "compressed": "110 kB"
71
+ "compressed": "150 kB"
72
72
  }
73
73
  }
74
74
  ],
package/sfdc/main.js CHANGED
@@ -16,7 +16,7 @@ import { setupInstrumentation, instrumentAdapter as instrumentAdapter$1, instrum
16
16
  import { HttpStatusCode, StoreKeySet, serializeStructuredKey, Reader, deepFreeze, emitAdapterEvent, createCustomAdapterEventEmitter, StoreKeyMap, isFileReference, Environment, Luvio, InMemoryStore } from 'force/luvioEngine';
17
17
  import excludeStaleRecordsGate from '@salesforce/gate/lds.graphqlEvalExcludeStaleRecords';
18
18
  import { parseAndVisit, Kind, visit, execute, buildSchema, isObjectType, defaultFieldResolver } from 'force/ldsGraphqlParser';
19
- import { getRecordId18, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion } from 'force/ldsAdaptersUiapi';
19
+ import { getRecordId18, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion, getRecordsAdapterFactory } from 'force/ldsAdaptersUiapi';
20
20
  import caseSensitiveUserId from '@salesforce/user/Id';
21
21
  import { idleDetector, getInstrumentation } from 'o11y/client';
22
22
  import ldsUseShortUrlGate from '@salesforce/gate/lds.useShortUrl';
@@ -14437,6 +14437,7 @@ const CREATE_CONTENT_DOCUMENT_AND_VERSION_DRAFT_SYNTHESIZE_ERROR = 'create-conte
14437
14437
  const PRIMING_TOTAL_SESSION_COUNT = 'priming-total-session-count';
14438
14438
  const PRIMING_TOTAL_ERROR_COUNT = 'priming-total-error-count';
14439
14439
  const PRIMING_TOTAL_PRIMED_COUNT = 'priming-total-primed-count';
14440
+ const PRIMING_TOTAL_CONFLICT_COUNT = 'priming-total-conflict-count';
14440
14441
  // logs
14441
14442
  const GRAPHQL_QUERY_PARSE_ERROR = 'gql-query-parse-error';
14442
14443
  const GRAPHQL_SQL_EVAL_PRECONDITION_ERROR = 'gql-sql-pre-eval-error';
@@ -14548,6 +14549,11 @@ function reportPrimingError(errorType, recordCount) {
14548
14549
  function reportPrimingSuccess(recordCount) {
14549
14550
  ldsMobileInstrumentation.incrementCounter(PRIMING_TOTAL_PRIMED_COUNT, recordCount, undefined);
14550
14551
  }
14552
+ function reportPrimingConflict(resolutionType, recordCount) {
14553
+ ldsMobileInstrumentation.incrementCounter(PRIMING_TOTAL_CONFLICT_COUNT, recordCount, undefined, {
14554
+ resolutionType,
14555
+ });
14556
+ }
14551
14557
 
14552
14558
  /**
14553
14559
  * HOF (high-order-function) that instruments any async operation. If the operation
@@ -15549,6 +15555,245 @@ function generateTypedBatches(work, batchSize) {
15549
15555
  return batches;
15550
15556
  }
15551
15557
 
15558
+ function getMissingElementsFromSuperset(superset, subset) {
15559
+ return subset.filter((val) => !superset.includes(val));
15560
+ }
15561
+ function findReferenceFieldForSpanningField(fieldName, objectInfo) {
15562
+ const fieldNames = Object.keys(objectInfo.fields);
15563
+ for (const objectInfoFieldName of fieldNames) {
15564
+ const field = objectInfo.fields[objectInfoFieldName];
15565
+ if (field.reference === true && field.relationshipName === fieldName) {
15566
+ return objectInfoFieldName;
15567
+ }
15568
+ }
15569
+ }
15570
+ function buildFieldUnionArray(existingRecord, incomingRecord, objectInfo) {
15571
+ const allFields = Array.from(new Set([...Object.keys(existingRecord.fields), ...Object.keys(incomingRecord.fields)]));
15572
+ const fieldUnion = [];
15573
+ allFields.forEach((fieldName) => {
15574
+ const objectInfoField = objectInfo.fields[fieldName];
15575
+ if (objectInfoField === undefined) {
15576
+ // find the reference field for the spanning field
15577
+ const referenceField = findReferenceFieldForSpanningField(fieldName, objectInfo);
15578
+ if (referenceField !== undefined) {
15579
+ fieldUnion.push(`${fieldName}.Id`);
15580
+ }
15581
+ }
15582
+ else {
15583
+ fieldUnion.push(fieldName);
15584
+ }
15585
+ });
15586
+ return fieldUnion;
15587
+ }
15588
+ /**
15589
+ * Merges (if possible) an incoming record from a priming session with an existing record in the durable store.
15590
+ *
15591
+ * IMPORTANT NOTE: this is not a suitable function to use for general merging of two DurableRecordRepresentation since it
15592
+ * makes the assumption that the incoming record ONLY contains scalar field values and no spanning records. The same is not
15593
+ * necessarily true for the existing record as it may have been populated in the cache outside of a priming session.
15594
+ * This function should not be moved out of the priming module!
15595
+ *
15596
+ * @param existingRecord Existing record in the durable store
15597
+ * @param incomingRecord Incoming record from the priming session
15598
+ * @param objectInfo Object info for the incoming record type
15599
+ * @returns Merge result describing the success or failure of the merge operation
15600
+ */
15601
+ function mergeRecord(existingRecord, incomingRecord, objectInfo) {
15602
+ // cache already contains everything incoming has
15603
+ if (existingRecord.weakEtag >= incomingRecord.weakEtag &&
15604
+ getMissingElementsFromSuperset(Object.keys(existingRecord.fields), Object.keys(incomingRecord.fields)).length === 0) {
15605
+ return {
15606
+ ok: true,
15607
+ code: 'success',
15608
+ needsWrite: false,
15609
+ record: existingRecord,
15610
+ };
15611
+ }
15612
+ // don't touch records that contain drafts
15613
+ if (existingRecord.drafts !== undefined) {
15614
+ return {
15615
+ ok: false,
15616
+ code: 'conflict-drafts',
15617
+ hasDraft: true,
15618
+ fieldUnion: buildFieldUnionArray(existingRecord, incomingRecord, objectInfo),
15619
+ };
15620
+ }
15621
+ // Check if incoming record's Etag is equal to the existing one
15622
+ if (existingRecord.weakEtag === incomingRecord.weakEtag) {
15623
+ // If so, merge the fields and return the updated record
15624
+ return {
15625
+ ok: true,
15626
+ needsWrite: true,
15627
+ code: 'success',
15628
+ record: {
15629
+ ...existingRecord,
15630
+ fields: {
15631
+ ...existingRecord.fields,
15632
+ ...incomingRecord.fields,
15633
+ },
15634
+ links: {
15635
+ ...existingRecord.links,
15636
+ ...incomingRecord.links,
15637
+ },
15638
+ },
15639
+ };
15640
+ }
15641
+ else if (incomingRecord.weakEtag > existingRecord.weakEtag &&
15642
+ getMissingElementsFromSuperset(Object.keys(incomingRecord.fields), Object.keys(existingRecord.fields)).length === 0) {
15643
+ // If incoming record's Etag is higher and contains all the fields, overwrite the record
15644
+ // NOTE: if existing record contains spanning records, this condition will never hit since incoming won't have those fields
15645
+ return { ok: true, code: 'success', needsWrite: true, record: incomingRecord };
15646
+ }
15647
+ else {
15648
+ const missingFields = getMissingElementsFromSuperset(Object.keys(incomingRecord.fields), Object.keys(existingRecord.fields));
15649
+ // if the only missing fields are spanning fields and their corresponding lookup fields match, we can merge
15650
+ // since none of the changed fields are part of the incoming record
15651
+ if (missingFields.every((field) => {
15652
+ const referenceFieldName = findReferenceFieldForSpanningField(field, objectInfo);
15653
+ if (referenceFieldName !== undefined) {
15654
+ return (incomingRecord.fields[referenceFieldName].value ===
15655
+ existingRecord.fields[referenceFieldName].value);
15656
+ }
15657
+ else {
15658
+ return false;
15659
+ }
15660
+ })) {
15661
+ return {
15662
+ ok: true,
15663
+ needsWrite: true,
15664
+ code: 'success',
15665
+ record: {
15666
+ // we span the existing record to maintain spanning references
15667
+ ...incomingRecord,
15668
+ fields: {
15669
+ ...existingRecord.fields,
15670
+ ...incomingRecord.fields,
15671
+ },
15672
+ links: {
15673
+ ...existingRecord.links,
15674
+ ...incomingRecord.links,
15675
+ },
15676
+ },
15677
+ };
15678
+ }
15679
+ // If Etags do not match and the incoming record does not contain all fields, re-request the record
15680
+ return {
15681
+ ok: false,
15682
+ code: 'conflict-missing-fields',
15683
+ fieldUnion: buildFieldUnionArray(existingRecord, incomingRecord, objectInfo),
15684
+ hasDraft: false,
15685
+ };
15686
+ }
15687
+ }
15688
+
15689
+ const CONFLICT_POOL_SIZE = 5;
15690
+ /**
15691
+ * A pool of workers that resolve conflicts between incoming records and records in the store.
15692
+ */
15693
+ class ConflictPool {
15694
+ constructor(store, objectInfoLoader) {
15695
+ this.store = store;
15696
+ this.objectInfoLoader = objectInfoLoader;
15697
+ this.pool = new AsyncWorkerPool(CONFLICT_POOL_SIZE);
15698
+ }
15699
+ enqueueConflictedRecords(records, abortController) {
15700
+ return this.pool.push({
15701
+ workFn: () => this.resolveConflicts(records, abortController),
15702
+ });
15703
+ }
15704
+ async resolveConflicts(incomingRecords, abortController) {
15705
+ const result = {
15706
+ additionalWork: { type: 'record-fields', records: {} },
15707
+ recordsToWrite: [],
15708
+ resolvedRecords: [],
15709
+ recordsNeedingRefetch: new Map(),
15710
+ errors: [],
15711
+ };
15712
+ const ids = [];
15713
+ const trackedFieldsByType = new Map();
15714
+ const apiNames = new Set();
15715
+ incomingRecords.forEach((record) => {
15716
+ ids.push(record.id);
15717
+ apiNames.add(record.apiName);
15718
+ });
15719
+ const existingRecords = await this.store.readRecords(ids);
15720
+ if (abortController.aborted) {
15721
+ return result;
15722
+ }
15723
+ const objectInfos = await this.objectInfoLoader.getObjectInfos(Array.from(apiNames));
15724
+ if (abortController.aborted) {
15725
+ return result;
15726
+ }
15727
+ const existingRecordsById = new Map(existingRecords.map((record) => [record.record.id, record]));
15728
+ for (const incomingRecord of incomingRecords) {
15729
+ const existingDurableRecordRepresentation = existingRecordsById.get(incomingRecord.id);
15730
+ const objectInfo = objectInfos[incomingRecord.apiName];
15731
+ if (existingDurableRecordRepresentation === undefined) {
15732
+ // this shouldn't happen but if it does, we should write the incoming record since there's nothing to merge
15733
+ result.recordsToWrite.push(incomingRecord);
15734
+ continue;
15735
+ }
15736
+ if (objectInfo === undefined) {
15737
+ // object infos are a prerequisite for priming so if we don't have one, we can't do anything
15738
+ result.errors.push({ id: incomingRecord.id, reason: 'object-info-missing' });
15739
+ continue;
15740
+ }
15741
+ const existingRecord = existingDurableRecordRepresentation.record;
15742
+ const mergedRecordResult = mergeRecord(existingRecord, incomingRecord, objectInfo);
15743
+ if (mergedRecordResult.ok) {
15744
+ if (mergedRecordResult.needsWrite) {
15745
+ result.recordsToWrite.push(mergedRecordResult.record);
15746
+ }
15747
+ else {
15748
+ result.resolvedRecords.push(mergedRecordResult.record.id);
15749
+ }
15750
+ continue;
15751
+ }
15752
+ else {
15753
+ const { code } = mergedRecordResult;
15754
+ const isConflict = code === 'conflict-drafts' ||
15755
+ code === 'conflict-spanning-record' ||
15756
+ code === 'conflict-missing-fields';
15757
+ if (isConflict) {
15758
+ let trackedFields = trackedFieldsByType.get(incomingRecord.apiName);
15759
+ if (trackedFields === undefined) {
15760
+ trackedFields = new Set();
15761
+ trackedFieldsByType.set(incomingRecord.apiName, trackedFields);
15762
+ }
15763
+ mergedRecordResult.fieldUnion.forEach((field) => trackedFields.add(field));
15764
+ if (code === 'conflict-missing-fields') {
15765
+ const additionalWorkForType = result.additionalWork.records[incomingRecord.apiName];
15766
+ if (additionalWorkForType === undefined) {
15767
+ result.additionalWork.records[incomingRecord.apiName] = {
15768
+ ids: [incomingRecord.id],
15769
+ fields: Array.from(trackedFields),
15770
+ };
15771
+ }
15772
+ else {
15773
+ additionalWorkForType.ids.push(incomingRecord.id);
15774
+ additionalWorkForType.fields = Array.from(trackedFields);
15775
+ }
15776
+ }
15777
+ else if (code === 'conflict-drafts' || code === 'conflict-spanning-record') {
15778
+ const recordByType = result.recordsNeedingRefetch.get(incomingRecord.apiName);
15779
+ if (recordByType === undefined) {
15780
+ result.recordsNeedingRefetch.set(incomingRecord.apiName, {
15781
+ ids: [incomingRecord.id],
15782
+ fields: Array.from(trackedFields),
15783
+ });
15784
+ }
15785
+ else {
15786
+ recordByType.ids.push(incomingRecord.id);
15787
+ recordByType.fields = Array.from(trackedFields);
15788
+ }
15789
+ }
15790
+ }
15791
+ }
15792
+ }
15793
+ return result;
15794
+ }
15795
+ }
15796
+
15552
15797
  const DEFAULT_BATCH_SIZE = 500;
15553
15798
  const DEFAULT_CONCURRENCY = 6;
15554
15799
  const DEFAULT_GQL_QUERY_BATCH_SIZE = 5;
@@ -15562,8 +15807,10 @@ class PrimingSession extends EventEmitter {
15562
15807
  this.recordLoader = config.recordLoader;
15563
15808
  this.recordIngestor = config.recordIngestor;
15564
15809
  this.objectInfoLoader = config.objectInfoLoader;
15810
+ this.ldsRecordRefresher = config.ldsRecordRefresher;
15565
15811
  this.networkWorkerPool = new AsyncWorkerPool(this.concurrency);
15566
15812
  this.useBatchGQL = ldsPrimingGraphqlBatch.isOpen({ fallback: false });
15813
+ this.conflictPool = new ConflictPool(config.store, this.objectInfoLoader);
15567
15814
  }
15568
15815
  // function that enqueues priming work
15569
15816
  async enqueue(work) {
@@ -15688,7 +15935,9 @@ class PrimingSession extends EventEmitter {
15688
15935
  const { records } = result;
15689
15936
  const beforeWrite = Date.now();
15690
15937
  // dispatch the write but DO NOT wait on it to unblock the network pool
15691
- this.recordIngestor.insertRecords(records).then(({ written, conflicted, errors }) => {
15938
+ this.recordIngestor
15939
+ .insertRecords(records, false)
15940
+ .then(({ written, conflicted, errors }) => {
15692
15941
  this.emit('batch-written', {
15693
15942
  written,
15694
15943
  conflicted,
@@ -15711,16 +15960,69 @@ class PrimingSession extends EventEmitter {
15711
15960
  if (written.length > 0) {
15712
15961
  this.emit('primed', Array.from(written));
15713
15962
  }
15714
- // TODO [W-12436213]: implement conflict resolution
15963
+ // if any records could not be written to the store because there were conflicts, handle the conflicts
15964
+ if (conflicted.length > 0) {
15965
+ this.handleWriteConflicts(records, conflicted, abortController);
15966
+ }
15967
+ });
15968
+ }
15969
+ async handleWriteConflicts(records, conflicted, abortController) {
15970
+ const result = await this.conflictPool.enqueueConflictedRecords(records.filter((x) => conflicted.includes(x.id)), abortController);
15971
+ if (abortController.aborted) {
15972
+ return;
15973
+ }
15974
+ if (Object.keys(result.additionalWork.records).length > 0) {
15975
+ this.emit('conflict', {
15976
+ ids: Object.values(result.additionalWork.records).flatMap((record) => record.ids),
15977
+ resolution: 'priming-refresh',
15978
+ });
15979
+ this.enqueue(result.additionalWork);
15980
+ }
15981
+ if (result.resolvedRecords.length > 0) {
15982
+ this.emit('conflict', {
15983
+ ids: result.resolvedRecords,
15984
+ resolution: 'priming-merge',
15985
+ });
15986
+ this.emit('primed', result.resolvedRecords);
15987
+ }
15988
+ if (result.recordsToWrite.length > 0) {
15989
+ const { written, errors, conflicted } = await this.recordIngestor.insertRecords(result.recordsToWrite, true);
15990
+ if (written.length > 0) {
15991
+ const ids = Array.from(written);
15992
+ this.emit('conflict', { ids, resolution: 'priming-merge' });
15993
+ this.emit('primed', ids);
15994
+ }
15995
+ if (errors.length > 0) {
15996
+ errors.forEach(({ ids, message }) => {
15997
+ this.emit('error', {
15998
+ ids,
15999
+ code: 'unknown',
16000
+ message: message,
16001
+ });
16002
+ });
16003
+ }
15715
16004
  if (conflicted.length > 0) {
15716
- // for now emit conlicts as errors
15717
16005
  this.emit('error', {
15718
- ids: Array.from(conflicted),
16006
+ ids: conflicted,
15719
16007
  code: 'unknown',
15720
- message: 'conflict when persisting record',
16008
+ message: 'unexpected write conflict',
15721
16009
  });
15722
16010
  }
15723
- });
16011
+ }
16012
+ if (result.recordsNeedingRefetch.size > 0) {
16013
+ const { loaded, errored } = await this.ldsRecordRefresher.loadRecords(result.recordsNeedingRefetch);
16014
+ if (loaded.length > 0) {
16015
+ this.emit('conflict', { resolution: 'lds-refresh', ids: loaded });
16016
+ this.emit('primed', loaded);
16017
+ }
16018
+ if (errored.length > 0) {
16019
+ this.emit('error', {
16020
+ ids: errored,
16021
+ code: 'unknown',
16022
+ message: `could not resolve conflicts`,
16023
+ });
16024
+ }
16025
+ }
15724
16026
  }
15725
16027
  async fetchMetadata(batches) {
15726
16028
  const apiNames = Array.from(batches.reduce((acc, x) => {
@@ -15961,6 +16263,9 @@ function instrumentPrimingSession(session) {
15961
16263
  session.on('primed', ({ length }) => {
15962
16264
  reportPrimingSuccess(length);
15963
16265
  });
16266
+ session.on('conflict', ({ ids, resolution }) => {
16267
+ reportPrimingConflict(resolution, ids.length);
16268
+ });
15964
16269
  return session;
15965
16270
  }
15966
16271
 
@@ -16142,19 +16447,72 @@ function batchArray(arr, batchSize = BATCH_SIZE) {
16142
16447
  return batches;
16143
16448
  }
16144
16449
 
16450
+ /**
16451
+ * A polyfill for Promise.allSettled
16452
+ * @param promises An array of promises
16453
+ * @returns the result of all the promises when they either fulfill or reject
16454
+ */
16455
+ function allSettled(promises) {
16456
+ let wrappedPromises = promises.map((p) => Promise.resolve(p).then((value) => ({ status: 'fulfilled', value }), (reason) => ({ status: 'rejected', reason })));
16457
+ return Promise.all(wrappedPromises);
16458
+ }
16459
+
16460
+ class LdsPrimingRecordRefresher {
16461
+ constructor(getRecordsAdapter) {
16462
+ this.getRecordsAdapter = getRecordsAdapter;
16463
+ }
16464
+ async loadRecords(records) {
16465
+ const requestedRecords = new Set();
16466
+ const promises = Array.from(records).flatMap(([_apiName, value]) => {
16467
+ value.ids.forEach((id) => requestedRecords.add(id));
16468
+ return Promise.resolve(this.getRecordsAdapter({
16469
+ records: [
16470
+ {
16471
+ recordIds: value.ids,
16472
+ optionalFields: value.fields.map((f) => `${_apiName}.${f}`),
16473
+ },
16474
+ ],
16475
+ }));
16476
+ });
16477
+ const promiseResults = await allSettled(promises);
16478
+ const loaded = [];
16479
+ promiseResults.forEach((promiseResult) => {
16480
+ if (promiseResult.status === 'fulfilled') {
16481
+ const batchResultRepresenatation = promiseResult.value;
16482
+ if (batchResultRepresenatation &&
16483
+ batchResultRepresenatation.state === 'Fulfilled') {
16484
+ batchResultRepresenatation.data.results.forEach((result) => {
16485
+ if (result.statusCode === 200) {
16486
+ const id = result.result.id;
16487
+ loaded.push(id);
16488
+ requestedRecords.delete(id);
16489
+ }
16490
+ });
16491
+ }
16492
+ }
16493
+ });
16494
+ // errored contains all the requestedRecords that weren't loaded
16495
+ const errored = Array.from(requestedRecords);
16496
+ return { loaded, errored };
16497
+ }
16498
+ }
16499
+
16145
16500
  function primingSessionFactory(config) {
16146
16501
  const { store, objectInfoService, getLuvio } = config;
16147
16502
  const networkAdapter = new NimbusPrimingNetworkAdapter();
16148
16503
  const recordLoader = new RecordLoaderGraphQL(networkAdapter);
16149
- const recordIngestor = new RecordIngestor(new SqlitePrimingStore(getLuvio, store), getLuvio);
16504
+ const primingStore = new SqlitePrimingStore(getLuvio, store);
16505
+ const recordIngestor = new RecordIngestor(primingStore, getLuvio);
16150
16506
  const session = new PrimingSession({
16151
16507
  recordLoader,
16152
16508
  recordIngestor,
16509
+ store: primingStore,
16153
16510
  objectInfoLoader: {
16154
16511
  getObjectInfos: objectInfoService.getObjectInfos.bind(objectInfoService),
16155
16512
  },
16156
16513
  concurrency: config.concurrency,
16157
16514
  batchSize: config.batchSize,
16515
+ ldsRecordRefresher: new LdsPrimingRecordRefresher(config.getRecords),
16158
16516
  });
16159
16517
  return instrumentPrimingSession(session);
16160
16518
  }
@@ -16168,6 +16526,7 @@ let lazyEnvironment;
16168
16526
  let lazyBaseDurableStore;
16169
16527
  let lazyNetworkAdapter;
16170
16528
  let lazyObjectInfoService;
16529
+ let lazyGetRecords;
16171
16530
  /**
16172
16531
  * This returns the LDS on Mobile Runtime singleton object.
16173
16532
  */
@@ -16238,6 +16597,7 @@ function getRuntime() {
16238
16597
  lazyLuvio = new Luvio(lazyEnvironment, {
16239
16598
  instrument: instrumentLuvio,
16240
16599
  });
16600
+ lazyGetRecords = getRecordsAdapterFactory(lazyLuvio);
16241
16601
  // Currently instruments store runtime perf
16242
16602
  setupMobileInstrumentation(lazyLuvio, store);
16243
16603
  // If the inspection nimbus plugin is configured, inspection is enabled otherwise this is a no-op
@@ -16292,6 +16652,7 @@ function getRuntime() {
16292
16652
  getLuvio: () => lazyLuvio,
16293
16653
  concurrency: config.concurrency,
16294
16654
  batchSize: config.batchSize,
16655
+ getRecords: lazyGetRecords,
16295
16656
  });
16296
16657
  },
16297
16658
  };
@@ -16310,4 +16671,4 @@ register({
16310
16671
  });
16311
16672
 
16312
16673
  export { getRuntime, registerReportObserver, reportGraphqlQueryParseError };
16313
- // version: 1.208.1-8f4c4550e
16674
+ // version: 1.210.0-b2655462f
@@ -14,3 +14,4 @@ export declare function reportDraftAwareContentVersionSynthesizeCalls(mimeType:
14
14
  export declare function reportPrimingSessionCreated(): void;
15
15
  export declare function reportPrimingError(errorType: string, recordCount: number): void;
16
16
  export declare function reportPrimingSuccess(recordCount: number): void;
17
+ export declare function reportPrimingConflict(resolutionType: string, recordCount: number): void;
@@ -1,12 +1,14 @@
1
1
  import { PrimingSession } from '@salesforce/lds-priming';
2
- import type { Luvio } from '@luvio/engine';
2
+ import type { Adapter, Luvio } from '@luvio/engine';
3
3
  import type { NimbusSqliteStore } from '@salesforce/lds-store-nimbus';
4
4
  import type { ObjectInfoService } from '../main';
5
+ import type { BatchRepresentation, GetRecordsConfig } from '@salesforce/lds-adapters-uiapi';
5
6
  export interface PrimingSessionFactoryConfig {
6
7
  store: NimbusSqliteStore;
7
8
  objectInfoService: ObjectInfoService;
8
9
  getLuvio: () => Luvio;
9
10
  concurrency?: number;
10
11
  batchSize?: number;
12
+ getRecords: Adapter<GetRecordsConfig, BatchRepresentation>;
11
13
  }
12
14
  export declare function primingSessionFactory(config: PrimingSessionFactoryConfig): PrimingSession;