@salesforce/lds-runtime-mobile 1.311.1 → 1.313.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +264 -3461
- package/dist/types/priming/NimbusPrimingNetworkAdapter.d.ts +1 -1
- package/package.json +17 -18
- package/sfdc/main.js +264 -3461
- package/sfdc/types/priming/NimbusPrimingNetworkAdapter.d.ts +1 -1
package/dist/main.js
CHANGED
|
@@ -17,12 +17,11 @@
|
|
|
17
17
|
*/
|
|
18
18
|
import { withRegistration, register } from '@salesforce/lds-default-luvio';
|
|
19
19
|
import { setupInstrumentation, instrumentAdapter as instrumentAdapter$1, instrumentLuvio, setLdsAdaptersUiapiInstrumentation, setLdsNetworkAdapterInstrumentation } from '@salesforce/lds-instrumentation';
|
|
20
|
-
import { HttpStatusCode, setBypassDeepFreeze, StoreKeySet, serializeStructuredKey, StringKeyInMemoryStore, Reader, deepFreeze, emitAdapterEvent,
|
|
21
|
-
import
|
|
22
|
-
import { parseAndVisit, Kind as Kind$1, buildSchema, isObjectType, defaultFieldResolver, visit, execute, parse as parse$7, extendSchema, isScalarType } from '@luvio/graphql-parser';
|
|
23
|
-
import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, isStoreKeyRecordViewEntity, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, getRecordId18, getRecordsAdapterFactory, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion } from '@salesforce/lds-adapters-uiapi-mobile';
|
|
20
|
+
import { HttpStatusCode, setBypassDeepFreeze, StoreKeySet, serializeStructuredKey, StringKeyInMemoryStore, Reader, deepFreeze, emitAdapterEvent, StoreKeyMap, createCustomAdapterEventEmitter, isFileReference, Environment, Luvio, InMemoryStore } from '@luvio/engine';
|
|
21
|
+
import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, isStoreKeyRecordViewEntity, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, RecordRepresentationVersion, getRecordId18, getRecordsAdapterFactory, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL } from '@salesforce/lds-adapters-uiapi-mobile';
|
|
24
22
|
import ldsIdempotencyWriteDisabled from '@salesforce/gate/lds.idempotencyWriteDisabled';
|
|
25
23
|
import ldsBackdatingEnabled from '@salesforce/gate/lds.backdatingEnabled';
|
|
24
|
+
import { Kind as Kind$1, buildSchema, isObjectType, defaultFieldResolver, visit, execute, parse as parse$7, extendSchema, isScalarType } from '@luvio/graphql-parser';
|
|
26
25
|
import FIRST_DAY_OF_WEEK from '@salesforce/i18n/firstDayOfWeek';
|
|
27
26
|
import caseSensitiveUserId from '@salesforce/user/Id';
|
|
28
27
|
import { idleDetector, getInstrumentation } from 'o11y/client';
|
|
@@ -54,8 +53,8 @@ import { isStoreKeyRecordViewEntity as isStoreKeyRecordViewEntity$1, RECORD_ID_P
|
|
|
54
53
|
|
|
55
54
|
const { parse: parse$6, stringify: stringify$6 } = JSON;
|
|
56
55
|
const { join: join$2, push: push$2, unshift } = Array.prototype;
|
|
57
|
-
const { isArray: isArray$
|
|
58
|
-
const { entries: entries$6, keys: keys$
|
|
56
|
+
const { isArray: isArray$4 } = Array;
|
|
57
|
+
const { entries: entries$6, keys: keys$8 } = Object;
|
|
59
58
|
|
|
60
59
|
const UI_API_BASE_URI = '/services/data/v63.0/ui-api';
|
|
61
60
|
|
|
@@ -120,7 +119,7 @@ function isSpanningRecord$1(fieldValue) {
|
|
|
120
119
|
function mergeRecordFields$1(first, second) {
|
|
121
120
|
const { fields: targetFields } = first;
|
|
122
121
|
const { fields: sourceFields } = second;
|
|
123
|
-
const fieldNames = keys$
|
|
122
|
+
const fieldNames = keys$8(sourceFields);
|
|
124
123
|
for (let i = 0, len = fieldNames.length; i < len; i += 1) {
|
|
125
124
|
const fieldName = fieldNames[i];
|
|
126
125
|
const sourceField = sourceFields[fieldName];
|
|
@@ -279,7 +278,7 @@ const getRecordDispatcher = (req) => {
|
|
|
279
278
|
}
|
|
280
279
|
}
|
|
281
280
|
const recordId = urlParams.recordId;
|
|
282
|
-
const fieldsArray = fields !== undefined && isArray$
|
|
281
|
+
const fieldsArray = fields !== undefined && isArray$4(fields) ? fields : [];
|
|
283
282
|
const optionalFieldsArray = optionalFields !== undefined && Array.isArray(optionalFields)
|
|
284
283
|
? optionalFields
|
|
285
284
|
: [];
|
|
@@ -537,7 +536,7 @@ const RedirectDurableSegment = 'REDIRECT_KEYS';
|
|
|
537
536
|
const MessagingDurableSegment = 'MESSAGING';
|
|
538
537
|
const MessageNotifyStoreUpdateAvailable = 'notifyStoreUpdateAvailable';
|
|
539
538
|
|
|
540
|
-
const { keys: keys$
|
|
539
|
+
const { keys: keys$7, create: create$7, assign: assign$7, freeze: freeze$2 } = Object;
|
|
541
540
|
|
|
542
541
|
//Durable store error instrumentation key
|
|
543
542
|
const DURABLE_STORE_ERROR = 'durable-store-error';
|
|
@@ -587,7 +586,7 @@ function publishDurableStoreEntries(durableRecords, put, publishMetadata) {
|
|
|
587
586
|
if (durableRecords === undefined) {
|
|
588
587
|
return { revivedKeys, hadUnexpectedShape };
|
|
589
588
|
}
|
|
590
|
-
const durableKeys = keys$
|
|
589
|
+
const durableKeys = keys$7(durableRecords);
|
|
591
590
|
if (durableKeys.length === 0) {
|
|
592
591
|
// no records to revive
|
|
593
592
|
return { revivedKeys, hadUnexpectedShape };
|
|
@@ -772,7 +771,7 @@ class DurableTTLStore {
|
|
|
772
771
|
overrides,
|
|
773
772
|
};
|
|
774
773
|
}
|
|
775
|
-
const keys$1 = keys$
|
|
774
|
+
const keys$1 = keys$7(entries);
|
|
776
775
|
for (let i = 0, len = keys$1.length; i < len; i++) {
|
|
777
776
|
const key = keys$1[i];
|
|
778
777
|
const entry = entries[key];
|
|
@@ -801,7 +800,7 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
801
800
|
// TODO: W-8909393 Once metadata is stored in its own segment we need to
|
|
802
801
|
// call setEntries for the visitedIds on default segment and call setEntries
|
|
803
802
|
// on the metadata segment for the refreshedIds
|
|
804
|
-
const keys$1 = keys$
|
|
803
|
+
const keys$1 = keys$7({ ...visitedIds, ...refreshedIds });
|
|
805
804
|
for (let i = 0, len = keys$1.length; i < len; i += 1) {
|
|
806
805
|
const key = keys$1[i];
|
|
807
806
|
const canonicalKey = store.getCanonicalRecordId(key);
|
|
@@ -833,7 +832,7 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
833
832
|
}
|
|
834
833
|
}
|
|
835
834
|
const durableStoreOperations = additionalDurableStoreOperations;
|
|
836
|
-
const recordKeys = keys$
|
|
835
|
+
const recordKeys = keys$7(durableRecords);
|
|
837
836
|
if (recordKeys.length > 0) {
|
|
838
837
|
// publishes with data
|
|
839
838
|
durableStoreOperations.push({
|
|
@@ -842,7 +841,7 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
842
841
|
segment: DefaultDurableSegment,
|
|
843
842
|
});
|
|
844
843
|
}
|
|
845
|
-
const refreshKeys = keys$
|
|
844
|
+
const refreshKeys = keys$7(refreshedDurableRecords);
|
|
846
845
|
if (refreshKeys.length > 0) {
|
|
847
846
|
// publishes with only metadata updates
|
|
848
847
|
durableStoreOperations.push({
|
|
@@ -864,7 +863,7 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
864
863
|
});
|
|
865
864
|
});
|
|
866
865
|
// evicts
|
|
867
|
-
const evictedKeys = keys$
|
|
866
|
+
const evictedKeys = keys$7(evictedRecords);
|
|
868
867
|
if (evictedKeys.length > 0) {
|
|
869
868
|
durableStoreOperations.push({
|
|
870
869
|
type: 'evictEntries',
|
|
@@ -1122,7 +1121,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
|
|
|
1122
1121
|
try {
|
|
1123
1122
|
const entries = await durableStore.getEntries(adapterContextKeysFromDifferentInstance, AdapterContextSegment);
|
|
1124
1123
|
if (entries !== undefined) {
|
|
1125
|
-
const entryKeys = keys$
|
|
1124
|
+
const entryKeys = keys$7(entries);
|
|
1126
1125
|
for (let i = 0, len = entryKeys.length; i < len; i++) {
|
|
1127
1126
|
const entryKey = entryKeys[i];
|
|
1128
1127
|
const entry = entries[entryKey];
|
|
@@ -1157,7 +1156,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
|
|
|
1157
1156
|
if (filteredKeys.length > 0) {
|
|
1158
1157
|
const entries = await durableStore.getMetadata(filteredKeys, DefaultDurableSegment);
|
|
1159
1158
|
if (entries !== undefined) {
|
|
1160
|
-
const entryKeys = keys$
|
|
1159
|
+
const entryKeys = keys$7(entries);
|
|
1161
1160
|
for (let i = 0, len = entryKeys.length; i < len; i++) {
|
|
1162
1161
|
const entryKey = entryKeys[i];
|
|
1163
1162
|
const { metadata } = entries[entryKey];
|
|
@@ -1529,7 +1528,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
|
|
|
1529
1528
|
validateNotDisposed();
|
|
1530
1529
|
const entryKeys = keys$1.map(serializeStructuredKey);
|
|
1531
1530
|
const entries = await durableStore.getEntries(entryKeys, DefaultDurableSegment);
|
|
1532
|
-
if (entries === undefined || keys$
|
|
1531
|
+
if (entries === undefined || keys$7(entries).length === 0) {
|
|
1533
1532
|
return environment.notifyStoreUpdateAvailable(keys$1);
|
|
1534
1533
|
}
|
|
1535
1534
|
const now = Date.now();
|
|
@@ -1541,3369 +1540,101 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
|
|
|
1541
1540
|
const storeEntry = entry;
|
|
1542
1541
|
if (storeEntry.metadata !== undefined) {
|
|
1543
1542
|
storeEntry.metadata = {
|
|
1544
|
-
...storeEntry.metadata,
|
|
1545
|
-
expirationTimestamp: now,
|
|
1546
|
-
};
|
|
1547
|
-
}
|
|
1548
|
-
needWriteBack = true;
|
|
1549
|
-
}
|
|
1550
|
-
}
|
|
1551
|
-
if (needWriteBack) {
|
|
1552
|
-
await durableStore.setEntries(entries, DefaultDurableSegment);
|
|
1553
|
-
}
|
|
1554
|
-
// push a notifyStoreUpdateAvailable message with entryKeys as data into messaging segment
|
|
1555
|
-
await durableStore.setEntries({ notifyStoreUpdateAvailable: { data: entryKeys } }, MessagingDurableSegment);
|
|
1556
|
-
return Promise.resolve(undefined);
|
|
1557
|
-
};
|
|
1558
|
-
const reviveSnapshotWrapper = function (unavailableSnapshot, buildL1Snapshot) {
|
|
1559
|
-
let revivingStore = undefined;
|
|
1560
|
-
if (useRevivingStore) {
|
|
1561
|
-
// NOTE: `store` is private, there doesn't seem to be a better,
|
|
1562
|
-
// cleaner way of accessing it from a derived environment.
|
|
1563
|
-
let baseStore = environment.store;
|
|
1564
|
-
// If we're rebuilding during an ingest, the existing staging store should be the base store.
|
|
1565
|
-
if (stagingStore) {
|
|
1566
|
-
baseStore = stagingStore;
|
|
1567
|
-
}
|
|
1568
|
-
let revivingStore = buildRevivingStagingStore(baseStore);
|
|
1569
|
-
revivingStores.add(revivingStore);
|
|
1570
|
-
}
|
|
1571
|
-
return reviveSnapshot(environment, durableStore, unavailableSnapshot, durableStoreErrorHandler, () => {
|
|
1572
|
-
const tempStore = stagingStore;
|
|
1573
|
-
const result = buildL1Snapshot();
|
|
1574
|
-
stagingStore = tempStore;
|
|
1575
|
-
return result;
|
|
1576
|
-
}, revivingStore).finally(() => {
|
|
1577
|
-
});
|
|
1578
|
-
};
|
|
1579
|
-
const expirePossibleStaleRecords = async function (keys$1, config, refresh) {
|
|
1580
|
-
validateNotDisposed();
|
|
1581
|
-
const metadataKeys = keys$1.map(serializeStructuredKey);
|
|
1582
|
-
const now = Date.now();
|
|
1583
|
-
const entries = await durableStore.getMetadata(metadataKeys, DefaultDurableSegment);
|
|
1584
|
-
if (entries === undefined || keys$8(entries).length === 0) {
|
|
1585
|
-
return environment.expirePossibleStaleRecords(keys$1);
|
|
1586
|
-
}
|
|
1587
|
-
let metaDataChanged = false;
|
|
1588
|
-
const metadataEntries = metadataKeys.reduce((accu, key) => {
|
|
1589
|
-
const metadataEntry = entries[key];
|
|
1590
|
-
if (metadataEntry.metadata !== undefined) {
|
|
1591
|
-
const metadata = { ...metadataEntry.metadata, expirationTimestamp: now };
|
|
1592
|
-
accu[key] = { metadata };
|
|
1593
|
-
metaDataChanged = true;
|
|
1594
|
-
}
|
|
1595
|
-
return accu;
|
|
1596
|
-
}, {});
|
|
1597
|
-
if (metaDataChanged) {
|
|
1598
|
-
await durableStore.setMetadata(metadataEntries, DefaultDurableSegment);
|
|
1599
|
-
}
|
|
1600
|
-
if (config !== undefined && refresh !== undefined) {
|
|
1601
|
-
return environment.refreshPossibleStaleRecords(config, refresh);
|
|
1602
|
-
}
|
|
1603
|
-
return Promise.resolve();
|
|
1604
|
-
};
|
|
1605
|
-
// set the default cache policy of the base environment
|
|
1606
|
-
environment.setDefaultCachePolicy({
|
|
1607
|
-
type: 'stale-while-revalidate',
|
|
1608
|
-
staleDurationSeconds: Number.MAX_SAFE_INTEGER,
|
|
1609
|
-
});
|
|
1610
|
-
return create$7(environment, {
|
|
1611
|
-
publishStoreMetadata: { value: publishStoreMetadata },
|
|
1612
|
-
storeIngest: { value: storeIngest },
|
|
1613
|
-
storeIngestError: { value: storeIngestError },
|
|
1614
|
-
storeBroadcast: { value: storeBroadcast },
|
|
1615
|
-
storeLookup: { value: storeLookup },
|
|
1616
|
-
storeEvict: { value: storeEvict },
|
|
1617
|
-
wrapNormalizedGraphNode: { value: wrapNormalizedGraphNode },
|
|
1618
|
-
getNode: { value: getNode },
|
|
1619
|
-
rebuildSnapshot: { value: rebuildSnapshot },
|
|
1620
|
-
withContext: { value: withContext },
|
|
1621
|
-
storeSetTTLOverride: { value: storeSetTTLOverride },
|
|
1622
|
-
storeSetDefaultTTLOverride: { value: storeSetDefaultTTLOverride },
|
|
1623
|
-
storePublish: { value: storePublish },
|
|
1624
|
-
storeRedirect: { value: storeRedirect },
|
|
1625
|
-
dispose: { value: dispose },
|
|
1626
|
-
publishChangesToDurableStore: { value: publishChangesToDurableStore },
|
|
1627
|
-
getDurableTTLOverrides: { value: getDurableTTLOverrides },
|
|
1628
|
-
dispatchResourceRequest: { value: dispatchResourceRequest },
|
|
1629
|
-
applyCachePolicy: { value: applyCachePolicy },
|
|
1630
|
-
getIngestStagingStoreRecords: { value: getIngestStagingStoreRecords },
|
|
1631
|
-
getIngestStagingStoreMetadata: { value: getIngestStagingStoreMetadata },
|
|
1632
|
-
getIngestStagingStore: { value: getIngestStagingStore },
|
|
1633
|
-
handleSuccessResponse: { value: handleSuccessResponse },
|
|
1634
|
-
handleErrorResponse: { value: handleErrorResponse },
|
|
1635
|
-
getNotifyChangeStoreEntries: { value: getNotifyChangeStoreEntries },
|
|
1636
|
-
notifyStoreUpdateAvailable: { value: notifyStoreUpdateAvailable },
|
|
1637
|
-
expirePossibleStaleRecords: { value: expirePossibleStaleRecords },
|
|
1638
|
-
});
|
|
1639
|
-
}
|
|
1640
|
-
|
|
1641
|
-
/**
|
|
1642
|
-
* Copyright (c) 2022, Salesforce, Inc.,
|
|
1643
|
-
* All rights reserved.
|
|
1644
|
-
* For full license text, see the LICENSE.txt file
|
|
1645
|
-
*/
|
|
1646
|
-
|
|
1647
|
-
|
|
1648
|
-
function isStoreKeyRecordId$1(key) {
|
|
1649
|
-
return key.indexOf(RECORD_ID_PREFIX) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION) === -1;
|
|
1650
|
-
}
|
|
1651
|
-
function objectsDeepEqual(lhs, rhs) {
|
|
1652
|
-
if (lhs === rhs)
|
|
1653
|
-
return true;
|
|
1654
|
-
if (typeof lhs !== 'object' || typeof rhs !== 'object' || lhs === null || rhs === null)
|
|
1655
|
-
return false;
|
|
1656
|
-
const lhsKeys = Object.keys(lhs);
|
|
1657
|
-
const rhsKeys = Object.keys(rhs);
|
|
1658
|
-
if (lhsKeys.length !== rhsKeys.length)
|
|
1659
|
-
return false;
|
|
1660
|
-
for (let key of lhsKeys) {
|
|
1661
|
-
if (!rhsKeys.includes(key))
|
|
1662
|
-
return false;
|
|
1663
|
-
if (typeof lhs[key] === 'function' || typeof rhs[key] === 'function') {
|
|
1664
|
-
if (lhs[key].toString() !== rhs[key].toString())
|
|
1665
|
-
return false;
|
|
1666
|
-
}
|
|
1667
|
-
else {
|
|
1668
|
-
if (!objectsDeepEqual(lhs[key], rhs[key]))
|
|
1669
|
-
return false;
|
|
1670
|
-
}
|
|
1671
|
-
}
|
|
1672
|
-
return true;
|
|
1673
|
-
}
|
|
1674
|
-
|
|
1675
|
-
/**
|
|
1676
|
-
* Copyright (c) 2022, Salesforce, Inc.,
|
|
1677
|
-
* All rights reserved.
|
|
1678
|
-
* For full license text, see the LICENSE.txt file
|
|
1679
|
-
*/
|
|
1680
|
-
|
|
1681
|
-
|
|
1682
|
-
const GRAPHQL_ROOT_KEY$1 = `GraphQL::graphql`;
|
|
1683
|
-
function findIds(json) {
|
|
1684
|
-
const entries = Object.entries(json);
|
|
1685
|
-
let ids = [];
|
|
1686
|
-
for (let index = 0; index < entries.length; index++) {
|
|
1687
|
-
const entry = entries[index];
|
|
1688
|
-
const key = entry[0];
|
|
1689
|
-
const value = entry[1];
|
|
1690
|
-
if (typeof value === 'object' && value !== null) {
|
|
1691
|
-
const childIds = findIds(value);
|
|
1692
|
-
ids.push(...childIds);
|
|
1693
|
-
}
|
|
1694
|
-
else if (key === 'Id' && typeof value === 'string') {
|
|
1695
|
-
ids.push(value);
|
|
1696
|
-
}
|
|
1697
|
-
}
|
|
1698
|
-
return ids;
|
|
1699
|
-
}
|
|
1700
|
-
function idWithPrefix(id) {
|
|
1701
|
-
return `UiApi::RecordRepresentation:${id}`;
|
|
1702
|
-
}
|
|
1703
|
-
function createSeenRecords$1(json) {
|
|
1704
|
-
const ids = findIds(json)
|
|
1705
|
-
.map(idWithPrefix)
|
|
1706
|
-
.reduce((acc, curr) => (acc.add(curr), acc), new StoreKeySet());
|
|
1707
|
-
return ids;
|
|
1708
|
-
}
|
|
1709
|
-
function createStoreEvalSnapshot(data, seenRecords, rebuildWithStoreEval, recordId) {
|
|
1710
|
-
return {
|
|
1711
|
-
recordId,
|
|
1712
|
-
variables: {},
|
|
1713
|
-
seenRecords,
|
|
1714
|
-
select: {
|
|
1715
|
-
recordId: GRAPHQL_ROOT_KEY$1,
|
|
1716
|
-
variables: {},
|
|
1717
|
-
node: {
|
|
1718
|
-
kind: 'Fragment',
|
|
1719
|
-
private: [],
|
|
1720
|
-
},
|
|
1721
|
-
},
|
|
1722
|
-
state: 'Fulfilled',
|
|
1723
|
-
data,
|
|
1724
|
-
rebuildWithStoreEval,
|
|
1725
|
-
};
|
|
1726
|
-
}
|
|
1727
|
-
|
|
1728
|
-
var PredicateType$1;
|
|
1729
|
-
(function (PredicateType) {
|
|
1730
|
-
PredicateType["compound"] = "compound";
|
|
1731
|
-
PredicateType["comparison"] = "comparison";
|
|
1732
|
-
PredicateType["not"] = "not";
|
|
1733
|
-
PredicateType["nullComparison"] = "nullComparison";
|
|
1734
|
-
PredicateType["recordRepresentation"] = "recordRepresentation";
|
|
1735
|
-
PredicateType["exists"] = "exists";
|
|
1736
|
-
PredicateType["between"] = "between";
|
|
1737
|
-
PredicateType["dateFunction"] = "dateFunction";
|
|
1738
|
-
})(PredicateType$1 || (PredicateType$1 = {}));
|
|
1739
|
-
var DateFunction;
|
|
1740
|
-
(function (DateFunction) {
|
|
1741
|
-
DateFunction["dayOfMonth"] = "DAY_OF_MONTH";
|
|
1742
|
-
})(DateFunction || (DateFunction = {}));
|
|
1743
|
-
var CompoundOperator;
|
|
1744
|
-
(function (CompoundOperator) {
|
|
1745
|
-
CompoundOperator["and"] = "and";
|
|
1746
|
-
CompoundOperator["or"] = "or";
|
|
1747
|
-
})(CompoundOperator || (CompoundOperator = {}));
|
|
1748
|
-
var NullComparisonOperator;
|
|
1749
|
-
(function (NullComparisonOperator) {
|
|
1750
|
-
NullComparisonOperator["is"] = "is";
|
|
1751
|
-
NullComparisonOperator["isNot"] = "isNot";
|
|
1752
|
-
})(NullComparisonOperator || (NullComparisonOperator = {}));
|
|
1753
|
-
var ComparisonOperator;
|
|
1754
|
-
(function (ComparisonOperator) {
|
|
1755
|
-
ComparisonOperator["eq"] = "eq";
|
|
1756
|
-
ComparisonOperator["ne"] = "ne";
|
|
1757
|
-
ComparisonOperator["like"] = "like";
|
|
1758
|
-
ComparisonOperator["lt"] = "lt";
|
|
1759
|
-
ComparisonOperator["gt"] = "gt";
|
|
1760
|
-
ComparisonOperator["lte"] = "lte";
|
|
1761
|
-
ComparisonOperator["gte"] = "gte";
|
|
1762
|
-
ComparisonOperator["in"] = "in";
|
|
1763
|
-
ComparisonOperator["nin"] = "nin";
|
|
1764
|
-
ComparisonOperator["excludes"] = "excludes";
|
|
1765
|
-
ComparisonOperator["includes"] = "includes";
|
|
1766
|
-
})(ComparisonOperator || (ComparisonOperator = {}));
|
|
1767
|
-
var DateEnumType;
|
|
1768
|
-
(function (DateEnumType) {
|
|
1769
|
-
DateEnumType[DateEnumType["yesterday"] = 0] = "yesterday";
|
|
1770
|
-
DateEnumType[DateEnumType["today"] = 1] = "today";
|
|
1771
|
-
DateEnumType[DateEnumType["tomorrow"] = 2] = "tomorrow";
|
|
1772
|
-
DateEnumType[DateEnumType["last_week"] = 3] = "last_week";
|
|
1773
|
-
DateEnumType[DateEnumType["this_week"] = 4] = "this_week";
|
|
1774
|
-
DateEnumType[DateEnumType["next_week"] = 5] = "next_week";
|
|
1775
|
-
DateEnumType[DateEnumType["last_month"] = 6] = "last_month";
|
|
1776
|
-
DateEnumType[DateEnumType["this_month"] = 7] = "this_month";
|
|
1777
|
-
DateEnumType[DateEnumType["next_month"] = 8] = "next_month";
|
|
1778
|
-
DateEnumType[DateEnumType["last_quarter"] = 9] = "last_quarter";
|
|
1779
|
-
DateEnumType[DateEnumType["this_quarter"] = 10] = "this_quarter";
|
|
1780
|
-
DateEnumType[DateEnumType["next_quarter"] = 11] = "next_quarter";
|
|
1781
|
-
DateEnumType[DateEnumType["last_90_days"] = 12] = "last_90_days";
|
|
1782
|
-
DateEnumType[DateEnumType["next_90_days"] = 13] = "next_90_days";
|
|
1783
|
-
DateEnumType[DateEnumType["last_year"] = 14] = "last_year";
|
|
1784
|
-
DateEnumType[DateEnumType["this_year"] = 15] = "this_year";
|
|
1785
|
-
DateEnumType[DateEnumType["next_year"] = 16] = "next_year";
|
|
1786
|
-
})(DateEnumType || (DateEnumType = {}));
|
|
1787
|
-
var DateRangeEnumType;
|
|
1788
|
-
(function (DateRangeEnumType) {
|
|
1789
|
-
DateRangeEnumType[DateRangeEnumType["last_n_months"] = 0] = "last_n_months";
|
|
1790
|
-
DateRangeEnumType[DateRangeEnumType["last_n_days"] = 1] = "last_n_days";
|
|
1791
|
-
})(DateRangeEnumType || (DateRangeEnumType = {}));
|
|
1792
|
-
var FieldType;
|
|
1793
|
-
(function (FieldType) {
|
|
1794
|
-
FieldType["Child"] = "ChildField";
|
|
1795
|
-
FieldType["Scalar"] = "ScalarField";
|
|
1796
|
-
FieldType["Spanning"] = "SpanningField";
|
|
1797
|
-
FieldType["Null"] = "NullField";
|
|
1798
|
-
})(FieldType || (FieldType = {}));
|
|
1799
|
-
var ValueType;
|
|
1800
|
-
(function (ValueType) {
|
|
1801
|
-
ValueType["Extract"] = "JsonExtract";
|
|
1802
|
-
ValueType["BooleanLiteral"] = "BooleanLiteral";
|
|
1803
|
-
ValueType["DoubleLiteral"] = "DoubleLiteral";
|
|
1804
|
-
ValueType["IntLiteral"] = "IntLiteral";
|
|
1805
|
-
ValueType["Identifier"] = "Identifier";
|
|
1806
|
-
ValueType["StringLiteral"] = "StringLiteral";
|
|
1807
|
-
ValueType["StringArray"] = "StringArray";
|
|
1808
|
-
ValueType["NumberArray"] = "NumberArray";
|
|
1809
|
-
ValueType["DateEnum"] = "DateEnum";
|
|
1810
|
-
ValueType["DateValue"] = "DateValue";
|
|
1811
|
-
ValueType["DateArray"] = "DateArray";
|
|
1812
|
-
ValueType["DateRange"] = "DateRange";
|
|
1813
|
-
ValueType["DateTimeEnum"] = "DateTimeEnum";
|
|
1814
|
-
ValueType["DateTimeValue"] = "DateTimeValue";
|
|
1815
|
-
ValueType["DateTimeArray"] = "DateTimeArray";
|
|
1816
|
-
ValueType["DateTimeRange"] = "DateTimeRange";
|
|
1817
|
-
ValueType["RelativeDate"] = "RelativeDate";
|
|
1818
|
-
ValueType["NullValue"] = "NullValue";
|
|
1819
|
-
ValueType["MultiPicklistSet"] = "MultiPicklistSet";
|
|
1820
|
-
})(ValueType || (ValueType = {}));
|
|
1821
|
-
function isCompoundPredicate$1(predicate) {
|
|
1822
|
-
return predicate.type === PredicateType$1.compound;
|
|
1823
|
-
}
|
|
1824
|
-
function isComparisonPredicate(predicate) {
|
|
1825
|
-
return predicate.type === PredicateType$1.comparison;
|
|
1826
|
-
}
|
|
1827
|
-
function isBetweenPredicate(predicate) {
|
|
1828
|
-
return predicate.type === PredicateType$1.between;
|
|
1829
|
-
}
|
|
1830
|
-
function isNullComparisonPredicate(predicate) {
|
|
1831
|
-
return predicate.type === PredicateType$1.nullComparison;
|
|
1832
|
-
}
|
|
1833
|
-
function isNotPredicate$1(predicate) {
|
|
1834
|
-
return predicate.type === PredicateType$1.not;
|
|
1835
|
-
}
|
|
1836
|
-
function isExistsPredicate(predicate) {
|
|
1837
|
-
return predicate.type === PredicateType$1.exists;
|
|
1838
|
-
}
|
|
1839
|
-
function isDateFunctionPredicate(predicate) {
|
|
1840
|
-
return predicate.type === PredicateType$1.dateFunction;
|
|
1841
|
-
}
|
|
1842
|
-
|
|
1843
|
-
function flatten$1(previous, current) {
|
|
1844
|
-
return previous.concat(current);
|
|
1845
|
-
}
|
|
1846
|
-
function flatMap(transform) {
|
|
1847
|
-
return (acc, current) => {
|
|
1848
|
-
const mapped = transform(current);
|
|
1849
|
-
return acc.concat(mapped);
|
|
1850
|
-
};
|
|
1851
|
-
}
|
|
1852
|
-
|
|
1853
|
-
function isExpressionEqual(lh, rh) {
|
|
1854
|
-
if (lh.type === ValueType.StringLiteral && rh.type === ValueType.StringLiteral) {
|
|
1855
|
-
return lh.value === rh.value;
|
|
1856
|
-
}
|
|
1857
|
-
if (lh.type === ValueType.DoubleLiteral && rh.type === ValueType.DoubleLiteral) {
|
|
1858
|
-
return lh.value === rh.value;
|
|
1859
|
-
}
|
|
1860
|
-
if (lh.type === ValueType.IntLiteral && rh.type === ValueType.IntLiteral) {
|
|
1861
|
-
return lh.value === rh.value;
|
|
1862
|
-
}
|
|
1863
|
-
if (lh.type === ValueType.BooleanLiteral && rh.type === ValueType.BooleanLiteral) {
|
|
1864
|
-
return lh.value === rh.value;
|
|
1865
|
-
}
|
|
1866
|
-
if (lh.type === ValueType.StringArray && rh.type === ValueType.StringArray) {
|
|
1867
|
-
return isArrayEqual(lh.value, rh.value, isStrictEqual);
|
|
1868
|
-
}
|
|
1869
|
-
if (lh.type === ValueType.NumberArray && rh.type === ValueType.NumberArray) {
|
|
1870
|
-
return isArrayEqual(lh.value, rh.value, isStrictEqual);
|
|
1871
|
-
}
|
|
1872
|
-
if (lh.type === ValueType.DateValue && rh.type === ValueType.DateValue) {
|
|
1873
|
-
return lh.value === rh.value;
|
|
1874
|
-
}
|
|
1875
|
-
if (lh.type === ValueType.DateEnum && rh.type === ValueType.DateEnum) {
|
|
1876
|
-
return lh.value === rh.value;
|
|
1877
|
-
}
|
|
1878
|
-
if (lh.type === ValueType.DateTimeValue && rh.type === ValueType.DateTimeValue) {
|
|
1879
|
-
return lh.value === rh.value;
|
|
1880
|
-
}
|
|
1881
|
-
if (lh.type === ValueType.DateTimeEnum && rh.type === ValueType.DateTimeEnum) {
|
|
1882
|
-
return lh.value === rh.value;
|
|
1883
|
-
}
|
|
1884
|
-
if (lh.type === ValueType.RelativeDate && rh.type === ValueType.RelativeDate) {
|
|
1885
|
-
return (lh.amount === rh.amount &&
|
|
1886
|
-
lh.hasTime === rh.hasTime &&
|
|
1887
|
-
lh.offset === rh.offset &&
|
|
1888
|
-
lh.unit === rh.unit);
|
|
1889
|
-
}
|
|
1890
|
-
if (lh.type === ValueType.NullValue && rh.type === ValueType.NullValue) {
|
|
1891
|
-
return true;
|
|
1892
|
-
}
|
|
1893
|
-
if (lh.type === ValueType.DateArray && rh.type === ValueType.DateArray) {
|
|
1894
|
-
return isArrayEqual(lh.value, rh.value, isExpressionEqual);
|
|
1895
|
-
}
|
|
1896
|
-
if (lh.type === ValueType.DateTimeArray && rh.type === ValueType.DateTimeArray) {
|
|
1897
|
-
return isArrayEqual(lh.value, rh.value, isExpressionEqual);
|
|
1898
|
-
}
|
|
1899
|
-
if (lh.type === ValueType.Extract && rh.type === ValueType.Extract) {
|
|
1900
|
-
return (lh.field === rh.field && lh.jsonAlias === rh.jsonAlias && lh.subfield === rh.subfield);
|
|
1901
|
-
}
|
|
1902
|
-
return false;
|
|
1903
|
-
}
|
|
1904
|
-
function isStrictEqual(l, r) {
|
|
1905
|
-
return l === r;
|
|
1906
|
-
}
|
|
1907
|
-
function isArrayEqual(lh, rh, compare) {
|
|
1908
|
-
if (lh.length !== rh.length) {
|
|
1909
|
-
return false;
|
|
1910
|
-
}
|
|
1911
|
-
for (let index = 0; index < rh.length; index++) {
|
|
1912
|
-
const r = rh[index];
|
|
1913
|
-
const l = lh[index];
|
|
1914
|
-
if (compare(l, r) === false) {
|
|
1915
|
-
return false;
|
|
1916
|
-
}
|
|
1917
|
-
}
|
|
1918
|
-
return true;
|
|
1919
|
-
}
|
|
1920
|
-
function isPredicateEqual(lh, rh) {
|
|
1921
|
-
if (lh === undefined) {
|
|
1922
|
-
return lh === rh;
|
|
1923
|
-
}
|
|
1924
|
-
if (rh === undefined) {
|
|
1925
|
-
return rh === lh;
|
|
1926
|
-
}
|
|
1927
|
-
const { nullComparison, not, comparison, compound, between } = PredicateType$1;
|
|
1928
|
-
if (rh.type === nullComparison && lh.type === nullComparison) {
|
|
1929
|
-
return rh.operator === lh.operator && isExpressionEqual(rh.left, lh.left);
|
|
1930
|
-
}
|
|
1931
|
-
if (rh.type === not && lh.type === not) {
|
|
1932
|
-
return isPredicateEqual(rh.child, lh.child);
|
|
1933
|
-
}
|
|
1934
|
-
if (rh.type === comparison && lh.type === comparison) {
|
|
1935
|
-
return (rh.operator === lh.operator &&
|
|
1936
|
-
isExpressionEqual(rh.left, lh.left) &&
|
|
1937
|
-
isExpressionEqual(rh.right, lh.right));
|
|
1938
|
-
}
|
|
1939
|
-
if (rh.type === between && lh.type === between) {
|
|
1940
|
-
return (isExpressionEqual(rh.compareDate, lh.compareDate) &&
|
|
1941
|
-
isExpressionEqual(rh.start, lh.start) &&
|
|
1942
|
-
isExpressionEqual(rh.end, lh.end));
|
|
1943
|
-
}
|
|
1944
|
-
if (rh.type === compound && lh.type === compound) {
|
|
1945
|
-
const lChildren = lh.children;
|
|
1946
|
-
const rChildren = rh.children;
|
|
1947
|
-
return isArrayEqual(lChildren, rChildren, isPredicateEqual);
|
|
1948
|
-
}
|
|
1949
|
-
return false;
|
|
1950
|
-
}
|
|
1951
|
-
function containsPredicate(predicates, predicate) {
|
|
1952
|
-
for (let index = 0; index < predicates.length; index++) {
|
|
1953
|
-
const element = predicates[index];
|
|
1954
|
-
if (isPredicateEqual(predicate, element)) {
|
|
1955
|
-
return true;
|
|
1956
|
-
}
|
|
1957
|
-
}
|
|
1958
|
-
return false;
|
|
1959
|
-
}
|
|
1960
|
-
function removeDuplicatePredicates(predicates) {
|
|
1961
|
-
return predicates.reduce(function (acc, b) {
|
|
1962
|
-
if (containsPredicate(acc, b) === false) {
|
|
1963
|
-
acc.push(b);
|
|
1964
|
-
}
|
|
1965
|
-
return acc;
|
|
1966
|
-
}, []);
|
|
1967
|
-
}
|
|
1968
|
-
function isOrderByEqual(lh, rh) {
|
|
1969
|
-
if (lh === undefined) {
|
|
1970
|
-
return lh === rh;
|
|
1971
|
-
}
|
|
1972
|
-
if (rh === undefined) {
|
|
1973
|
-
return lh === rh;
|
|
1974
|
-
}
|
|
1975
|
-
return (lh.asc === rh.asc &&
|
|
1976
|
-
isExpressionEqual(lh.extract, rh.extract) &&
|
|
1977
|
-
lh.nullsFirst === rh.nullsFirst);
|
|
1978
|
-
}
|
|
1979
|
-
function isRecordQueryEqual(lh, rh) {
|
|
1980
|
-
return (lh.alias === rh.alias &&
|
|
1981
|
-
isArrayEqual(lh.fields, rh.fields, isFieldEqual) &&
|
|
1982
|
-
lh.first === rh.first &&
|
|
1983
|
-
isArrayEqual(lh.joins, rh.joins, isStrictEqual) &&
|
|
1984
|
-
isArrayEqual(lh.orderBy, rh.orderBy, isOrderByEqual) &&
|
|
1985
|
-
isPredicateEqual(lh.predicate, rh.predicate));
|
|
1986
|
-
}
|
|
1987
|
-
function isFieldEqual(lh, rh) {
|
|
1988
|
-
if (rh.type === FieldType.Child && lh.type === FieldType.Child) {
|
|
1989
|
-
return isRecordQueryEqual(lh.connection, rh.connection) && lh.path === rh.path;
|
|
1990
|
-
}
|
|
1991
|
-
if (rh.type === FieldType.Scalar && lh.type === FieldType.Scalar) {
|
|
1992
|
-
return isExpressionEqual(lh.extract, rh.extract) && lh.path === rh.path;
|
|
1993
|
-
}
|
|
1994
|
-
return false;
|
|
1995
|
-
}
|
|
1996
|
-
function containsField(fields, field) {
|
|
1997
|
-
for (let index = 0; index < fields.length; index++) {
|
|
1998
|
-
const element = fields[index];
|
|
1999
|
-
if (isFieldEqual(field, element)) {
|
|
2000
|
-
return true;
|
|
2001
|
-
}
|
|
2002
|
-
}
|
|
2003
|
-
return false;
|
|
2004
|
-
}
|
|
2005
|
-
// Returns a new array with duplicate fields removed, keeping first occurrences.
function removeDuplicateFields(fields) {
    const unique = [];
    for (const candidate of fields) {
        if (containsField(unique, candidate) === false) {
            unique.push(candidate);
        }
    }
    return unique;
}
|
|
2013
|
-
|
|
2014
|
-
// Wraps a free-form error string as a MessageError value.
function message(message) {
    return {
        type: 'MessageError',
        message,
    };
}
|
|
2017
|
-
// Builds the error value used when an object's ObjectInfo is absent.
function missingObjectInfo(object) {
    return {
        type: 'MissingObjectInfoError',
        object,
    };
}
|
|
2020
|
-
// Flattens a list of predicate error values into a single Error whose message
// joins the individual messages with ', '. Unknown error types are skipped.
function concatenatePredicateErrors(errors) {
    const messages = [];
    for (const error of errors) {
        if (error.type === 'MessageError') {
            messages.push(error.message);
        } else if (error.type === 'MissingObjectInfoError') {
            messages.push(`Missing object info for type ${error.object}`);
        }
    }
    return new Error(messages.join(', '));
}
|
|
2032
|
-
|
|
2033
|
-
// Result monad: successful variant. map/flatMap transform the value;
// mapError is a no-op that yields a fresh success of the same value.
function success(value) {
    const map = (f) => success(f(value));
    const flatMap = (f) => f(value);
    const mapError = (_) => success(value);
    return { value, isSuccess: true, map, flatMap, mapError };
}
|
|
2042
|
-
// Result monad: failed variant. map/flatMap propagate the error unchanged;
// mapError transforms it.
function failure(error) {
    const map = (_) => failure(error);
    const flatMap = (_) => failure(error);
    const mapError = (f) => failure(f(error));
    return { error, isSuccess: false, map, flatMap, mapError };
}
|
|
2051
|
-
// Type guard: true for the success variant of a Result.
function isSuccess(result) {
    return result.isSuccess;
}
|
|
2054
|
-
// Type guard: true for the failure variant of a Result.
function isFailure(result) {
    return result.isSuccess === false;
}
|
|
2057
|
-
// Accessor: extracts the error payload of a failure Result.
function errors(result) {
    return result.error;
}
|
|
2060
|
-
// Accessor: extracts the value payload of a success Result.
function values$5(result) {
    return result.value;
}
|
|
2063
|
-
// Collapses an array of Results into a single Result: failure of all errors
// when any input failed, otherwise success of all values.
function flattenResults(results) {
    const fails = results.filter(isFailure).map(errors);
    return fails.length > 0
        ? failure(fails)
        : success(results.filter(isSuccess).map(values$5));
}
|
|
2070
|
-
|
|
2071
|
-
// Looks up field metadata for `fieldName` on object `apiName` from the
// ObjectInfo map. Fails when the ObjectInfo is missing; otherwise succeeds
// with the matching field info (or undefined when no field matches).
function getFieldInfo(apiName, fieldName, infoMap) {
    const objInfo = infoMap[apiName];
    if (objInfo === undefined) {
        return failure(missingObjectInfo(apiName));
    }
    // Special casing for WeakEtag which is represented in the GraphQL schema but
    // has no ObjectInfo representation
    if (fieldName === 'WeakEtag') {
        return success({ apiName: 'WeakEtag', dataType: 'WeakEtag' });
    }
    const matches = (field) =>
        field.apiName === fieldName ||
        (field.dataType === 'Reference' && field.relationshipName === fieldName);
    return success(Object.values(objInfo.fields).find(matches));
}
|
|
2088
|
-
// Finds the child relationship named `fieldName` on object `apiName`.
// Fails when the ObjectInfo is missing; otherwise succeeds with the
// relationship info (or undefined when none matches).
function getRelationshipInfo(apiName, fieldName, infoMap) {
    const objInfo = infoMap[apiName];
    if (objInfo === undefined) {
        return failure(missingObjectInfo(apiName));
    }
    const relationship = objInfo.childRelationships.find(
        (candidate) => candidate.relationshipName === fieldName
    );
    return success(relationship);
}
|
|
2095
|
-
// Constructs a string-literal expression node.
function stringLiteral(value, safe = false, isCaseSensitive = false) {
    return {
        type: ValueType.StringLiteral,
        value,
        safe,
        isCaseSensitive,
    };
}
|
|
2098
|
-
// Type guard for string-literal expression nodes.
function isStringLiteral(expression) {
    return expression.type === ValueType.StringLiteral;
}
|
|
2101
|
-
// Type guard for string-array expression nodes.
function isStringArray(expression) {
    return expression.type === ValueType.StringArray;
}
|
|
2104
|
-
// Constructs a comparison predicate node.
function comparison(left, operator, right) {
    return {
        type: PredicateType$1.comparison,
        left,
        right,
        operator,
    };
}
|
|
2107
|
-
// Wraps children in a compound predicate, except that a single child is
// returned unwrapped.
function compoundOrSelf(children, operator) {
    return children.length === 1
        ? children[0]
        : { type: PredicateType$1.compound, operator, children };
}
|
|
2113
|
-
// True only for a compound predicate with no children; every non-compound
// predicate counts as non-empty.
function isEmptyPredicate(predicate) {
    if (!isCompoundPredicate$1(predicate)) {
        return false;
    }
    return predicate.children.length === 0;
}
|
|
2116
|
-
/**
 * Flattens the contents of child predicates of the same type as the new parent compound predicate.
 * Removes duplicate predicates found within the same compound predicate.
 *
 * @param predicates
 * @param operator
 * @returns
 */
function combinePredicates(predicates, operator) {
    const compounds = predicates.filter(isCompoundPredicate$1);
    // compound predicates with a different type (and, or) than operator stay nested
    const otherCompoundPredicates = compounds.filter((pred) => pred.operator !== operator);
    // same-operator compounds get their children hoisted into the new parent
    const flattened = compounds
        .filter((pred) => pred.operator === operator)
        .map((pred) => pred.children)
        .reduce(flatten$1, []);
    // leaf-level predicates pass through unchanged
    const compares = predicates.filter((pred) =>
        isComparisonPredicate(pred) ||
        isNullComparisonPredicate(pred) ||
        isExistsPredicate(pred) ||
        isDateFunctionPredicate(pred) ||
        isBetweenPredicate(pred) ||
        isNotPredicate$1(pred));
    const children = [...compares, ...flattened, ...otherCompoundPredicates];
    return compoundOrSelf(removeDuplicatePredicates(children), operator);
}
|
|
2144
|
-
// Builds the join predicate `fromAlias.referenceKey = toAlias.Id` used to
// link a record to the record it references.
function referencePredicate(fromAlias, toAlias, referenceKey) {
    const left = { type: ValueType.Extract, jsonAlias: fromAlias, field: referenceKey };
    const right = { type: ValueType.Extract, jsonAlias: toAlias, field: 'Id' };
    return comparison(left, ComparisonOperator.eq, right);
}
|
|
2151
|
-
// Maps a record field name to its JSON extraction path. A handful of
// top-level record properties have fixed paths; every other field lives under
// `fields.<name>.<subfield>` (defaulting to the `value` subfield).
function extractPath(fieldName, subfield = undefined) {
    const specialPaths = new Map([
        ['Id', 'id'],
        ['ApiName', 'apiName'],
        ['drafts', 'drafts'],
        ['RecordTypeId', 'recordTypeId'],
        ['WeakEtag', 'weakEtag'],
    ]);
    if (specialPaths.has(fieldName)) {
        return specialPaths.get(fieldName);
    }
    const sub = subfield !== undefined ? subfield : 'value';
    return `fields.${fieldName}.${sub}`;
}
|
|
2169
|
-
// Strips every single and double quote character from the string.
function removeAllQuotations(s) {
    return s.split(/['"]+/).join('');
}
|
|
2172
|
-
// Removes the double quotes around JSON object keys ("key": -> key:), leaving
// quoted values untouched.
function removeQuotationsFromKeys(s) {
    const quotedKey = /"([^"]+)":/g;
    return s.replace(quotedKey, '$1:');
}
|
|
2175
|
-
// Turns a single dimensional array into a 2 dimensional array, where
// the internal array "chunks" are of length `chunkSize`. The last
// chunk will have any remainder and may be less than `chunkSize`
function chunkArr(arr, chunkSize) {
    const res = [];
    let start = 0;
    while (start < arr.length) {
        res.push(arr.slice(start, start + chunkSize));
        start += chunkSize;
    }
    return res;
}
|
|
2186
|
-
|
|
2187
|
-
// SQLite date expressions (inclusive lowerBound/upperBound) for each
// relative-date literal. Bounds are emitted verbatim into generated SQL.
const TODAY_RANGE$1 = {
    lowerBound: `date('now')`,
    upperBound: `date('now', '+1 day', '-0.001 seconds')`,
};
const TOMORROW_RANGE$1 = {
    lowerBound: `date('now', '+1 day')`,
    upperBound: `date('now', '+2 day', '-0.001 seconds')`,
};
const YESTERDAY_RANGE$1 = {
    lowerBound: `date('now', '-1 day')`,
    upperBound: `date('now', '-0.001 seconds')`,
};
const THIS_WEEK_RANGE$1 = {
    lowerBound: `date('now', 'weekday 0', '-7 days')`,
    upperBound: `date('now', 'weekday 0', '-1 day')`,
};
const LAST_WEEK_RANGE$1 = {
    lowerBound: `date('now', 'weekday 0', '-14 days')`,
    upperBound: `date('now', 'weekday 0', '-8 days')`,
};
const NEXT_WEEK_RANGE$1 = {
    lowerBound: `date('now', 'weekday 0')`,
    upperBound: `date('now', 'weekday 0', '+6 days')`,
};
const THIS_MONTH_RANGE$1 = {
    lowerBound: `date('now', 'start of month')`,
    upperBound: `date('now', 'start of month', '1 month', '-1 day')`,
};
const LAST_MONTH_RANGE$1 = {
    lowerBound: `date('now', 'start of month', '-1 month')`,
    upperBound: `date('now', 'start of month', '-1 day')`,
};
const NEXT_MONTH_RANGE$1 = {
    lowerBound: `date('now', 'start of month', '+1 month')`,
    // -1 day is the day before the end of the month
    // this needs to be 0 day
    upperBound: `date('now', 'start of month', '+2 month', '0 day')`,
};
// Quarter ranges are computed from the wall clock at module load; callers
// refresh them when the quarter rolls over.
const THIS_QUARTER_RANGE$1 = computeQuarterDateRange$1(DateEnumType.this_quarter);
const LAST_QUARTER_RANGE$1 = computeQuarterDateRange$1(DateEnumType.last_quarter);
const NEXT_QUARTER_RANGE$1 = computeQuarterDateRange$1(DateEnumType.next_quarter);
const THIS_YEAR_RANGE$1 = {
    lowerBound: `date('now', 'start of year')`,
    // FIX: previously `date('now', 'start of month', '1 year', '-1 day')`,
    // which anchored the upper bound to the current month rather than the
    // start of the year (inconsistent with LAST_YEAR/NEXT_YEAR below and
    // wrong for any month other than January).
    upperBound: `date('now', 'start of year', '+1 year', '-1 day')`,
};
const LAST_YEAR_RANGE$1 = {
    lowerBound: `date('now', 'start of year', '-1 year')`,
    upperBound: `date('now', 'start of year', '-1 day')`,
};
const NEXT_YEAR_RANGE$1 = {
    lowerBound: `date('now', 'start of year', '+1 year')`,
    upperBound: `date('now', 'start of year', '+2 years', '-1 day')`,
};
const LAST_90_DAYS_RANGE$1 = {
    lowerBound: `date('now', '-90 days')`,
    upperBound: `date('now')`,
};
const NEXT_90_DAYS_RANGE$1 = {
    lowerBound: `date('now', '+1 day')`,
    upperBound: `date('now', '+90 days')`,
};
|
|
2248
|
-
// Literal → date-range lookup table; the quarter entries in this object are
// mutated later when a quarter rollover is detected.
const dateRanges = initDateRanges();
// Builds the initial mapping from every DateEnumType literal to its range.
function initDateRanges() {
    return {
        [DateEnumType.today]: TODAY_RANGE$1,
        [DateEnumType.yesterday]: YESTERDAY_RANGE$1,
        [DateEnumType.tomorrow]: TOMORROW_RANGE$1,
        [DateEnumType.this_week]: THIS_WEEK_RANGE$1,
        [DateEnumType.last_week]: LAST_WEEK_RANGE$1,
        [DateEnumType.next_week]: NEXT_WEEK_RANGE$1,
        [DateEnumType.this_month]: THIS_MONTH_RANGE$1,
        [DateEnumType.last_month]: LAST_MONTH_RANGE$1,
        [DateEnumType.next_month]: NEXT_MONTH_RANGE$1,
        [DateEnumType.this_year]: THIS_YEAR_RANGE$1,
        [DateEnumType.last_year]: LAST_YEAR_RANGE$1,
        [DateEnumType.next_year]: NEXT_YEAR_RANGE$1,
        [DateEnumType.this_quarter]: THIS_QUARTER_RANGE$1,
        [DateEnumType.last_quarter]: LAST_QUARTER_RANGE$1,
        [DateEnumType.next_quarter]: NEXT_QUARTER_RANGE$1,
        [DateEnumType.last_90_days]: LAST_90_DAYS_RANGE$1,
        [DateEnumType.next_90_days]: NEXT_90_DAYS_RANGE$1,
    };
}
|
|
2270
|
-
// Computes the SQLite date range for last/this/next quarter, anchored on the
// current quarter's UTC start date. Returns undefined for any other literal
// (same fall-through behavior as before).
function computeQuarterDateRange$1(enumType) {
    const curQuarterStart = quarterStart$1(new Date());
    if (enumType === DateEnumType.last_quarter) {
        return {
            lowerBound: `date('${curQuarterStart}', '-3 months')`,
            upperBound: `date('${curQuarterStart}', '-1 day')`,
        };
    }
    if (enumType === DateEnumType.this_quarter) {
        return {
            lowerBound: `date('${curQuarterStart}')`,
            // -1 days would set the day before the end of the quarter
            // 0 days sets it to the end of the month
            upperBound: `date('${curQuarterStart}', '3 months', '0 days')`,
        };
    }
    if (enumType === DateEnumType.next_quarter) {
        return {
            lowerBound: `date('${curQuarterStart}', '+3 months')`,
            // -1 days would set the day before the end of the quarter
            // 0 days sets it to the end of the month
            upperBound: `date('${curQuarterStart}', '+6 months', '0 days')`,
        };
    }
}
|
|
2294
|
-
// Quarter start date in UTC, formatted YYYY-MM-01 (month ∈ {01, 04, 07, 10}).
function quarterStart$1(date) {
    const month = Math.floor(date.getUTCMonth() / 3) * 3 + 1;
    const year = date.getUTCFullYear();
    return `${year}-${String(month).padStart(2, '0')}-01`;
}
|
|
2300
|
-
// Renders the right-hand side of a date comparison (operator + bound(s)) as a
// SQL fragment for the given relative-date literal.
// NOTE: quarter ranges are cached in the module-level `dateRanges` map and are
// recomputed here (and written back) when the cached lower bound no longer
// matches the current quarter — i.e. the quarter has rolled over since load.
function comparisonDateRightExpressionToSql(operator, dateEnum) {
    let dateRange = dateRanges[dateEnum];
    if (dateEnum === DateEnumType.last_quarter ||
        dateEnum === DateEnumType.this_quarter ||
        dateEnum === DateEnumType.next_quarter) {
        const latestQuarterRange = computeQuarterDateRange$1(dateEnum);
        if (latestQuarterRange.lowerBound !== dateRange.lowerBound) {
            // refresh the stale cached range for subsequent queries
            dateRanges[dateEnum] = latestQuarterRange;
            dateRange = latestQuarterRange;
        }
    }
    // A "discrete" date literal covers a single day, so equality can use
    // =/!= instead of BETWEEN.
    const isDiscreteDate = dateRange.lowerBound === dateRange.upperBound;
    switch (operator) {
        case ComparisonOperator.eq:
            if (isDiscreteDate) {
                return `= ` + dateRange.lowerBound;
            }
            else {
                return `between ` + dateRange.lowerBound + ` and ` + dateRange.upperBound;
            }
        case ComparisonOperator.ne:
            if (isDiscreteDate) {
                return `!= ` + dateRange.lowerBound;
            }
            else {
                return `not between ` + dateRange.lowerBound + ` and ` + dateRange.upperBound;
            }
        case ComparisonOperator.gt:
            return `> ` + dateRange.upperBound;
        case ComparisonOperator.gte:
            return `>= ` + dateRange.lowerBound;
        case ComparisonOperator.lt:
            return `< ` + dateRange.lowerBound;
        case ComparisonOperator.lte:
            return `<= ` + dateRange.upperBound;
        default:
            // unsupported operators (like/in/...) yield an empty fragment
            return ``;
    }
}
|
|
2339
|
-
// Decides whether a date-array input can be rendered with IN/NOT IN: every
// element must be a concrete date/datetime/null value, or one of the
// single-day literals yesterday/today/tomorrow.
function useInOrNinOperator(dateInput) {
    let useInOrNin = true;
    for (const element of dateInput.value) {
        const isEnum = element.type === ValueType.DateEnum || element.type === ValueType.DateTimeEnum;
        if (isEnum) {
            const isDiscrete =
                element.value === DateEnumType.yesterday ||
                element.value === DateEnumType.today ||
                element.value === DateEnumType.tomorrow;
            if (!isDiscrete) {
                useInOrNin = false;
            }
            continue;
        }
        const isConcrete =
            element.type === ValueType.DateValue ||
            element.type === ValueType.DateTimeValue ||
            element.type === ValueType.NullValue;
        if (!isConcrete) {
            useInOrNin = false;
        }
    }
    return useInOrNin;
}
|
|
2358
|
-
// Maps a comparison operator to its SQL token. includes/excludes use LIKE
// against a ';'-delimited multipicklist value; ne renders as IS NOT so NULLs
// compare as expected.
function comparisonOperatorToSql(operator) {
    const sqlByOperator = {
        [ComparisonOperator.eq]: '=',
        [ComparisonOperator.ne]: 'IS NOT',
        [ComparisonOperator.gt]: '>',
        [ComparisonOperator.gte]: '>=',
        [ComparisonOperator.lt]: '<',
        [ComparisonOperator.lte]: '<=',
        [ComparisonOperator.like]: 'like',
        [ComparisonOperator.in]: 'IN',
        [ComparisonOperator.nin]: 'NOT IN',
        [ComparisonOperator.includes]: 'LIKE',
        [ComparisonOperator.excludes]: 'NOT LIKE',
    };
    return sqlByOperator[operator];
}
|
|
2384
|
-
// Supports the date literal within the 'in' and 'nin' expressions. Literals is limited to 'TODAY', 'TOMORROW' and 'YESTERDAY'
function discreteDateEnumToSql(dateEnum) {
    if (dateEnum === DateEnumType.yesterday) {
        return `date('now', '-1 day')`;
    }
    if (dateEnum === DateEnumType.today) {
        return `date('now')`;
    }
    if (dateEnum === DateEnumType.tomorrow) {
        return `date('now', '+1 day')`;
    }
    return '';
}
|
|
2397
|
-
|
|
2398
|
-
// JSON path fragments used when assembling the GraphQL-shaped result document.
const recordPrefix = '.data.uiapi.query';
const recordSuffix = 'edges';
const pathPrefix = '$';
// Name of the common table expression that scopes queries to record rows.
const recordsCTE = 'recordsCTE';
// Multipicklist values are stored ';'-separated.
const MultiPickListValueSeparator$1 = ';';
// CTE over all RecordRepresentation rows in lds_data; when the
// exclude-stale-records gate is open, record metadata is also selected so
// staleness can be evaluated downstream. Evaluated once at module load.
const recordCTESQL = excludeStaleRecordsGate.isOpen({ fallback: false })
    ? `WITH ${recordsCTE} AS NOT materialized ` +
        `(select data, metadata from lds_data where key like 'UiApi::RecordRepresentation:%')`
    : `WITH ${recordsCTE} AS NOT materialized ` +
        `(select data from lds_data where key like 'UiApi::RecordRepresentation:%')`;
|
|
2408
|
-
// Returns the shared CTE prelude prepended to every generated query.
function cteSql() {
    return recordCTESQL;
}
|
|
2411
|
-
// Assembles the full SQL for a root query: one json_set entry per top-level
// connection, wrapped in the records CTE. Returns { sql, bindings }.
// NOTE: assumes at least one connection (the bindings reduce has no seed).
function computeSql(rootQuery) {
    const fieldSqls = [];
    const bindingLists = [];
    for (const connection of rootQuery.connections) {
        const { sql: recordQuerySql, bindings } = recordQueryToSql(connection);
        fieldSqls.push(`'${pathPrefix}${recordPrefix}.${connection.alias}.${recordSuffix}', (${recordQuerySql})`);
        bindingLists.push(bindings);
    }
    const fieldSql = fieldSqls.join(`, `);
    const bindings = bindingLists.reduce(flatten$1);
    return { sql: `${cteSql()} SELECT json_set('{}', ${fieldSql} ) as json`, bindings };
}
|
|
2423
|
-
// Renders one query field as a (json path, value expression) pair for
// json_set: child connections recurse, Null fields emit a literal null,
// scalar fields emit their extraction expression.
function fieldToSql(field) {
    const { path } = field;
    switch (field.type) {
        case FieldType.Child: {
            const { sql, bindings } = recordQueryToSql(field.connection);
            return { sql: `'${pathPrefix}.${path}', (${sql})`, bindings };
        }
        case FieldType.Null:
            return { sql: `'${pathPrefix}.${path}', null`, bindings: [] };
        default: {
            const { sql, bindings } = expressionToSql(field.extract, field.targetDataType);
            return { sql: `'${pathPrefix}.${path}', (${sql})`, bindings };
        }
    }
}
|
|
2435
|
-
// Renders one record query (connection) as a SELECT producing a JSON array of
// row objects. Fields are folded into nested json_set calls in chunks of 60
// to stay under sqlite's per-function argument limit.
// NOTE: assumes the query has at least one field (the bindings reduce has no
// seed and would throw on an empty array).
function recordQueryToSql(recordQuery) {
    const { predicate, first, orderBy, fields: recordFields, joins, alias: name } = recordQuery;
    const fields = recordFields.map((f) => fieldToSql(f));
    const fieldBindings = fields.map((v) => v.bindings).reduce(flatten$1);
    const { sql: select, bindings: selectBindings } = selectSql(predicate, name, first, orderBy, joins);
    // In W-12058909, we learned that 63 fields was the upper bound of the json_set approach
    // before we ran into sqlite errors. Backing off that threshold a bit in case there are
    // other unknown-unknowns. Lower thresholds will have the effect of more json_set nesting
    // which may be more computationally expensive for the DB to produce.
    const chunks = chunkArr(fields, 60);
    // Recursively builds a nested string, where each "chunk" of fields gets added
    // to a json_set, and embedded in the next json_set to work around a sqlite function arg
    // limit for GraphQL queries that compute to large numbers of json_sets
    const nestJsonSets = (sql) => {
        // take a batch of fields (consumes `chunks` as it recurses)
        const fields = chunks.shift();
        // exit when there are no more batches of fields
        if (!fields)
            return sql;
        // Wrap the chunk of fields in a new json_set until exhaustion
        // eslint-disable-next-line no-param-reassign
        sql = `json_set(${sql || "'{}'"}, ${fields.map((field) => field.sql).join(`, `)} )`;
        return nestJsonSets(sql);
    };
    const jsonSets = nestJsonSets('');
    return {
        sql: `SELECT json_group_array(${jsonSets}) FROM ${select}`,
        bindings: fieldBindings.concat(selectBindings),
    };
}
|
|
2465
|
-
// Builds the parenthesized SELECT over the records CTE for one record query:
// joined aliases, optional WHERE, optional ORDER BY, optional LIMIT.
function selectSql(predicate, name, first, orderBy, joins) {
    const { sql: joinString, bindings: joinBindings } = joinsToSql(joins);
    const columns = columnsSql(joins.map((join) => join.name).concat(name));
    let predicateString = '';
    let predicateBindings = [];
    if (predicate !== undefined) {
        const rendered = predicateToSql(predicate);
        predicateBindings = rendered.bindings;
        predicateString = `WHERE ${rendered.sql}`;
    }
    const limitString = first !== undefined ? `LIMIT ${first}` : '';
    const { sql: orderBySql, bindings: orderByBindings } = orderbyToSql(orderBy);
    const sql = `(SELECT ${columns} FROM ${recordsCTE} as '${name}' ` +
        `${joinString} ${predicateString} ${orderBySql}${limitString})`;
    return {
        sql,
        bindings: joinBindings.concat(predicateBindings).concat(orderByBindings),
    };
}
|
|
2482
|
-
// Renders ORDER BY clauses. Each clause sorts first on an IS NULL flag and
// then on the extracted value.
function orderbyToSql(orderBy = []) {
    if (orderBy.length === 0) {
        return { sql: '', bindings: [] };
    }
    const clauseSqls = [];
    const bindingLists = [];
    for (const clause of orderBy) {
        const { sql: extractSql, bindings } = expressionToSql(clause.extract);
        const order = clause.asc ? 'ASC' : 'DESC';
        const nullsOrder = clause.nullsFirst ? 'DESC' : 'ASC';
        //As of fall 2021 most devices don't have NULLS FIRST|LAST support which was added to sqlite in 2019,
        //so we use a CASE expression and sort by an "is null" column and then by the actual column order.
        clauseSqls.push(`CASE WHEN ${extractSql} IS NULL THEN 1 ELSE 0 END ${nullsOrder}, ${extractSql} ${order} `);
        bindingLists.push(bindings);
    }
    return {
        sql: `ORDER BY ${clauseSqls.join(`, `)}`,
        bindings: bindingLists.reduce(flatten$1),
    };
}
|
|
2501
|
-
// Projects each alias's raw JSON column as '<alias>.JSON'.
function columnsSql(names) {
    const columns = names.map((name) => `'${name}'.data as '${name}.JSON'`);
    return columns.join(', ');
}
|
|
2504
|
-
// Renders the JOIN clauses for a record query. Each join targets the records
// CTE under its alias; multiple ON conditions are AND-ed together. Bindings
// from every condition are collected (via side-effecting push) in order.
function joinsToSql(joins) {
    const allBindings = [];
    const sql = joins.reduce((joinAccumulator, join) => {
        // fold this join's conditions into "c1 AND c2 AND ..."
        const conditionSQL = join.conditions.reduce((conditionAccumalator, condition) => {
            const { sql, bindings } = predicateToSql(condition);
            const conditionSQL = `${conditionAccumalator}${conditionAccumalator.length === 0 ? '' : ' AND '}${sql}`;
            allBindings.push(...bindings);
            return conditionSQL;
        }, '');
        // append "<TYPE> JOIN recordsCTE as '<alias>' [ON <conditions>]"
        const joinSQL = `${joinAccumulator}${joinAccumulator.length === 0 ? '' : ' '}${join.type} JOIN ${recordsCTE} as '${join.name}' ${conditionSQL.length === 0 ? '' : 'ON ' + conditionSQL}`;
        return joinSQL;
    }, '');
    return { sql, bindings: allBindings };
    //return joins.map((join) => `${join.type} join ${recordsCTE} as '${join.name}'`).join(' ');
}
|
|
2519
|
-
// Dispatches a predicate node to its SQL renderer. When `isNotOperator` is
// set (we are inside a NOT), comparison predicates additionally assert the
// field IS NOT NULL so NOT() matches SOQL null semantics.
function predicateToSql(predicate, isNotOperator = false) {
    if (isCompoundPredicate$1(predicate)) {
        return compoundPredicateToSql(predicate, isNotOperator);
    }
    if (isComparisonPredicate(predicate)) {
        if (isNotOperator) {
            return comparisonIsAlsoNotNullToSql(predicate);
        }
        return comparisonPredicateToSql(predicate);
    }
    if (isNullComparisonPredicate(predicate)) {
        return nullComparisonPredicateToSql(predicate);
    }
    if (isExistsPredicate(predicate)) {
        return existsPredicateToSql(predicate);
    }
    if (isDateFunctionPredicate(predicate)) {
        return dateFunctionPredicateToSql(predicate);
    }
    if (isBetweenPredicate(predicate)) {
        return betweenPredicateToSql(predicate);
    }
    // only NOT predicates remain
    return notPredicateToSql$1(predicate);
}
|
|
2542
|
-
// Renders a compound predicate as "( child1 <OP> child2 ... )", propagating
// the NOT context into each child.
// NOTE: assumes at least one child (the bindings reduce has no seed).
function compoundPredicateToSql(predicate, isNotOperator = false) {
    const operatorString = compoundOperatorToSql(predicate.operator);
    const parts = predicate.children.map((child) => predicateToSql(child, isNotOperator));
    const statementSql = parts.map((v) => v.sql).join(` ${operatorString} `);
    return {
        sql: `( ${statementSql} )`,
        bindings: parts.map((v) => v.bindings).reduce(flatten$1),
    };
}
|
|
2549
|
-
// Renders a date-function predicate. Only dayOfMonth is supported: it
// compares strftime('%d', ...) against a zero-padded day bound as a binding.
function dateFunctionPredicateToSql(predicate) {
    const operator = comparisonOperatorToSql(predicate.operator);
    const { sql: extract, bindings: extractBindings } = expressionToSql(predicate.extract);
    if (predicate.function === DateFunction.dayOfMonth) {
        const day = String(predicate.value).padStart(2, '0');
        return {
            sql: `strftime('%d', ${extract}) ${operator} ?`,
            bindings: extractBindings.concat(`'${day}'`),
        };
    }
    // other date functions fell through the original switch → undefined
}
|
|
2560
|
-
// This is useful for the 'scope' filter
// Renders an EXISTS (SELECT ...) over the records CTE, inner-joining each
// named alias without conditions (the conditions live in `predicate`).
function existsPredicateToSql(exists) {
    const { predicate, joinNames, alias } = exists;
    const joins = joinNames.map((joinName) => ({
        name: joinName,
        type: 'INNER',
        conditions: [],
    }));
    const { sql: select, bindings } = selectSql(predicate, alias, undefined, undefined, joins);
    return { sql: `EXISTS ${select}`, bindings };
}
|
|
2573
|
-
// Renders a comparison predicate, with special handling for date/datetime
// right-hand sides (wrapping the extracted field in date()/datetime() and
// delegating range rendering) and for case-insensitive string comparisons
// (COLLATE NOCASE placement differs for = vs IN).
function comparisonPredicateToSql(predicate) {
    const operator = comparisonOperatorToSql(predicate.operator);
    let { sql: left, bindings: leftBindings } = expressionToSql(predicate.left, undefined, predicate.operator);
    if (predicate.right.type === ValueType.DateEnum ||
        predicate.right.type === ValueType.DateTimeEnum ||
        predicate.right.type === ValueType.DateArray ||
        predicate.right.type === ValueType.DateTimeArray ||
        predicate.right.type === ValueType.DateValue ||
        predicate.right.type === ValueType.DateTimeValue) {
        // choose date() vs datetime() based on whether the literal has a time part
        const dateFunction = predicate.right.type === ValueType.DateTimeEnum ||
            predicate.right.type === ValueType.DateTimeArray ||
            predicate.right.type === ValueType.DateTimeValue
            ? 'datetime'
            : 'date';
        const fieldDateValue = `${dateFunction}(${left})`;
        return comparisonDateLiteralToSql(fieldDateValue, predicate.operator, predicate.right);
    }
    if (predicate.right.type === ValueType.RelativeDate) {
        const dateFunc = predicate.right.hasTime ? 'datetime' : 'date';
        left = `${dateFunc}(${left})`;
    }
    const { sql: right, bindings: rightBindings } = expressionToSql(predicate.right, undefined, predicate.operator);
    let bindings = leftBindings.concat(rightBindings);
    if (predicate.operator === ComparisonOperator.eq &&
        predicate.right.type === ValueType.StringLiteral &&
        predicate.right.isCaseSensitive === false) {
        return { sql: `${left} ${operator} ${right} COLLATE NOCASE`, bindings };
    }
    if (predicate.operator === ComparisonOperator.in &&
        predicate.right.type === ValueType.StringArray &&
        predicate.right.isCaseSensitive === false) {
        // If an explicit collating sequence is required on an IN operator it should be applied to the left operand,
        // like this: "x COLLATE nocase IN (y,z, ...)".
        return { sql: `${left} COLLATE NOCASE ${operator} ${right}`, bindings };
    }
    return { sql: `${left} ${operator} ${right}`, bindings };
}
|
|
2610
|
-
// Renders "compareDate BETWEEN start AND end", concatenating the bindings of
// all three expressions in order.
function betweenPredicateToSql(predicate) {
    const compare = expressionToSql(predicate.compareDate);
    const start = expressionToSql(predicate.start);
    const end = expressionToSql(predicate.end);
    return {
        sql: `${compare.sql} BETWEEN ${start.sql} AND ${end.sql}`,
        bindings: compare.bindings.concat(start.bindings, end.bindings),
    };
}
|
|
2617
|
-
/**
 * used to make a sql statement that also checks if that field is not null
 * @param predicate
 * @returns
 */
function comparisonIsAlsoNotNullToSql(predicate) {
    const comparisonResult = predicateToSql(predicate);
    const notNullResult = predicateToSql({
        left: predicate.left,
        operator: NullComparisonOperator.isNot,
        type: PredicateType$1.nullComparison,
    });
    return {
        sql: `${comparisonResult.sql} AND ${notNullResult.sql}`,
        bindings: comparisonResult.bindings.concat(notNullResult.bindings),
    };
}
|
|
2635
|
-
// Renders a NOT predicate. Comparison children get an extra IS NOT NULL guard
// (so NOT(x = v) doesn't match NULL rows); compound children have the NOT
// distributed over each child; anything else is wrapped in a plain NOT (...).
function notPredicateToSql$1(predicate) {
    if (predicate.child === undefined) {
        // NOT with no child renders to nothing
        return { sql: '', bindings: [] };
    }
    if (isComparisonPredicate(predicate.child)) {
        const { sql, bindings } = comparisonIsAlsoNotNullToSql(predicate.child);
        return {
            sql: `NOT (${sql})`,
            bindings,
        };
    }
    else if (isCompoundPredicate$1(predicate.child)) {
        // each child is rendered in NOT context; comparisons get the null guard
        const createCompoundNots = (p) => {
            if (isComparisonPredicate(p)) {
                return comparisonIsAlsoNotNullToSql(p);
            }
            else {
                return predicateToSql(p, true);
            }
        };
        const compoundNot = predicate.child.children.map(createCompoundNots);
        const operatorString = compoundOperatorToSql(predicate.child.operator);
        const statementSql = compoundNot.map((v) => `(${v.sql})`).join(` ${operatorString} `);
        const bindings = compoundNot.map((v) => v.bindings).reduce(flatten$1);
        return { sql: `NOT ( ${statementSql} )`, bindings };
    }
    else {
        const { sql, bindings } = predicateToSql(predicate.child);
        return { sql: `NOT (${sql})`, bindings };
    }
}
|
|
2666
|
-
// Renders "expr IS NULL" / "expr IS NOT NULL".
function nullComparisonPredicateToSql(predicate) {
    const { sql: leftSql, bindings } = expressionToSql(predicate.left);
    const operator = predicate.operator === NullComparisonOperator.is ? 'IS' : 'IS NOT';
    return { sql: `${leftSql} ${operator} NULL`, bindings };
}
|
|
2671
|
-
/**
 * Wraps a SQL fragment so that SQLite's 0/1 boolean representation is
 * re-emitted as JSON `true`/`false`; all other data types pass through.
 *
 * @param initialSql - the raw extraction SQL fragment.
 * @param targetDataType - the Salesforce field data type, e.g. 'Boolean'.
 * @returns the (possibly wrapped) SQL fragment.
 */
function coerceToTargetDataType(initialSql, targetDataType) {
    return targetDataType === 'Boolean'
        ? `case when ${initialSql} = 1 then json('true') else json('false') end`
        : initialSql;
}
|
|
2679
|
-
/**
 * Converts a value-expression AST node into a SQL fragment plus positional
 * bindings. Dispatches on `expression.type` (ValueType).
 *
 * @param expression - the value node (extract, literal, array, enum, ...).
 * @param targetDataType - optional field data type used to coerce the
 *   extracted SQL (currently only 'Boolean' changes the output).
 * @param operator - optional comparison operator; only consulted for
 *   multi-picklist includes/excludes handling.
 * @returns `{ sql, bindings }`; falls out with undefined for unknown types
 *   (the switch has no default — presumably unreachable, verify callers).
 */
function expressionToSql(expression, targetDataType, operator) {
    switch (expression.type) {
        case ValueType.Extract: {
            // displayValue's for Booleans are special, they return null
            if (expression.subfield === 'displayValue' && targetDataType === 'Boolean') {
                return { sql: 'null', bindings: [] };
            }
            // metadata extract is somewhat different than a data extract
            // (reads the `metadata` column rather than the JSON data column).
            if (expression.metadata === true) {
                let sql = `json_extract("${expression.jsonAlias}".metadata, '${pathPrefix}.${expression.field}')`;
                if (targetDataType !== undefined) {
                    sql = coerceToTargetDataType(sql, targetDataType);
                }
                return { sql, bindings: [] };
            }
            let path = extractPath(expression.field, expression.subfield);
            // For multiple picklist includes/excluding filtering, we need to prefix and suffix the field value with ';'
            // to make the match safe.
            // sample: field value: 'item12;item123', input value is 'item1'; they need to be converted to
            // ';item12;item123;' and '%;item1;%' first, then do sqlite like operation.
            let sql = operator === ComparisonOperator.includes || operator === ComparisonOperator.excludes
                ? `'${MultiPickListValueSeparator$1}' || json_extract("${expression.jsonAlias}.JSON", '${pathPrefix}.${path}') || '${MultiPickListValueSeparator$1}'`
                : `json_extract("${expression.jsonAlias}.JSON", '${pathPrefix}.${path}')`;
            if (targetDataType !== undefined) {
                sql = coerceToTargetDataType(sql, targetDataType);
            }
            return {
                sql,
                bindings: [],
            };
        }
        case ValueType.BooleanLiteral:
            // SQLite does not have a boolean type, instead use number 0 or 1
            return { sql: '?', bindings: [expression.value.valueOf() ? 1 : 0] };
        case ValueType.DoubleLiteral:
        case ValueType.IntLiteral:
            return { sql: '?', bindings: [expression.value] };
        case ValueType.StringArray:
            // Each element becomes one `?` placeholder inside `( ... )`.
            return expressionArrayToSql(expression.value, (e) => ({
                sql: '?',
                bindings: [e],
            }));
        case ValueType.NumberArray:
            return expressionArrayToSql(expression.value, (e) => ({
                sql: '?',
                bindings: [e],
            }));
        case ValueType.NullValue:
            return { sql: 'null', bindings: [] };
        case ValueType.DateEnum:
            return { sql: dateEnumToSql(expression.value), bindings: [] };
        case ValueType.DateTimeEnum:
            return { sql: dateTimeEnumToSql(expression.value), bindings: [] };
        case ValueType.DateTimeArray:
            // Recurse per element; elements are themselves value nodes.
            return expressionArrayToSql(expression.value, expressionToSql);
        case ValueType.DateArray:
            return expressionArrayToSql(expression.value, expressionToSql);
        case ValueType.DateRange:
        case ValueType.DateTimeRange:
            //not used
            return { sql: '', bindings: [] };
        case ValueType.RelativeDate:
            return relativeDateToSql(expression);
        case ValueType.DateValue:
        case ValueType.DateTimeValue:
            // NOTE(review): the binding is quoted (`'value'`) here — verify this
            // matches how downstream binds/compares date strings.
            return { sql: '?', bindings: [`'${expression.value}'`] };
        case ValueType.StringLiteral:
            return stringLiteralToSql(expression);
        case ValueType.MultiPicklistSet:
            return multiPicklistToSql$1(expression, operator);
    }
}
|
|
2751
|
-
/**
 * Renders a string literal value node as SQL.
 *
 * Values flagged `safe` are inlined verbatim between single quotes;
 * everything else becomes a `?` placeholder with one binding.
 *
 * @param string - node with `safe` flag and `value` text.
 * @returns `{ sql, bindings }`.
 */
function stringLiteralToSql(string) {
    const { safe, value } = string;
    return safe === true
        ? { sql: `'${value}'`, bindings: [] }
        : { sql: '?', bindings: [value] };
}
|
|
2758
|
-
function expressionArrayToSql(expressions, toSql) {
|
|
2759
|
-
const results = expressions.map(toSql);
|
|
2760
|
-
const sql = `(${results.map((v) => v.sql).join(', ')})`;
|
|
2761
|
-
const bindings = results.length > 0 ? results.map((v) => v.bindings).reduce(flatten$1) : [];
|
|
2762
|
-
return { sql, bindings };
|
|
2763
|
-
}
|
|
2764
|
-
/**
 * Renders a multi-picklist term as a single `?` placeholder.
 *
 * Individual multipicklist terms delimited by semicolon are stored
 * server-side as lexically sorted strings and treated like logical ANDs.
 * We approximate this in SQL with wildcarded `LIKE` operators; multiple
 * terms are logically OR'd together per SOQL docs (https://sfdc.co/c9j0r).
 * For includes/excludes the binding is wrapped as `%;value;%` so the LIKE
 * match is delimiter-safe; eq/ne use the raw value.
 *
 * @returns `{ sql: '?', bindings: [binding] }`.
 */
function multiPicklistToSql$1({ value }, operator) {
    const wildcarded = operator === ComparisonOperator.includes ||
        operator === ComparisonOperator.excludes;
    const binding = wildcarded
        ? `%${MultiPickListValueSeparator$1}${value}${MultiPickListValueSeparator$1}%`
        : value;
    return { sql: '?', bindings: [binding] };
}
|
|
2778
|
-
/**
 * Renders a relative-date expression as a SQLite `date()`/`datetime()` call.
 *
 * - unit 'day': `date('now', ?)` with binding "<amount> days".
 * - unit 'month': anchored at 'start of month'; offset 'end' advances one
 *   extra month and steps back a day to land on the month's last day.
 * Other units fall through and return undefined (matching the original
 * switch with no default).
 *
 * @param expression - `{ hasTime, unit, amount, offset? }`.
 * @returns `{ sql, bindings }` or undefined for unknown units.
 */
function relativeDateToSql(expression) {
    const fn = expression.hasTime ? 'datetime' : 'date';
    if (expression.unit === 'day') {
        return { sql: `${fn}('now', ?)`, bindings: [`${expression.amount} days`] };
    }
    if (expression.unit === 'month') {
        if (expression.offset === 'end') {
            // End of target month = start of the following month minus one day.
            return {
                sql: `${fn}('now', 'start of month', ?, '-1 day')`,
                bindings: [`${expression.amount + 1} months`],
            };
        }
        return {
            sql: `${fn}('now', 'start of month', ?)`,
            bindings: [`${expression.amount} months`],
        };
    }
}
|
|
2796
|
-
/**
 * Renders a date/datetime comparison against a literal, enum, or array of
 * date inputs.
 *
 * @param leftOperand - already-rendered SQL for the left-hand side.
 * @param operator - comparison operator ('in'/'nin' get special handling).
 * @param dateInput - the right-hand value node.
 * @returns `{ sql, bindings }`; empty sql for unsupported combinations.
 */
function comparisonDateLiteralToSql(leftOperand, operator, dateInput) {
    // Single enum (e.g. TODAY / TOMORROW): the right side is a computed expression.
    if (dateInput.type === ValueType.DateEnum || dateInput.type === ValueType.DateTimeEnum) {
        return {
            sql: `${leftOperand} ${comparisonDateRightExpressionToSql(operator, dateInput.value)}`,
            bindings: [],
        };
    }
    if (dateInput.type === ValueType.DateTimeArray || dateInput.type === ValueType.DateArray) {
        // Only discrete date could use 'in' keyword in SQL
        if (useInOrNinOperator(dateInput)) {
            // Render each element to a fragment suitable for an IN list.
            const sectionResults = dateInput.value.map((element) => {
                if (element.type === ValueType.DateEnum ||
                    element.type === ValueType.DateTimeEnum) {
                    return { sql: discreteDateEnumToSql(element.value), bindings: [] };
                }
                else if (element.type === ValueType.DateValue ||
                    element.type === ValueType.DateTimeValue) {
                    return { sql: '?', bindings: [`'${element.value}'`] };
                }
                // Anything else degrades to NULL inside the IN list.
                return { sql: 'null', bindings: [] };
            });
            if (operator === 'in' || operator === 'nin') {
                const sectionSQL = sectionResults.map((v) => v.sql).join(', ');
                const sectionBinding = sectionResults
                    .map((v) => v.bindings)
                    .reduce(flatten$1);
                const transformedOperator = operator === 'in' ? 'IN' : 'NOT IN';
                return {
                    sql: `${leftOperand} ${transformedOperator} (${sectionSQL})`,
                    bindings: sectionBinding,
                };
            }
            // Array input with a non-set operator: nothing to render.
            return { sql: '', bindings: [] };
        }
        // Non-discrete elements: recurse per element as an equality and OR them.
        const sectionResults = dateInput.value.map((scalerDateInput) => comparisonDateLiteralToSql(leftOperand, ComparisonOperator.eq, scalerDateInput));
        const sectionBinding = sectionResults.map((v) => v.bindings).reduce(flatten$1);
        if (operator === 'in') {
            return {
                sql: `(` + sectionResults.map((v) => v.sql).join(' or ') + `)`,
                bindings: sectionBinding,
            };
        }
        else if (operator === 'nin') {
            return {
                sql: `not (` + sectionResults.map((v) => v.sql).join(' or ') + `)`,
                bindings: sectionBinding,
            };
        }
        // NOTE(review): other operators fall through to the checks below and
        // ultimately return empty sql — confirm that is the intended behavior.
    }
    // Scalar date/datetime literal: wrap the bound value in date()/datetime().
    if (dateInput.type === ValueType.DateValue || dateInput.type === ValueType.DateTimeValue) {
        const dateFunction = dateInput.type === ValueType.DateTimeValue ? 'datetime' : 'date';
        const compOperator = comparisonOperatorToSql(operator);
        return {
            sql: leftOperand + ` ${compOperator} ` + `${dateFunction}(?)`,
            bindings: [`${dateInput.value}`],
        };
    }
    return { sql: '', bindings: [] };
}
|
|
2855
|
-
/**
 * Maps a datetime enum (TODAY / TOMORROW) to a SQLite `datetime()` call.
 * Unhandled enum values produce an empty string.
 *
 * @param dateEnum - a DateEnumType value.
 * @returns the SQL expression string, or '' when unsupported.
 */
function dateTimeEnumToSql(dateEnum) {
    // TODO [W-1109312]: apply the date filter [W-1109312]
    if (dateEnum === DateEnumType.today) {
        return `datetime('now')`;
    }
    if (dateEnum === DateEnumType.tomorrow) {
        return `datetime('now', '+1 day')`;
    }
    return ``;
}
|
|
2866
|
-
/**
 * Maps a date enum (TODAY / TOMORROW) to a SQLite `date()` call.
 * Unhandled enum values produce an empty string.
 *
 * @param dateEnum - a DateEnumType value.
 * @returns the SQL expression string, or '' when unsupported.
 */
function dateEnumToSql(dateEnum) {
    // TODO [W-1109312]: apply the date filter [W-1109312]
    if (dateEnum === DateEnumType.today) {
        return `date('now')`;
    }
    if (dateEnum === DateEnumType.tomorrow) {
        return `date('now', '+1 day')`;
    }
    return ``;
}
|
|
2877
|
-
/**
 * Maps a CompoundOperator to its SQL keyword ('AND' / 'OR').
 * Returns undefined for anything else, matching the original switch
 * with no default branch.
 */
function compoundOperatorToSql(operator) {
    if (operator === CompoundOperator.and) {
        return 'AND';
    }
    if (operator === CompoundOperator.or) {
        return 'OR';
    }
}
|
|
2885
|
-
|
|
2886
|
-
// Local aliases for frequently-used built-ins.
const isArray$4 = Array.isArray;
const keys$7 = Object.keys;
|
|
2888
|
-
|
|
2889
|
-
/**
 * Type guard: true when the AST node is a GraphQL ListValue node.
 */
function isListValueNode(node) {
    const { kind } = node;
    return kind === 'ListValue';
}
|
|
2892
|
-
/**
 * Type guard: true when the (possibly undefined) node is an
 * ObjectFieldSelection node.
 */
function isObjectFieldSelection(node) {
    if (node === undefined) {
        return false;
    }
    return node.kind === 'ObjectFieldSelection';
}
|
|
2895
|
-
/**
 * Type guard: true when the (possibly undefined) node is a
 * CustomFieldSelection node.
 */
function isCustomFieldNode(node) {
    if (node === undefined) {
        return false;
    }
    return node.kind === 'CustomFieldSelection';
}
|
|
2898
|
-
/**
 * Type guard: true when the (possibly undefined) node is a
 * ScalarFieldSelection node.
 */
function isScalarFieldNode(node) {
    if (node === undefined) {
        return false;
    }
    return node.kind === 'ScalarFieldSelection';
}
|
|
2901
|
-
/**
 * Type guard: true when the AST node is a GraphQL OperationDefinition node.
 */
function isOperationDefinition(node) {
    const { kind } = node;
    return kind === 'OperationDefinition';
}
|
|
2904
|
-
/**
 * Type guard: true when the AST node is a GraphQL ObjectValue node.
 */
function isObjectValueNode$1(node) {
    const { kind } = node;
    return kind === 'ObjectValue';
}
|
|
2907
|
-
/**
 * Generic kind check: true when the node's `kind` matches the given kind.
 */
function is(node, kind) {
    return kind === node.kind;
}
|
|
2910
|
-
/**
 * True when the item is anything other than `undefined` (null passes).
 */
function isDefined(item) {
    return !(item === undefined);
}
|
|
2913
|
-
/**
 * True when the value is one of the compound filter operators (and / or).
 */
function isCompoundOperator(value) {
    return value === CompoundOperator.and || value === CompoundOperator.or;
}
|
|
2917
|
-
/**
 * True when the Salesforce field data type is one of the scalar types this
 * SQL layer can filter on directly.
 */
function isScalarDataType(type) {
    const scalarTypes = new Set([
        'Boolean',
        'String',
        'Double',
        'Date',
        'DateTime',
        'Int',
        'WeakEtag',
        'Picklist',
        'Currency',
        'MultiPicklist',
        'Time',
        'Phone',
        'Url',
        'Email',
        'TextArea',
        'Percent',
        'EncryptedString',
    ]);
    return scalarTypes.has(type);
}
|
|
2938
|
-
|
|
2939
|
-
// Key used inside filter objects to express logical negation, e.g. { not: { ... } }.
const NotOperator = 'not';
// Shorthand aliases for the comparison operators used throughout this module.
const { eq, ne, gt, gte, lt, lte, nin, like, includes, excludes } = ComparisonOperator;
// 'in' is a reserved word in JavaScript, so it cannot be destructured like the others.
const inOp = ComparisonOperator.in;
|
|
2942
|
-
/**
 * Converts a list of filter object nodes into a single combined predicate.
 *
 * Each entry of each object value becomes a predicate via `filter(...)`;
 * all resulting predicates are combined with `compoundOperator`.
 *
 * @param fieldValues - array of ObjectValue filter nodes.
 * @param joinAlias - table alias the predicates extract from.
 * @param apiName - object API name used for field-info lookup.
 * @param input - context carrying object info (shape not visible here).
 * @param compoundOperator - how sibling predicates combine (default AND).
 * @param joins - mutable array that nested spanning filters append to.
 * @param draftFunctions - draft-id resolution helpers.
 * @returns success({ predicate }) — predicate may be undefined when the
 *   fields produced nothing — or failure with all collected errors.
 */
function fieldsToFilters(fieldValues, joinAlias, apiName, input, compoundOperator = CompoundOperator.and, joins, draftFunctions) {
    const results = fieldValues
        .map((value) => {
        if (!isObjectValueNode$1(value)) {
            return [failure([message('Parent filter node should be an object.')])];
        }
        // One Result per field entry of the object node.
        return Object.entries(value.fields).map(([key, value]) => filter(key, value, joinAlias, apiName, input, joins, draftFunctions));
    })
        .reduce(flatten$1, []);
    // Fail fast with every error collected across all entries.
    const failures = results.filter(isFailure).reduce(flatMap(errors), []);
    if (failures.length > 0) {
        return failure(failures);
    }
    const containers = results.filter(isSuccess).map(values$5);
    const predicates = [];
    containers.forEach((c) => {
        if (c.predicate !== undefined) {
            predicates.push(c.predicate);
        }
    });
    // it is possible its subfields return nothing; for example the first 'CreateBy' within 'where: { CreatedBy: { CreatedBy: { Email: { eq: "xyz" } } } '
    if (predicates.length === 0) {
        return success({ predicate: undefined });
    }
    const resolvedPredicate = combinePredicates(predicates, compoundOperator);
    return success({ predicate: resolvedPredicate });
}
|
|
2969
|
-
//{where: {Field: ... | and: ... | or: ... | not: ...}}
/**
 * Entry point for a record `where` filter. Returns success(undefined) when
 * there is no where clause or the resolved predicate is empty, otherwise
 * the container produced by fieldsToFilters.
 */
function recordFilter(where, joinAlias, apiName, input, joins, draftFunctions) {
    if (where === undefined) {
        return success(undefined);
    }
    // when 'recordFilter' starts, there is no 'NotPredicated'
    return fieldsToFilters([where.value], joinAlias, apiName, input, CompoundOperator.and, joins, draftFunctions).map((result) => result.predicate === undefined || isEmptyPredicate(result.predicate) ? undefined : result);
}
|
|
2977
|
-
/**
 * Converts one filter entry (key/value pair from a `where` object) into a
 * predicate. The key may be a compound operator ('and'/'or'), the negation
 * operator ('not'), or a field name.
 *
 * @returns Result of a predicate container, or failure with messages.
 */
function filter(name, value, tableAlias, apiName, input, joins, draftFunctions) {
    // and/or: value must be a list of sub-filters.
    if (isCompoundOperator(name)) {
        if (!isListValueNode(value)) {
            return failure([message(`Value for ${name} node must be a list.`)]);
        }
        return compoundPredicate(name, value, tableAlias, apiName, input, joins, draftFunctions);
    }
    if (name === NotOperator) {
        const children = fieldsToFilters([value], tableAlias, apiName, input, CompoundOperator.and, joins, draftFunctions);
        // take the children of a not predicate
        // and wrap them all inside it
        return children.flatMap((container) => {
            if (container.predicate !== undefined) {
                return success({
                    predicate: {
                        type: PredicateType$1.not,
                        child: container.predicate,
                    },
                });
            }
            return failure([message('Unable to create not predicate.')]);
        });
    }
    // Plain field filter: value must be an object of operators/sub-filters.
    if (!isObjectValueNode$1(value)) {
        return failure([message('Filter node must be an object or list.')]);
    }
    return fieldFilter(name, value, tableAlias, apiName, input, joins, draftFunctions);
}
|
|
3005
|
-
/**
 * Builds the predicate for an and/or entry by converting every sub-filter
 * in the list and combining them with the given compound operator.
 */
function compoundPredicate(operator, list, joinAlias, apiName, input, joins, draftFunctions) {
    const subFilters = list.values;
    return fieldsToFilters(subFilters, joinAlias, apiName, input, operator, joins, draftFunctions);
}
|
|
3008
|
-
/**
 * spans a FieldNode with its ObjectValueNode is passed. All the predicates is added into the 'join' array.
 *
 * Builds a LEFT JOIN to the referenced object's table (aliased
 * `<alias>.<relationshipName>`) and converts the nested filter fields into
 * constraint predicates returned for the WHERE clause.
 *
 * @param fieldInfo - the Reference field's info (apiName, referenceToInfos, relationshipName).
 * @param fieldNode - the nested filter object for the related record.
 * @param alias - alias of the current table.
 * @param input - context for field-info lookup.
 * @param joins - mutable array; the new join is appended as a side effect.
 * @param draftFunctions - draft-id resolution helpers.
 * @returns undefined predicate
 */
function spanningFilter(fieldInfo, fieldNode, alias, input, joins, draftFunctions) {
    const { apiName: fieldName, referenceToInfos, relationshipName } = fieldInfo;
    // NOTE(review): only the first referenceToInfo is considered; polymorphic
    // references with multiple targets are not handled here — verify upstream.
    const referenceInfo = referenceToInfos[0];
    const jsonAlias = `${alias}.${relationshipName}`;
    const joinPredicate = referencePredicate(alias, jsonAlias, fieldName);
    if (referenceInfo === undefined) {
        return failure([message(`No reference info found for ${fieldName}`)]);
    }
    const { apiName } = referenceInfo;
    const conditions = [joinPredicate];
    const join = {
        name: jsonAlias,
        type: 'LEFT',
        to: alias,
        conditions,
    };
    // Side effect: register the join before resolving nested filters, so
    // deeper spanning filters are appended after it.
    joins.push(join);
    // moves constraint predicate to where
    const constraintPredicates = [];
    const filterResult = fieldsToFilters([fieldNode], jsonAlias, apiName, input, CompoundOperator.and, joins, draftFunctions);
    if (filterResult.isSuccess === false) {
        return filterResult;
    }
    filterResult.map((container) => {
        const { predicate } = container;
        if (predicate !== undefined) {
            constraintPredicates.push(predicate);
        }
    });
    return success({ predicate: combinePredicates(constraintPredicates, CompoundOperator.and) });
}
|
|
3048
|
-
/**
 * True when the field holds a record ID: either the 'Id' field itself or a
 * reference field (non-empty referenceToInfos). Such values may need
 * draft-id to canonical-id translation before comparison.
 */
function isIDValueField$1(fieldInfo) {
    if (fieldInfo.apiName === 'Id') {
        return true;
    }
    const refs = fieldInfo.referenceToInfos;
    return refs !== undefined && refs.length > 0;
}
|
|
3052
|
-
/**
 * Builds the predicate(s) for one field's filter object.
 *
 * Handles: spanning (Reference relationship) filters, null comparisons,
 * date-range operators, multi-picklist includes/excludes expansion,
 * string-set operators containing null, draft-id canonicalization for
 * ID-valued fields, and plain comparisons. Date filter functions
 * (DAY_OF_MONTH) are resolved separately and AND-ed in at the end.
 *
 * @returns success({ predicate }) or failure with messages.
 */
function fieldFilter(fieldName, fieldNode, alias, apiName, input, joins, draftFunctions) {
    const fieldInfoResult = getFieldInfo(apiName, fieldName, input);
    if (fieldInfoResult.isSuccess === false) {
        return failure([fieldInfoResult.error]);
    }
    const fieldInfo = fieldInfoResult.value;
    if (fieldInfo === undefined) {
        return failure([message(`Field ${fieldName} for type ${apiName} not found.`)]);
    }
    // Relationship filter (e.g. { CreatedBy: {...} }): delegate to spanningFilter,
    // which registers a join as a side effect.
    if (fieldInfo.dataType === 'Reference' && fieldInfo.relationshipName === fieldName) {
        return spanningFilter(fieldInfo, fieldNode, alias, input, joins, draftFunctions);
    }
    const idProcessingNeeded = isIDValueField$1(fieldInfo);
    const extract = {
        type: ValueType.Extract,
        jsonAlias: alias,
        field: fieldName,
    };
    const dateFunction = dateFunctions(fieldNode, extract, fieldInfo.dataType);
    //It's possible for a field to have more than one comparison operator which
    //should combine into compound predicate with 'and'
    const operators = fieldOperators(fieldNode, fieldInfo.dataType);
    if (dateFunction.isSuccess === false) {
        return failure(dateFunction.error);
    }
    if (operators.isSuccess === false) {
        return failure(operators.error);
    }
    let comparisons = operators.value.map((op) => {
        if (op.type === 'NullOperator') {
            return { type: PredicateType$1.nullComparison, left: extract, operator: op.operator };
        }
        if (op.type === 'DateOperator' && op.value.type === ValueType.DateRange) {
            return dateRangeComparison(op.value, op.operator, extract);
        }
        if (op.type === 'DateTimeOperator' && op.value.type === ValueType.DateTimeRange) {
            return dateRangeComparison(op.value, op.operator, extract);
        }
        if (op.type === 'MultiPicklistSetOperator') {
            // includes: any term may match (OR across terms);
            // excludes: every term must miss (AND across terms).
            const operator = op.operator === ComparisonOperator.includes
                ? CompoundOperator.or
                : CompoundOperator.and;
            const children = [];
            const length = op.value.value.length;
            for (let i = 0; i < length; i++) {
                const term = op.value.value[i];
                if (term !== null) {
                    // A term like 'A;B' means A AND B (for includes) per SOQL
                    // semantics, so split on the separator first.
                    const splittedValue = term
                        .split(MultiPickListValueSeparator$1)
                        .map((v) => v.trim())
                        .filter((v) => v.length > 0);
                    if (splittedValue.length === 1) {
                        children.push(comparison(extract, op.operator, {
                            type: ValueType.MultiPicklistSet,
                            value: term,
                        }));
                    }
                    else {
                        children.push({
                            type: PredicateType$1.compound,
                            // Inner combinator is the inverse of the outer one.
                            operator: op.operator === ComparisonOperator.includes
                                ? CompoundOperator.and
                                : CompoundOperator.or,
                            children: splittedValue.map((singleValue) => {
                                return comparison(extract, op.operator, {
                                    type: ValueType.MultiPicklistSet,
                                    value: singleValue,
                                });
                            }),
                        });
                    }
                }
            }
            return {
                type: PredicateType$1.compound,
                operator,
                children,
            };
        }
        // in/nin lists containing null need an explicit IS [NOT] NULL branch,
        // since SQL IN never matches NULL.
        if (op.type === 'StringSetOperator' && op.value.value.includes(null)) {
            const children = [
                {
                    type: PredicateType$1.nullComparison,
                    left: extract,
                    operator: op.operator === 'in'
                        ? NullComparisonOperator.is
                        : NullComparisonOperator.isNot,
                },
            ];
            if (op.value.value.length > 1) {
                children.push(comparison(extract, op.operator, {
                    type: ValueType.StringArray,
                    safe: false,
                    isCaseSensitive: false,
                    value: op.value.value.filter((val) => val !== null),
                }));
            }
            return {
                type: PredicateType$1.compound,
                operator: op.operator === 'in' ? CompoundOperator.or : CompoundOperator.and,
                children,
            };
        }
        const rightOperand = op.value;
        // ID-valued fields: rewrite draft record ids to their canonical ids
        // before comparing. NOTE: this mutates the operand node in place.
        if (idProcessingNeeded) {
            if (isStringLiteral(rightOperand)) {
                if (rightOperand.value !== null) {
                    if (draftFunctions.isDraftId(rightOperand.value)) {
                        rightOperand.value = draftFunctions.getCanonicalId(rightOperand.value);
                    }
                }
            }
            else if (isStringArray(rightOperand)) {
                if (rightOperand.value !== null) {
                    rightOperand.value = rightOperand.value.map((originalId) => {
                        if (originalId !== null) {
                            if (draftFunctions.isDraftId(originalId)) {
                                return draftFunctions.getCanonicalId(originalId);
                            }
                        }
                        return originalId;
                    });
                }
            }
        }
        return comparison(extract, op.operator, rightOperand);
    });
    // AND together the comparisons plus any date-function predicates.
    const combined = combinePredicates(comparisons.concat(...dateFunction.value), CompoundOperator.and);
    const container = {
        predicate: combined,
    };
    return success(container);
}
|
|
3185
|
-
/**
 * Builds a predicate comparing a date expression against a date range.
 *
 * eq → BETWEEN start AND end; ne → NOT BETWEEN; the inequality operators
 * compare against whichever bound makes the half-open semantics correct
 * (lt uses start, lte/gt use end, gte uses start). No default case:
 * other operators yield undefined, matching the original.
 */
function dateRangeComparison(dateRange, operator, compareDate) {
    switch (operator) {
        case eq:
            return {
                type: PredicateType$1.between,
                compareDate,
                start: dateRange.start,
                end: dateRange.end,
            };
        case ne:
            return {
                type: PredicateType$1.not,
                child: {
                    type: PredicateType$1.between,
                    compareDate,
                    start: dateRange.start,
                    end: dateRange.end,
                },
            };
        case lt:
            // Strictly before the range begins.
            return comparison(compareDate, lt, dateRange.start);
        case lte:
            // At or before the range ends.
            return comparison(compareDate, lte, dateRange.end);
        case gt:
            // Strictly after the range ends.
            return comparison(compareDate, gt, dateRange.end);
        case gte:
            // At or after the range begins.
            return comparison(compareDate, gte, dateRange.start);
    }
}
|
|
3214
|
-
/**
 * Extracts date filter-function predicates (currently only DAY_OF_MONTH)
 * from a field's filter object. Only applies to Date/DateTime fields;
 * other data types return success([]).
 *
 * @param operatorNode - the field's filter ObjectValue node.
 * @param extract - the extract expression for the field.
 * @param dataType - the field's data type.
 * @returns success(array of dateFunction predicates) or failure(messages).
 */
function dateFunctions(operatorNode, extract, dataType) {
    if (dataType !== 'Date' && dataType !== 'DateTime') {
        return success([]);
    }
    const results = Object.entries(operatorNode.fields).map(([key, valueNode]) => {
        // Non-function keys (plain operators) are handled by fieldOperators.
        if (isFilterFunction(key) === false) {
            return success([]);
        }
        if (!isObjectValueNode$1(valueNode)) {
            return failure([message('Date function expects an object node.')]);
        }
        // Only the first operator entry of the function object is used.
        const [opKey, opValue] = Object.entries(valueNode.fields)[0];
        const result = operatorWithValue(opKey, opValue, 'Int')
            .flatMap((op) => {
            if (op.type !== 'IntOperator') {
                return failure([message('Date function expects Int values')]);
            }
            const predicate = {
                type: PredicateType$1.dateFunction,
                operator: op.operator,
                function: DateFunction.dayOfMonth,
                value: op.value.value,
                extract,
            };
            return success(predicate);
        })
            .map((r) => [r]);
        return result;
    });
    const fails = results.filter(isFailure).reduce(flatMap(errors), []);
    if (fails.length > 0) {
        return failure(fails);
    }
    const vals = results.filter(isSuccess).reduce(flatMap(values$5), []);
    return success(vals);
}
|
|
3250
|
-
/**
 * True when the filter key names a supported date filter function.
 * DAY_OF_MONTH is the only one handled today.
 */
function isFilterFunction(name) {
    const supported = 'DAY_OF_MONTH';
    return name === supported;
}
|
|
3253
|
-
/**
 * Parses the non-function entries of a field's filter object into typed
 * operator descriptors via operatorWithValue.
 *
 * @param operatorNode - the field's filter ObjectValue node.
 * @param dataType - the field's data type, used to validate values.
 * @returns success(array of operator descriptors) or failure with every
 *   collected error message.
 */
function fieldOperators(operatorNode, dataType) {
    const results = Object.entries(operatorNode.fields)
        .filter(([key, _]) => isFilterFunction(key) === false)
        .map(([key, value]) => operatorWithValue(key, value, dataType));
    const _values = results.filter(isSuccess).map(values$5);
    const fails = results.filter(isFailure).reduce(flatMap(errors), []);
    if (fails.length > 0) {
        return failure(fails);
    }
    return success(_values);
}
|
|
3264
|
-
/**
 * True for set-membership operators: in / nin.
 */
function isSetOperatorType(value) {
    return value === inOp || value === nin;
}
|
|
3268
|
-
/**
 * True for multi-picklist set operators: includes / excludes.
 */
function isMultiPicklistSetOperatorType(value) {
    return value === excludes || value === includes;
}
|
|
3272
|
-
/**
 * True for the scalar comparison operators: eq, ne, lt, gt, lte, gte.
 */
function isScalarOperatorType(value) {
    return (value === eq ||
        value === ne ||
        value === lt ||
        value === gt ||
        value === lte ||
        value === gte);
}
|
|
3276
|
-
/**
 * True for operators valid on Boolean fields: eq / ne only.
 */
function isBooleanOperatorType(value) {
    return [eq, ne].includes(value);
}
|
|
3279
|
-
/**
 * Maps eq/ne to the corresponding null comparison (IS / IS NOT).
 * Other operators yield undefined, matching the original switch
 * with no default branch.
 */
function nullOperatorTypeFrom(value) {
    if (value === eq) {
        return NullComparisonOperator.is;
    }
    if (value === ne) {
        return NullComparisonOperator.isNot;
    }
}
|
|
3287
|
-
/**
 * True for operators valid on string-like fields: the scalar operators
 * plus `like`.
 */
function isStringOperatorType(value) {
    if (value === like) {
        return true;
    }
    return isScalarOperatorType(value);
}
|
|
3290
|
-
/**
 * True for operators valid on Picklist fields: eq, ne, like and the
 * ordering comparisons.
 */
function isPicklistOperatorType(value) {
    const allowed = [eq, ne, like, lt, gt, lte, gte];
    return allowed.indexOf(value) !== -1;
}
|
|
3294
|
-
/**
 * True for operators valid on Currency fields — exactly the scalar
 * comparison operators.
 */
function isCurrencyOperatorType(value) {
    const scalar = isScalarOperatorType(value);
    return scalar;
}
|
|
3297
|
-
/**
 * True for non-set operators valid on MultiPicklist fields: eq / ne.
 */
function isMultiPicklistOperatorType(value) {
    return value === eq || value === ne;
}
|
|
3301
|
-
/**
 * Unwraps a ListValue node into the raw `.value`s of its elements,
 * requiring every element to be of the given AST kind.
 *
 * @param list - the ListValue node.
 * @param kind - required element kind, e.g. 'IntValue'.
 * @returns success(values) or failure(message) naming the first mismatch.
 */
function listNodeToTypeArray(list, kind) {
    const hasKind = (node) => is(node, kind);
    const mismatches = list.values.filter((node) => !hasKind(node));
    if (mismatches[0] !== undefined) {
        return failure(message(`${JSON.stringify(mismatches[0])} is not a valid value in list of ${kind}.`));
    }
    const values = list.values.filter(hasKind).map((node) => node.value);
    return success(values);
}
|
|
3310
|
-
function operatorWithValue(operator, valueNode, objectInfoDataType) {
|
|
3311
|
-
if (is(valueNode, 'NullValue')) {
|
|
3312
|
-
return parseNullValue(operator).mapError((e) => [e]);
|
|
3313
|
-
}
|
|
3314
|
-
if (['String', 'Reference', 'Phone', 'Url', 'Email', 'TextArea'].includes(objectInfoDataType)) {
|
|
3315
|
-
if (isStringOperatorType(operator)) {
|
|
3316
|
-
return is(valueNode, 'StringValue')
|
|
3317
|
-
? success({
|
|
3318
|
-
type: `StringOperator`,
|
|
3319
|
-
operator,
|
|
3320
|
-
value: stringLiteral(valueNode.value),
|
|
3321
|
-
})
|
|
3322
|
-
: failure([message(`Comparison value must be a ${objectInfoDataType}.`)]);
|
|
3323
|
-
}
|
|
3324
|
-
if (isSetOperatorType(operator)) {
|
|
3325
|
-
if (!is(valueNode, 'ListValue')) {
|
|
3326
|
-
return failure([
|
|
3327
|
-
message(`Comparison value must be a ${objectInfoDataType} array.`),
|
|
3328
|
-
]);
|
|
3329
|
-
}
|
|
3330
|
-
const listValues = [];
|
|
3331
|
-
for (const node of valueNode.values) {
|
|
3332
|
-
switch (node.kind) {
|
|
3333
|
-
case 'NullValue':
|
|
3334
|
-
listValues.push(null);
|
|
3335
|
-
break;
|
|
3336
|
-
case 'StringValue':
|
|
3337
|
-
listValues.push(node.value);
|
|
3338
|
-
break;
|
|
3339
|
-
default:
|
|
3340
|
-
continue;
|
|
3341
|
-
}
|
|
3342
|
-
}
|
|
3343
|
-
return success({
|
|
3344
|
-
operator,
|
|
3345
|
-
type: 'StringSetOperator',
|
|
3346
|
-
value: {
|
|
3347
|
-
type: ValueType.StringArray,
|
|
3348
|
-
value: listValues,
|
|
3349
|
-
safe: false,
|
|
3350
|
-
isCaseSensitive: false,
|
|
3351
|
-
},
|
|
3352
|
-
});
|
|
3353
|
-
}
|
|
3354
|
-
}
|
|
3355
|
-
if (objectInfoDataType === 'Int') {
|
|
3356
|
-
if (isScalarOperatorType(operator)) {
|
|
3357
|
-
return is(valueNode, 'IntValue')
|
|
3358
|
-
? success({
|
|
3359
|
-
type: 'IntOperator',
|
|
3360
|
-
operator,
|
|
3361
|
-
value: { type: ValueType.IntLiteral, value: parseInt(valueNode.value) },
|
|
3362
|
-
})
|
|
3363
|
-
: failure([message(`Comparison value must be an int.`)]);
|
|
3364
|
-
}
|
|
3365
|
-
if (isSetOperatorType(operator)) {
|
|
3366
|
-
return is(valueNode, 'ListValue')
|
|
3367
|
-
? listNodeToTypeArray(valueNode, 'IntValue')
|
|
3368
|
-
.map((strings) => {
|
|
3369
|
-
return {
|
|
3370
|
-
operator,
|
|
3371
|
-
type: 'IntSetOperator',
|
|
3372
|
-
value: {
|
|
3373
|
-
type: ValueType.NumberArray,
|
|
3374
|
-
value: strings.map((s) => parseInt(s)),
|
|
3375
|
-
},
|
|
3376
|
-
};
|
|
3377
|
-
})
|
|
3378
|
-
.mapError((e) => [e])
|
|
3379
|
-
: failure([message(`Comparison value must be an int array.`)]);
|
|
3380
|
-
}
|
|
3381
|
-
}
|
|
3382
|
-
if (objectInfoDataType === 'Double') {
|
|
3383
|
-
if (isScalarOperatorType(operator)) {
|
|
3384
|
-
// allow a float/double value to be passed
|
|
3385
|
-
// also allow an integer to be passed to a double, but not a double to an integer
|
|
3386
|
-
const isFloatOrInt = is(valueNode, 'FloatValue') ||
|
|
3387
|
-
is(valueNode, 'IntValue');
|
|
3388
|
-
return isFloatOrInt
|
|
3389
|
-
? success({
|
|
3390
|
-
type: 'DoubleOperator',
|
|
3391
|
-
operator,
|
|
3392
|
-
value: { type: ValueType.DoubleLiteral, value: parseFloat(valueNode.value) },
|
|
3393
|
-
})
|
|
3394
|
-
: failure([message(`Comparison value must be a double.`)]);
|
|
3395
|
-
}
|
|
3396
|
-
if (isSetOperatorType(operator)) {
|
|
3397
|
-
return is(valueNode, 'ListValue')
|
|
3398
|
-
? listNodeToTypeArray(valueNode, 'FloatValue')
|
|
3399
|
-
.map((strings) => {
|
|
3400
|
-
return {
|
|
3401
|
-
operator,
|
|
3402
|
-
type: 'DoubleSetOperator',
|
|
3403
|
-
value: {
|
|
3404
|
-
type: ValueType.NumberArray,
|
|
3405
|
-
value: strings.map(parseFloat),
|
|
3406
|
-
},
|
|
3407
|
-
};
|
|
3408
|
-
})
|
|
3409
|
-
.mapError((e) => [e])
|
|
3410
|
-
: failure([message(`Comparison value must be a double array.`)]);
|
|
3411
|
-
}
|
|
3412
|
-
}
|
|
3413
|
-
if (objectInfoDataType === 'Percent') {
|
|
3414
|
-
if (isScalarOperatorType(operator)) {
|
|
3415
|
-
// Percents are documented as being Double-like, but in practice the UIAPI GraphQL
|
|
3416
|
-
// API will accent Integer (50) and Float (50.0) inputs and treat them equally
|
|
3417
|
-
const isPercentLike = (is(valueNode, 'FloatValue') ||
|
|
3418
|
-
is(valueNode, 'IntValue')) &&
|
|
3419
|
-
!isNaN(parseFloat(valueNode.value));
|
|
3420
|
-
if (isPercentLike) {
|
|
3421
|
-
return success({
|
|
3422
|
-
type: 'DoubleOperator',
|
|
3423
|
-
operator,
|
|
3424
|
-
value: { type: ValueType.DoubleLiteral, value: parseFloat(valueNode.value) },
|
|
3425
|
-
});
|
|
3426
|
-
}
|
|
3427
|
-
else {
|
|
3428
|
-
return failure([message(`Comparison value must be a ${objectInfoDataType}.`)]);
|
|
3429
|
-
}
|
|
3430
|
-
}
|
|
3431
|
-
if (isSetOperatorType(operator)) {
|
|
3432
|
-
if (is(valueNode, 'ListValue')) {
|
|
3433
|
-
const typeErrors = [];
|
|
3434
|
-
const values = [];
|
|
3435
|
-
for (const node of valueNode.values) {
|
|
3436
|
-
if (is(node, 'FloatValue') ||
|
|
3437
|
-
is(node, 'IntValue')) {
|
|
3438
|
-
values.push(parseFloat(node.value));
|
|
3439
|
-
}
|
|
3440
|
-
else {
|
|
3441
|
-
typeErrors.push(message(`Comparison value must be a ${objectInfoDataType} array.`));
|
|
3442
|
-
}
|
|
3443
|
-
}
|
|
3444
|
-
if (typeErrors.length) {
|
|
3445
|
-
return failure(typeErrors);
|
|
3446
|
-
}
|
|
3447
|
-
return success({
|
|
3448
|
-
operator,
|
|
3449
|
-
type: 'DoubleSetOperator',
|
|
3450
|
-
value: {
|
|
3451
|
-
type: ValueType.NumberArray,
|
|
3452
|
-
value: values,
|
|
3453
|
-
},
|
|
3454
|
-
});
|
|
3455
|
-
}
|
|
3456
|
-
else {
|
|
3457
|
-
return failure([
|
|
3458
|
-
message(`Comparison value must be a ${objectInfoDataType} array.`),
|
|
3459
|
-
]);
|
|
3460
|
-
}
|
|
3461
|
-
}
|
|
3462
|
-
}
|
|
3463
|
-
if (objectInfoDataType === 'Boolean') {
|
|
3464
|
-
if (isBooleanOperatorType(operator)) {
|
|
3465
|
-
return is(valueNode, 'BooleanValue')
|
|
3466
|
-
? success({
|
|
3467
|
-
type: 'BooleanOperator',
|
|
3468
|
-
operator,
|
|
3469
|
-
value: { type: ValueType.BooleanLiteral, value: valueNode.value },
|
|
3470
|
-
})
|
|
3471
|
-
: failure([message(`Comparison value must be a boolean.`)]);
|
|
3472
|
-
}
|
|
3473
|
-
}
|
|
3474
|
-
if (objectInfoDataType === 'Date') {
|
|
3475
|
-
if (isScalarOperatorType(operator)) {
|
|
3476
|
-
const result = dateInput(valueNode).mapError((e) => [e]);
|
|
3477
|
-
if (result.isSuccess === false) {
|
|
3478
|
-
return failure(result.error);
|
|
3479
|
-
}
|
|
3480
|
-
const { value: input } = result;
|
|
3481
|
-
if (input.type === ValueType.NullValue) {
|
|
3482
|
-
return parseNullValue(operator).mapError((e) => [e]);
|
|
3483
|
-
}
|
|
3484
|
-
return success({ type: 'DateOperator', operator, value: input });
|
|
3485
|
-
}
|
|
3486
|
-
if (isSetOperatorType(operator)) {
|
|
3487
|
-
if (is(valueNode, 'ListValue')) {
|
|
3488
|
-
return flattenResults(valueNode.values.map(dateInput)).map((value) => {
|
|
3489
|
-
return {
|
|
3490
|
-
type: 'DateSetOperator',
|
|
3491
|
-
operator,
|
|
3492
|
-
value: { type: ValueType.DateArray, value },
|
|
3493
|
-
};
|
|
3494
|
-
});
|
|
3495
|
-
}
|
|
3496
|
-
return failure([message('Comparison value must be a date array.')]);
|
|
3497
|
-
}
|
|
3498
|
-
}
|
|
3499
|
-
if (objectInfoDataType === 'DateTime') {
|
|
3500
|
-
if (isScalarOperatorType(operator)) {
|
|
3501
|
-
const result = dateTimeInput(valueNode).mapError((e) => [e]);
|
|
3502
|
-
if (result.isSuccess === false) {
|
|
3503
|
-
return failure(result.error);
|
|
3504
|
-
}
|
|
3505
|
-
const { value: input } = result;
|
|
3506
|
-
if (input.type === ValueType.NullValue) {
|
|
3507
|
-
return parseNullValue(operator).mapError((e) => [e]);
|
|
3508
|
-
}
|
|
3509
|
-
return success({ type: 'DateTimeOperator', operator, value: input });
|
|
3510
|
-
}
|
|
3511
|
-
if (isSetOperatorType(operator)) {
|
|
3512
|
-
if (is(valueNode, 'ListValue')) {
|
|
3513
|
-
return flattenResults(valueNode.values.map(dateTimeInput)).map((value) => {
|
|
3514
|
-
return {
|
|
3515
|
-
type: 'DateTimeSetOperator',
|
|
3516
|
-
operator,
|
|
3517
|
-
value: { type: ValueType.DateTimeArray, value },
|
|
3518
|
-
};
|
|
3519
|
-
});
|
|
3520
|
-
}
|
|
3521
|
-
return failure([message('Comparison value must be a date time array.')]);
|
|
3522
|
-
}
|
|
3523
|
-
}
|
|
3524
|
-
if (objectInfoDataType === 'Picklist') {
|
|
3525
|
-
if (isPicklistOperatorType(operator)) {
|
|
3526
|
-
return is(valueNode, 'StringValue')
|
|
3527
|
-
? success({
|
|
3528
|
-
type: 'PicklistOperator',
|
|
3529
|
-
operator,
|
|
3530
|
-
value: stringLiteral(valueNode.value),
|
|
3531
|
-
})
|
|
3532
|
-
: failure([message(`Comparison value must be a Picklist.`)]);
|
|
3533
|
-
}
|
|
3534
|
-
if (isSetOperatorType(operator)) {
|
|
3535
|
-
return is(valueNode, 'ListValue')
|
|
3536
|
-
? listNodeToTypeArray(valueNode, 'StringValue')
|
|
3537
|
-
.map((value) => {
|
|
3538
|
-
return {
|
|
3539
|
-
operator,
|
|
3540
|
-
type: 'PicklistSetOperator',
|
|
3541
|
-
value: {
|
|
3542
|
-
type: ValueType.StringArray,
|
|
3543
|
-
value,
|
|
3544
|
-
safe: false,
|
|
3545
|
-
isCaseSensitive: false,
|
|
3546
|
-
},
|
|
3547
|
-
};
|
|
3548
|
-
})
|
|
3549
|
-
.mapError((e) => [e])
|
|
3550
|
-
: failure([message(`Comparison value must be a Picklist array.`)]);
|
|
3551
|
-
}
|
|
3552
|
-
}
|
|
3553
|
-
if (objectInfoDataType === 'Currency') {
|
|
3554
|
-
if (isCurrencyOperatorType(operator)) {
|
|
3555
|
-
return is(valueNode, 'FloatValue')
|
|
3556
|
-
? success({
|
|
3557
|
-
type: 'CurrencyOperator',
|
|
3558
|
-
operator,
|
|
3559
|
-
value: { type: ValueType.DoubleLiteral, value: parseFloat(valueNode.value) },
|
|
3560
|
-
})
|
|
3561
|
-
: failure([message(`Comparison value must be a Currency.`)]);
|
|
3562
|
-
}
|
|
3563
|
-
if (isSetOperatorType(operator)) {
|
|
3564
|
-
return is(valueNode, 'ListValue')
|
|
3565
|
-
? listNodeToTypeArray(valueNode, 'FloatValue')
|
|
3566
|
-
.map((strings) => {
|
|
3567
|
-
return {
|
|
3568
|
-
operator,
|
|
3569
|
-
type: 'CurrencySetOperator',
|
|
3570
|
-
value: {
|
|
3571
|
-
type: ValueType.NumberArray,
|
|
3572
|
-
value: strings.map(parseFloat),
|
|
3573
|
-
},
|
|
3574
|
-
};
|
|
3575
|
-
})
|
|
3576
|
-
.mapError((e) => [e])
|
|
3577
|
-
: failure([message(`Comparison value must be a Currency array.`)]);
|
|
3578
|
-
}
|
|
3579
|
-
}
|
|
3580
|
-
if (objectInfoDataType === 'MultiPicklist') {
|
|
3581
|
-
if (isMultiPicklistOperatorType(operator)) {
|
|
3582
|
-
if (is(valueNode, 'StringValue')) {
|
|
3583
|
-
// The raw value could be ';;a; b;;', clean it up to 'a;b'
|
|
3584
|
-
const welformatedValue = valueNode.value
|
|
3585
|
-
.split(MultiPickListValueSeparator$1)
|
|
3586
|
-
.map((v) => v.trim())
|
|
3587
|
-
.filter((v) => v.length > 0)
|
|
3588
|
-
.join(MultiPickListValueSeparator$1);
|
|
3589
|
-
return success({
|
|
3590
|
-
type: 'MultiPicklistOperator',
|
|
3591
|
-
operator,
|
|
3592
|
-
value: stringLiteral(welformatedValue),
|
|
3593
|
-
});
|
|
3594
|
-
}
|
|
3595
|
-
return failure([message(`Comparison value must be a MultiPicklist`)]);
|
|
3596
|
-
}
|
|
3597
|
-
if (isMultiPicklistSetOperatorType(operator)) {
|
|
3598
|
-
if (is(valueNode, 'ListValue')) {
|
|
3599
|
-
return listNodeToTypeArray(valueNode, 'StringValue')
|
|
3600
|
-
.map((val) => {
|
|
3601
|
-
return {
|
|
3602
|
-
operator,
|
|
3603
|
-
type: 'MultiPicklistSetOperator',
|
|
3604
|
-
value: {
|
|
3605
|
-
type: ValueType.StringArray,
|
|
3606
|
-
value: val,
|
|
3607
|
-
safe: false,
|
|
3608
|
-
isCaseSensitive: true,
|
|
3609
|
-
},
|
|
3610
|
-
};
|
|
3611
|
-
})
|
|
3612
|
-
.mapError((e) => [e]);
|
|
3613
|
-
}
|
|
3614
|
-
else {
|
|
3615
|
-
return failure([message(`Comparison value must be a MultiPicklist array.`)]);
|
|
3616
|
-
}
|
|
3617
|
-
}
|
|
3618
|
-
}
|
|
3619
|
-
if (objectInfoDataType === 'Time') {
|
|
3620
|
-
if (isScalarOperatorType(operator)) {
|
|
3621
|
-
return is(valueNode, 'StringValue')
|
|
3622
|
-
? success({
|
|
3623
|
-
type: 'TimeOperator',
|
|
3624
|
-
operator,
|
|
3625
|
-
value: stringLiteral(valueNode.value),
|
|
3626
|
-
})
|
|
3627
|
-
: failure([message(`Comparison value must be a Time`)]);
|
|
3628
|
-
}
|
|
3629
|
-
if (isSetOperatorType(operator)) {
|
|
3630
|
-
return is(valueNode, 'ListValue')
|
|
3631
|
-
? listNodeToTypeArray(valueNode, 'StringValue')
|
|
3632
|
-
.map((value) => {
|
|
3633
|
-
return {
|
|
3634
|
-
operator,
|
|
3635
|
-
type: 'TimeSetOperator',
|
|
3636
|
-
value: {
|
|
3637
|
-
type: ValueType.StringArray,
|
|
3638
|
-
value,
|
|
3639
|
-
isCaseSensitive: true,
|
|
3640
|
-
safe: false,
|
|
3641
|
-
},
|
|
3642
|
-
};
|
|
3643
|
-
})
|
|
3644
|
-
.mapError((e) => [e])
|
|
3645
|
-
: failure([message(`Comparison value must be a Time array.`)]);
|
|
3646
|
-
}
|
|
3647
|
-
}
|
|
3648
|
-
return failure([
|
|
3649
|
-
message(`Comparison operator ${operator} is not supported for type ${objectInfoDataType}.`),
|
|
3650
|
-
]);
|
|
3651
|
-
}
|
|
3652
|
-
function isValidDate(value) {
|
|
3653
|
-
return isNaN(Date.parse(value)) === false;
|
|
3654
|
-
}
|
|
3655
|
-
function dateInput(node) {
|
|
3656
|
-
return parseDateNode(node, false, 'YYYY-MM-DD', isValidDate).map((result) => {
|
|
3657
|
-
switch (result.type) {
|
|
3658
|
-
case ValueType.NullValue:
|
|
3659
|
-
return result;
|
|
3660
|
-
case ValueType.StringLiteral:
|
|
3661
|
-
return { type: ValueType.DateValue, value: result.value };
|
|
3662
|
-
case 'range':
|
|
3663
|
-
return { type: ValueType.DateRange, start: result.start, end: result.end };
|
|
3664
|
-
case 'enum':
|
|
3665
|
-
return { type: ValueType.DateEnum, value: result.value };
|
|
3666
|
-
}
|
|
3667
|
-
});
|
|
3668
|
-
}
|
|
3669
|
-
function dateTimeInput(node) {
|
|
3670
|
-
return parseDateNode(node, true, 'YYYY-MM-DDTHH:MM:SS.SSSZ, YYYY-MM-DDTHH:MM:SSZ, YYYY-MM-DDTHH:MM:SS.SSS+|-HH:MM, or YYYY-MM-DDTHH:MM:SS+|-HH:MM', isValidDate).map((result) => {
|
|
3671
|
-
switch (result.type) {
|
|
3672
|
-
case ValueType.NullValue:
|
|
3673
|
-
return result;
|
|
3674
|
-
case ValueType.StringLiteral: {
|
|
3675
|
-
let dateString = result.value;
|
|
3676
|
-
if (dateString !== null && dateString.indexOf('.') === -1) {
|
|
3677
|
-
dateString = dateString.substring(0, dateString.length - 1) + '.000Z';
|
|
3678
|
-
}
|
|
3679
|
-
return { type: ValueType.DateTimeValue, value: dateString };
|
|
3680
|
-
}
|
|
3681
|
-
case 'range':
|
|
3682
|
-
return { type: ValueType.DateTimeRange, start: result.start, end: result.end };
|
|
3683
|
-
case 'enum':
|
|
3684
|
-
return { type: ValueType.DateTimeEnum, value: result.value };
|
|
3685
|
-
}
|
|
3686
|
-
});
|
|
3687
|
-
}
|
|
3688
|
-
function parseNullValue(op) {
|
|
3689
|
-
const operator = nullOperatorTypeFrom(op);
|
|
3690
|
-
if (operator !== undefined) {
|
|
3691
|
-
return success({ type: 'NullOperator', operator });
|
|
3692
|
-
}
|
|
3693
|
-
return failure(message(`Null can not be compared with ${op}`));
|
|
3694
|
-
}
|
|
3695
|
-
function parseDateNode(node, hasTime, dateFormat, isValidDate) {
|
|
3696
|
-
const typeName = hasTime ? 'DateTime' : 'Date';
|
|
3697
|
-
if (!isObjectValueNode$1(node)) {
|
|
3698
|
-
return failure(message(`Comparison value must be a ${typeName} input.`));
|
|
3699
|
-
}
|
|
3700
|
-
const valueField = node.fields.value;
|
|
3701
|
-
if (valueField !== undefined) {
|
|
3702
|
-
if (is(valueField, 'StringValue')) {
|
|
3703
|
-
// check the date is valid
|
|
3704
|
-
// then make sure if it isnt suppose to contain time stamps that it doesnt
|
|
3705
|
-
// and if it should have a timestamp it should contain it
|
|
3706
|
-
const includesTimeStamp = valueField.value.includes('T');
|
|
3707
|
-
if (isValidDate(valueField.value) &&
|
|
3708
|
-
((hasTime && includesTimeStamp) || (!hasTime && !includesTimeStamp))) {
|
|
3709
|
-
return success(stringLiteral(valueField.value));
|
|
3710
|
-
}
|
|
3711
|
-
return failure(message(`${typeName} format must be ${dateFormat}.`));
|
|
3712
|
-
}
|
|
3713
|
-
if (is(valueField, 'NullValue')) {
|
|
3714
|
-
return success({ type: ValueType.NullValue });
|
|
3715
|
-
}
|
|
3716
|
-
return failure(message(`${typeName} input value field must be a string.`));
|
|
3717
|
-
}
|
|
3718
|
-
const literalField = node.fields['literal'];
|
|
3719
|
-
if (literalField !== undefined) {
|
|
3720
|
-
if (is(literalField, 'EnumValue')) {
|
|
3721
|
-
switch (literalField.value) {
|
|
3722
|
-
case 'TODAY':
|
|
3723
|
-
return success({ type: 'enum', value: DateEnumType.today });
|
|
3724
|
-
case 'TOMORROW':
|
|
3725
|
-
return success({ type: 'enum', value: DateEnumType.tomorrow });
|
|
3726
|
-
case 'YESTERDAY':
|
|
3727
|
-
return success({ type: 'enum', value: DateEnumType.yesterday });
|
|
3728
|
-
case 'LAST_WEEK':
|
|
3729
|
-
return success({ type: 'enum', value: DateEnumType.last_week });
|
|
3730
|
-
case 'THIS_WEEK':
|
|
3731
|
-
return success({ type: 'enum', value: DateEnumType.this_week });
|
|
3732
|
-
case 'NEXT_WEEK':
|
|
3733
|
-
return success({ type: 'enum', value: DateEnumType.next_week });
|
|
3734
|
-
case 'LAST_MONTH':
|
|
3735
|
-
return success({ type: 'enum', value: DateEnumType.last_month });
|
|
3736
|
-
case 'THIS_MONTH':
|
|
3737
|
-
return success({ type: 'enum', value: DateEnumType.this_month });
|
|
3738
|
-
case 'NEXT_MONTH':
|
|
3739
|
-
return success({ type: 'enum', value: DateEnumType.next_month });
|
|
3740
|
-
case 'LAST_90_DAYS':
|
|
3741
|
-
return success({ type: 'enum', value: DateEnumType.last_90_days });
|
|
3742
|
-
case 'NEXT_90_DAYS':
|
|
3743
|
-
return success({ type: 'enum', value: DateEnumType.next_90_days });
|
|
3744
|
-
case 'LAST_QUARTER':
|
|
3745
|
-
return success({ type: 'enum', value: DateEnumType.last_quarter });
|
|
3746
|
-
case 'THIS_QUARTER':
|
|
3747
|
-
return success({ type: 'enum', value: DateEnumType.this_quarter });
|
|
3748
|
-
case 'NEXT_QUARTER':
|
|
3749
|
-
return success({ type: 'enum', value: DateEnumType.next_quarter });
|
|
3750
|
-
case 'LAST_YEAR':
|
|
3751
|
-
return success({ type: 'enum', value: DateEnumType.last_year });
|
|
3752
|
-
case 'THIS_YEAR':
|
|
3753
|
-
return success({ type: 'enum', value: DateEnumType.this_year });
|
|
3754
|
-
case 'NEXT_YEAR':
|
|
3755
|
-
return success({ type: 'enum', value: DateEnumType.next_year });
|
|
3756
|
-
default:
|
|
3757
|
-
return failure(message(`Unknown ${typeName} literal ${literalField.value}.`));
|
|
3758
|
-
}
|
|
3759
|
-
}
|
|
3760
|
-
return failure(message(`${typeName} input literal field must be an enum.`));
|
|
3761
|
-
}
|
|
3762
|
-
const rangeField = node.fields['range'];
|
|
3763
|
-
if (rangeField !== undefined) {
|
|
3764
|
-
if (is(rangeField, 'ObjectValue')) {
|
|
3765
|
-
const fieldsField = rangeField.fields;
|
|
3766
|
-
const last_n_months = fieldsField['last_n_months'];
|
|
3767
|
-
if (last_n_months !== undefined) {
|
|
3768
|
-
if (is(last_n_months, 'IntValue')) {
|
|
3769
|
-
const amount = -parseInt(last_n_months.value);
|
|
3770
|
-
const start = {
|
|
3771
|
-
type: ValueType.RelativeDate,
|
|
3772
|
-
unit: 'month',
|
|
3773
|
-
amount,
|
|
3774
|
-
offset: 'start',
|
|
3775
|
-
hasTime,
|
|
3776
|
-
};
|
|
3777
|
-
const end = {
|
|
3778
|
-
type: ValueType.RelativeDate,
|
|
3779
|
-
unit: 'month',
|
|
3780
|
-
amount: -1,
|
|
3781
|
-
offset: 'end',
|
|
3782
|
-
hasTime,
|
|
3783
|
-
};
|
|
3784
|
-
return success({ type: 'range', start, end });
|
|
3785
|
-
}
|
|
3786
|
-
}
|
|
3787
|
-
const next_n_months = fieldsField['next_n_months'];
|
|
3788
|
-
if (next_n_months !== undefined) {
|
|
3789
|
-
if (is(next_n_months, 'IntValue')) {
|
|
3790
|
-
const amount = parseInt(next_n_months.value);
|
|
3791
|
-
const start = {
|
|
3792
|
-
type: ValueType.RelativeDate,
|
|
3793
|
-
unit: 'month',
|
|
3794
|
-
amount: 1,
|
|
3795
|
-
offset: 'start',
|
|
3796
|
-
hasTime,
|
|
3797
|
-
};
|
|
3798
|
-
const end = {
|
|
3799
|
-
type: ValueType.RelativeDate,
|
|
3800
|
-
unit: 'month',
|
|
3801
|
-
amount,
|
|
3802
|
-
offset: 'end',
|
|
3803
|
-
hasTime,
|
|
3804
|
-
};
|
|
3805
|
-
return success({ type: 'range', start, end });
|
|
3806
|
-
}
|
|
3807
|
-
}
|
|
3808
|
-
const last_n_days = fieldsField['last_n_days'];
|
|
3809
|
-
if (last_n_days !== undefined) {
|
|
3810
|
-
if (is(last_n_days, 'IntValue')) {
|
|
3811
|
-
const amount = -parseInt(last_n_days.value);
|
|
3812
|
-
const start = {
|
|
3813
|
-
type: ValueType.RelativeDate,
|
|
3814
|
-
unit: 'day',
|
|
3815
|
-
amount,
|
|
3816
|
-
offset: undefined,
|
|
3817
|
-
hasTime,
|
|
3818
|
-
};
|
|
3819
|
-
const end = {
|
|
3820
|
-
type: ValueType.RelativeDate,
|
|
3821
|
-
unit: 'day',
|
|
3822
|
-
amount: 0,
|
|
3823
|
-
offset: undefined,
|
|
3824
|
-
hasTime,
|
|
3825
|
-
};
|
|
3826
|
-
return success({ type: 'range', start, end });
|
|
3827
|
-
}
|
|
3828
|
-
}
|
|
3829
|
-
const next_n_days = fieldsField['next_n_days'];
|
|
3830
|
-
if (next_n_days !== undefined) {
|
|
3831
|
-
if (is(next_n_days, 'IntValue')) {
|
|
3832
|
-
const amount = parseInt(next_n_days.value);
|
|
3833
|
-
const start = {
|
|
3834
|
-
type: ValueType.RelativeDate,
|
|
3835
|
-
unit: 'day',
|
|
3836
|
-
amount: 1,
|
|
3837
|
-
offset: undefined,
|
|
3838
|
-
hasTime,
|
|
3839
|
-
};
|
|
3840
|
-
const end = {
|
|
3841
|
-
type: ValueType.RelativeDate,
|
|
3842
|
-
unit: 'day',
|
|
3843
|
-
amount,
|
|
3844
|
-
offset: undefined,
|
|
3845
|
-
hasTime,
|
|
3846
|
-
};
|
|
3847
|
-
return success({ type: 'range', start, end });
|
|
3848
|
-
}
|
|
3849
|
-
}
|
|
3850
|
-
return failure(message(`invalid date range name`));
|
|
3851
|
-
}
|
|
3852
|
-
return failure(message(`${typeName} range must be an object.`));
|
|
3853
|
-
}
|
|
3854
|
-
return failure(message(`${typeName} input must include a value or literal field.`));
|
|
3855
|
-
}
|
|
3856
|
-
|
|
3857
|
-
function fieldsToOrderBy(fieldValues, joinAlias, apiName, input, joins) {
|
|
3858
|
-
const [node] = fieldValues;
|
|
3859
|
-
if (!isObjectValueNode$1(node)) {
|
|
3860
|
-
return failure([
|
|
3861
|
-
message('Parent OrderBy node should be an object.'),
|
|
3862
|
-
]);
|
|
3863
|
-
}
|
|
3864
|
-
const orderByContainers = [];
|
|
3865
|
-
const errors = [];
|
|
3866
|
-
const orderByResults = Object.entries(node.fields).map(([key, value]) => orderBy(key, value, joinAlias, apiName, input, joins));
|
|
3867
|
-
for (const result of orderByResults) {
|
|
3868
|
-
if (isSuccess(result)) {
|
|
3869
|
-
orderByContainers.push(result.value);
|
|
3870
|
-
}
|
|
3871
|
-
else {
|
|
3872
|
-
errors.push(result.error);
|
|
3873
|
-
}
|
|
3874
|
-
}
|
|
3875
|
-
if (errors.length) {
|
|
3876
|
-
return failure(errors);
|
|
3877
|
-
}
|
|
3878
|
-
return success(orderByContainers);
|
|
3879
|
-
}
|
|
3880
|
-
function orderBy(name, value, tableAlias, apiName, input, joins) {
|
|
3881
|
-
if (!isObjectValueNode$1(value)) {
|
|
3882
|
-
return failure(message('OrderBy node must be an object.'));
|
|
3883
|
-
}
|
|
3884
|
-
return fieldsOrderBy(name, value, tableAlias, apiName, input, joins);
|
|
3885
|
-
}
|
|
3886
|
-
function spanningOrderBy(fieldInfo, fieldNode, alias, input, joins) {
|
|
3887
|
-
const { apiName: fieldName, referenceToInfos, relationshipName } = fieldInfo;
|
|
3888
|
-
const referenceInfo = referenceToInfos[0];
|
|
3889
|
-
const jsonAlias = `${alias}.${relationshipName}`;
|
|
3890
|
-
const joinPredicate = referencePredicate(alias, jsonAlias, fieldName);
|
|
3891
|
-
if (referenceInfo === undefined) {
|
|
3892
|
-
return failure(message(`No reference info found for ${fieldName}`));
|
|
3893
|
-
}
|
|
3894
|
-
const { apiName } = referenceInfo;
|
|
3895
|
-
const join = {
|
|
3896
|
-
name: jsonAlias,
|
|
3897
|
-
to: alias,
|
|
3898
|
-
type: 'LEFT',
|
|
3899
|
-
conditions: [joinPredicate],
|
|
3900
|
-
};
|
|
3901
|
-
joins.push(join);
|
|
3902
|
-
const result = fieldsToOrderBy([fieldNode], jsonAlias, apiName, input, joins);
|
|
3903
|
-
if (!result.isSuccess) {
|
|
3904
|
-
return failure(result.error[0]);
|
|
3905
|
-
}
|
|
3906
|
-
const [container] = result.value;
|
|
3907
|
-
const { orderBy } = container;
|
|
3908
|
-
return success({ orderBy });
|
|
3909
|
-
}
|
|
3910
|
-
function fieldsOrderBy(fieldName, fieldNode, alias, apiName, input, joins) {
|
|
3911
|
-
const fieldInfoResult = getFieldInfo(apiName, fieldName, input);
|
|
3912
|
-
if (fieldInfoResult.isSuccess === false) {
|
|
3913
|
-
return failure(fieldInfoResult.error);
|
|
3914
|
-
}
|
|
3915
|
-
const fieldInfo = fieldInfoResult.value;
|
|
3916
|
-
if (fieldInfo === undefined) {
|
|
3917
|
-
return failure(message(`Field ${fieldName} for type ${apiName} not found.`));
|
|
3918
|
-
}
|
|
3919
|
-
if (fieldInfo.dataType === 'Reference' && fieldInfo.relationshipName === fieldName) {
|
|
3920
|
-
return spanningOrderBy(fieldInfo, fieldNode, alias, input, joins);
|
|
3921
|
-
}
|
|
3922
|
-
const result = orderByDetails(fieldNode, alias, fieldName);
|
|
3923
|
-
if (!result.isSuccess) {
|
|
3924
|
-
return failure(result.error);
|
|
3925
|
-
}
|
|
3926
|
-
return success({ orderBy: result.value });
|
|
3927
|
-
}
|
|
3928
|
-
function orderByDetails(fieldNode, jsonAlias, path) {
|
|
3929
|
-
const extract = { type: ValueType.Extract, jsonAlias, field: path };
|
|
3930
|
-
const orderField = fieldNode.fields['order'];
|
|
3931
|
-
const nullsField = fieldNode.fields['nulls'];
|
|
3932
|
-
const asc = isAsc(orderField);
|
|
3933
|
-
const nulls = nullsFirst(nullsField);
|
|
3934
|
-
if (asc.isSuccess === false) {
|
|
3935
|
-
return failure(asc.error);
|
|
3936
|
-
}
|
|
3937
|
-
if (nulls.isSuccess === false) {
|
|
3938
|
-
return failure(nulls.error);
|
|
3939
|
-
}
|
|
3940
|
-
return success({ asc: asc.value, extract, nullsFirst: nulls.value });
|
|
3941
|
-
}
|
|
3942
|
-
function isAsc(field) {
|
|
3943
|
-
if (field !== undefined) {
|
|
3944
|
-
if (is(field, 'EnumValue')) {
|
|
3945
|
-
switch (field.value) {
|
|
3946
|
-
case 'ASC':
|
|
3947
|
-
return success(true);
|
|
3948
|
-
case 'DESC':
|
|
3949
|
-
return success(false);
|
|
3950
|
-
default:
|
|
3951
|
-
return failure(message(`Unknown order enum ${field.value}.`));
|
|
3952
|
-
}
|
|
3953
|
-
}
|
|
3954
|
-
return failure(message(`OrderBy order field must be an enum.`));
|
|
3955
|
-
}
|
|
3956
|
-
return success(true);
|
|
3957
|
-
}
|
|
3958
|
-
function nullsFirst(field) {
|
|
3959
|
-
if (field !== undefined) {
|
|
3960
|
-
if (is(field, 'EnumValue')) {
|
|
3961
|
-
switch (field.value) {
|
|
3962
|
-
case 'FIRST':
|
|
3963
|
-
return success(true);
|
|
3964
|
-
case 'LAST':
|
|
3965
|
-
return success(false);
|
|
3966
|
-
default:
|
|
3967
|
-
return failure(message(`Unknown nulls enum ${field.value}.`));
|
|
3968
|
-
}
|
|
3969
|
-
}
|
|
3970
|
-
return failure(message(`OrderBy nulls field must be an enum.`));
|
|
3971
|
-
}
|
|
3972
|
-
return success(false);
|
|
3973
|
-
}
|
|
3974
|
-
function parseOrderBy(orderByArg, joinAlias, apiName, input, joins) {
|
|
3975
|
-
if (orderByArg === undefined) {
|
|
3976
|
-
return success([]);
|
|
3977
|
-
}
|
|
3978
|
-
return fieldsToOrderBy([orderByArg.value], joinAlias, apiName, input, joins);
|
|
3979
|
-
}
|
|
3980
|
-
|
|
3981
|
-
function scopeFilter(scopeArg, jsonAlias, apiName, input, joins) {
|
|
3982
|
-
if (scopeArg === undefined) {
|
|
3983
|
-
return success(undefined);
|
|
3984
|
-
}
|
|
3985
|
-
const value = scopeArg.value;
|
|
3986
|
-
if (value.kind !== 'EnumValue') {
|
|
3987
|
-
return failure(message('Scope type should be an EnumValueNode.'));
|
|
3988
|
-
}
|
|
3989
|
-
const scope = value.value;
|
|
3990
|
-
if (scope === 'MINE') {
|
|
3991
|
-
const fieldInfoResult = getFieldInfo(apiName, 'OwnerId', input.objectInfoMap);
|
|
3992
|
-
if (fieldInfoResult.isSuccess === false) {
|
|
3993
|
-
return failure(fieldInfoResult.error);
|
|
3994
|
-
}
|
|
3995
|
-
const fieldInfo = fieldInfoResult.value;
|
|
3996
|
-
if (fieldInfo === undefined) {
|
|
3997
|
-
return failure(message('Scope MINE requires the entity type to have an OwnerId field.'));
|
|
3998
|
-
}
|
|
3999
|
-
return success({
|
|
4000
|
-
type: PredicateType$1.comparison,
|
|
4001
|
-
left: {
|
|
4002
|
-
type: ValueType.Extract,
|
|
4003
|
-
jsonAlias,
|
|
4004
|
-
field: fieldInfo.apiName,
|
|
4005
|
-
},
|
|
4006
|
-
operator: ComparisonOperator.eq,
|
|
4007
|
-
right: stringLiteral(input.userId),
|
|
4008
|
-
});
|
|
4009
|
-
}
|
|
4010
|
-
if (scope === 'ASSIGNEDTOME') {
|
|
4011
|
-
if (apiName !== 'ServiceAppointment') {
|
|
4012
|
-
return failure(message('ASSIGNEDTOME can only be used with ServiceAppointment'));
|
|
4013
|
-
}
|
|
4014
|
-
return success(assignedToMe(input, jsonAlias, joins));
|
|
4015
|
-
}
|
|
4016
|
-
return failure(message(`Scope '${scope} is not supported.`));
|
|
4017
|
-
}
|
|
4018
|
-
/**
|
|
4019
|
-
* generates predicates and joins for 'assignToMe' scope
|
|
4020
|
-
* @param input
|
|
4021
|
-
* @param parentAlias it would be 'ServiceAppointment' if scope belongs to a top level record query.
|
|
4022
|
-
* @param joins it would be populated with joins
|
|
4023
|
-
* @returns a compound predicates which contains its api type predicates.
|
|
4024
|
-
*/
|
|
4025
|
-
function assignedToMe(input, parentAlias, joins) {
|
|
4026
|
-
const srApiName = 'ServiceResource';
|
|
4027
|
-
const arApiName = 'AssignedResource';
|
|
4028
|
-
const assignedResourceAlias = `${parentAlias}.${arApiName}`;
|
|
4029
|
-
const serviceResourceAlias = `${assignedResourceAlias}.${srApiName}`;
|
|
4030
|
-
const serviceAppointmentIdPredicate = comparison({
|
|
4031
|
-
type: ValueType.Extract,
|
|
4032
|
-
jsonAlias: assignedResourceAlias,
|
|
4033
|
-
field: 'ServiceAppointmentId',
|
|
4034
|
-
}, ComparisonOperator.eq, { type: ValueType.Extract, jsonAlias: parentAlias, field: 'Id' });
|
|
4035
|
-
// Uses 'inner join' instead of 'left join', otherwise the query plan tries to scan 'lds_data' table, which might be due to the fact that 'AssignedResource is child of both 'ServiceAppointment' and
|
|
4036
|
-
// 'ServiceResource'. It is fine to use like this since its sole purpose is to identify the 'ServiceAppointment'. It won't have the missing result behavior
|
|
4037
|
-
// since alias 'ServiceAppointment.AssignedResource' and 'ServiceAppointment.AssignedResource.ServiceResource' are crafted in the way that they will not collide with any other relationship.
|
|
4038
|
-
const assignedResourceToServiceAppointmentJoin = {
|
|
4039
|
-
name: assignedResourceAlias,
|
|
4040
|
-
to: parentAlias,
|
|
4041
|
-
conditions: [serviceAppointmentIdPredicate],
|
|
4042
|
-
type: 'INNER',
|
|
4043
|
-
};
|
|
4044
|
-
joins.push(assignedResourceToServiceAppointmentJoin);
|
|
4045
|
-
const serviceResourceIdPredicate = comparison({ type: ValueType.Extract, jsonAlias: assignedResourceAlias, field: 'ServiceResourceId' }, ComparisonOperator.eq, { type: ValueType.Extract, jsonAlias: serviceResourceAlias, field: 'Id' });
|
|
4046
|
-
const serviceResourceToAssignedResourceJoin = {
|
|
4047
|
-
name: serviceResourceAlias,
|
|
4048
|
-
to: assignedResourceAlias,
|
|
4049
|
-
conditions: [serviceResourceIdPredicate],
|
|
4050
|
-
type: 'INNER',
|
|
4051
|
-
};
|
|
4052
|
-
joins.push(serviceResourceToAssignedResourceJoin);
|
|
4053
|
-
const userIdPredicate = comparison({ type: ValueType.Extract, jsonAlias: serviceResourceAlias, field: 'RelatedRecordId' }, ComparisonOperator.eq, stringLiteral(input.userId));
|
|
4054
|
-
return userIdPredicate;
|
|
4055
|
-
}
|
|
4056
|
-
|
|
4057
|
-
const REFERENCE_NAME_KEY = 'Reference';
|
|
4058
|
-
const API_NAME_KEY = 'ApiName';
|
|
4059
|
-
const { Extract } = ValueType;
|
|
4060
|
-
function isSpanningField(value) {
|
|
4061
|
-
return value.type === FieldType.Spanning;
|
|
4062
|
-
}
|
|
4063
|
-
function luvioSelections(node) {
|
|
4064
|
-
return node.luvioSelections === undefined ? [] : node.luvioSelections;
|
|
4065
|
-
}
|
|
4066
|
-
function hasArguments(node) {
|
|
4067
|
-
return node.arguments !== undefined;
|
|
4068
|
-
}
|
|
4069
|
-
function named(name) {
|
|
4070
|
-
return (node) => node.name === name;
|
|
4071
|
-
}
|
|
4072
|
-
function spanningField(node, fieldInfo, names, parentAlias, input, joins) {
|
|
4073
|
-
const parentQuery = spanningRecordQuery(node, fieldInfo, names, parentAlias, input, joins);
|
|
4074
|
-
if (parentQuery.isSuccess === false) {
|
|
4075
|
-
return failure(parentQuery.error);
|
|
4076
|
-
}
|
|
4077
|
-
const field = {
|
|
4078
|
-
type: FieldType.Spanning,
|
|
4079
|
-
path: '',
|
|
4080
|
-
spanning: parentQuery.value,
|
|
4081
|
-
};
|
|
4082
|
-
return success(field);
|
|
4083
|
-
}
|
|
4084
|
-
function scalarField(node, names, jsonAlias, targetDataType) {
|
|
4085
|
-
const outputNames = names.concat(node.name);
|
|
4086
|
-
return [node]
|
|
4087
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4088
|
-
.filter(isScalarFieldNode)
|
|
4089
|
-
.map((field) => {
|
|
4090
|
-
const outputPath = outputNames.concat(field.name).join('.');
|
|
4091
|
-
const extract = {
|
|
4092
|
-
type: Extract,
|
|
4093
|
-
jsonAlias,
|
|
4094
|
-
field: node.name,
|
|
4095
|
-
subfield: field.name,
|
|
4096
|
-
};
|
|
4097
|
-
return { type: FieldType.Scalar, extract, path: outputPath, targetDataType };
|
|
4098
|
-
});
|
|
4099
|
-
}
|
|
4100
|
-
function selectionToQueryField(node, names, parentApiName, parentAlias, input, joins) {
|
|
4101
|
-
if (!isObjectFieldSelection(node) && !isCustomFieldNode(node) && !isScalarFieldNode(node)) {
|
|
4102
|
-
return failure([message(`Node type ${node.kind} is not a valid record field type.`)]);
|
|
4103
|
-
}
|
|
4104
|
-
// The following graphql fields are root properties on the record representation, rather
|
|
4105
|
-
// than fields found in the `fields` bag, so they need special handling.
|
|
4106
|
-
const rootPropertyFields = ['ApiName', 'Id', 'WeakEtag'];
|
|
4107
|
-
if (rootPropertyFields.includes(node.name)) {
|
|
4108
|
-
const outputPath = names.concat(node.name).join('.');
|
|
4109
|
-
const extract = {
|
|
4110
|
-
type: Extract,
|
|
4111
|
-
jsonAlias: parentAlias,
|
|
4112
|
-
field: node.name,
|
|
4113
|
-
};
|
|
4114
|
-
return success([{ type: FieldType.Scalar, extract, path: outputPath }]);
|
|
4115
|
-
}
|
|
4116
|
-
// RecordTypeId is special field that is pulled from the root properties of record rep
|
|
4117
|
-
// but is not returned as a scalar value.
|
|
4118
|
-
if (node.name === 'RecordTypeId') {
|
|
4119
|
-
return success(scalarField({ ...node, kind: 'ObjectFieldSelection' }, names, parentAlias, 'String'));
|
|
4120
|
-
}
|
|
4121
|
-
const fieldInfoResult = getFieldInfo(parentApiName, node.name, input.objectInfoMap);
|
|
4122
|
-
const relationshipInfoResult = getRelationshipInfo(parentApiName, node.name, input.objectInfoMap);
|
|
4123
|
-
if (fieldInfoResult.isSuccess === false) {
|
|
4124
|
-
return failure([fieldInfoResult.error]);
|
|
4125
|
-
}
|
|
4126
|
-
if (relationshipInfoResult.isSuccess === false) {
|
|
4127
|
-
return failure([relationshipInfoResult.error]);
|
|
4128
|
-
}
|
|
4129
|
-
const fieldInfo = fieldInfoResult.value;
|
|
4130
|
-
const relationshipInfo = relationshipInfoResult.value;
|
|
4131
|
-
if (fieldInfo === undefined && relationshipInfo === undefined) {
|
|
4132
|
-
return failure([message(`Field ${node.name} for type ${parentApiName} not found.`)]);
|
|
4133
|
-
}
|
|
4134
|
-
if (fieldInfo !== undefined) {
|
|
4135
|
-
//This is a spanning field
|
|
4136
|
-
if (fieldInfo.dataType === REFERENCE_NAME_KEY) {
|
|
4137
|
-
if (!isObjectFieldSelection(node) && !isCustomFieldNode(node)) {
|
|
4138
|
-
return failure([
|
|
4139
|
-
message(`Node type ${node.kind} is not a valid reference field type.`),
|
|
4140
|
-
]);
|
|
4141
|
-
}
|
|
4142
|
-
const selection = { ...node, kind: 'ObjectFieldSelection' };
|
|
4143
|
-
if (fieldInfo.relationshipName === node.name) {
|
|
4144
|
-
return spanningField(selection, fieldInfo, names, parentAlias, input, joins).map((field) => [field]);
|
|
4145
|
-
}
|
|
4146
|
-
return success(scalarField(selection, names, parentAlias, fieldInfo.dataType));
|
|
4147
|
-
}
|
|
4148
|
-
//Scalar field
|
|
4149
|
-
if (isScalarDataType(fieldInfo.dataType)) {
|
|
4150
|
-
if (!isObjectFieldSelection(node)) {
|
|
4151
|
-
return failure([
|
|
4152
|
-
message(`Node type ${node.kind} is not a valid scalar field type.`),
|
|
4153
|
-
]);
|
|
4154
|
-
}
|
|
4155
|
-
return success(scalarField(node, names, parentAlias, fieldInfo.dataType));
|
|
4156
|
-
}
|
|
4157
|
-
// If we've gotten this far, we're looking at a scalar field with a datatype
|
|
4158
|
-
// that we haven't otherwise identified
|
|
4159
|
-
return failure([
|
|
4160
|
-
message(`Field with datatype ${fieldInfo.dataType} is not a valid scalar field type.`),
|
|
4161
|
-
]);
|
|
4162
|
-
}
|
|
4163
|
-
if (relationshipInfo === undefined) {
|
|
4164
|
-
return failure([message(`Relationship ${node.name} for type ${parentApiName} not found.`)]);
|
|
4165
|
-
}
|
|
4166
|
-
//Field is a connection to a child record type
|
|
4167
|
-
if (!isCustomFieldNode(node)) {
|
|
4168
|
-
return failure([message(`Node type ${node.kind} is not a valid child field type.`)]);
|
|
4169
|
-
}
|
|
4170
|
-
const fieldPath = names.concat(node.name);
|
|
4171
|
-
const edgePath = fieldPath.concat('edges');
|
|
4172
|
-
return childRecordQuery(node, relationshipInfo, parentAlias, input).map((query) => {
|
|
4173
|
-
return [{ type: FieldType.Child, path: edgePath.join('.'), connection: query }];
|
|
4174
|
-
});
|
|
4175
|
-
}
|
|
4176
|
-
function recordFields(luvioSelections, names, parentApiName, parentAlias, input, joins) {
|
|
4177
|
-
const results = luvioSelections.map((selection) => selectionToQueryField(selection, names, parentApiName, parentAlias, input, joins));
|
|
4178
|
-
const fields = results.filter(isSuccess).reduce(flatMap(values$5), []);
|
|
4179
|
-
const fails = results.filter(isFailure).reduce(flatMap(errors), []);
|
|
4180
|
-
if (fails.length > 0) {
|
|
4181
|
-
return failure(fails);
|
|
4182
|
-
}
|
|
4183
|
-
return success(fields);
|
|
4184
|
-
}
|
|
4185
|
-
function queryContainer(inputFields, jsonAlias, apiName, additionalPredicates) {
|
|
4186
|
-
if (inputFields.isSuccess === false) {
|
|
4187
|
-
return failure(inputFields.error);
|
|
4188
|
-
}
|
|
4189
|
-
const extract = {
|
|
4190
|
-
type: Extract,
|
|
4191
|
-
jsonAlias,
|
|
4192
|
-
field: API_NAME_KEY,
|
|
4193
|
-
};
|
|
4194
|
-
//query's type predicate
|
|
4195
|
-
const typePredicate = comparison(extract, ComparisonOperator.eq, stringLiteral(apiName, true, true));
|
|
4196
|
-
const typePredicates = [];
|
|
4197
|
-
typePredicates.push(typePredicate);
|
|
4198
|
-
// puts 'typePredicate' in front so that 'apiName' predicate is always in the front of the SQL
|
|
4199
|
-
const fields = inputFields.value
|
|
4200
|
-
.map((field) => {
|
|
4201
|
-
if (isSpanningField(field)) {
|
|
4202
|
-
return field.spanning.fields;
|
|
4203
|
-
}
|
|
4204
|
-
else
|
|
4205
|
-
return [field];
|
|
4206
|
-
})
|
|
4207
|
-
.reduce(flatten$1, []);
|
|
4208
|
-
const predicates = typePredicates.concat(additionalPredicates);
|
|
4209
|
-
return success({ fields, predicates });
|
|
4210
|
-
}
|
|
4211
|
-
//A field or fields from a parent record
|
|
4212
|
-
function spanningRecordQuery(selection, fieldInfo, names, parentAlias, input, joins) {
|
|
4213
|
-
const { apiName: fieldName, referenceToInfos: referenceInfos, relationshipName } = fieldInfo;
|
|
4214
|
-
if (relationshipName === null) {
|
|
4215
|
-
return failure([message(`Relationship name is missing for ${fieldName}`)]);
|
|
4216
|
-
}
|
|
4217
|
-
const alias = `${parentAlias}.${relationshipName}`;
|
|
4218
|
-
const selections = selection.luvioSelections || [];
|
|
4219
|
-
const outPathNames = names.concat(relationshipName);
|
|
4220
|
-
const referenceToInfo = referenceInfos[0];
|
|
4221
|
-
if (referenceToInfo === undefined) {
|
|
4222
|
-
return failure([message(`No reference info found for ${fieldName}`)]);
|
|
4223
|
-
}
|
|
4224
|
-
const { apiName } = referenceToInfo;
|
|
4225
|
-
const joinPredicate = referencePredicate(parentAlias, alias, fieldName);
|
|
4226
|
-
const join = {
|
|
4227
|
-
name: alias,
|
|
4228
|
-
type: 'LEFT',
|
|
4229
|
-
to: parentAlias,
|
|
4230
|
-
conditions: [joinPredicate],
|
|
4231
|
-
};
|
|
4232
|
-
joins.push(join);
|
|
4233
|
-
// pushes the join in order
|
|
4234
|
-
const internalFields = recordFields(selections, outPathNames, apiName, alias, input, joins);
|
|
4235
|
-
if (internalFields.isSuccess === false) {
|
|
4236
|
-
return failure(internalFields.error);
|
|
4237
|
-
}
|
|
4238
|
-
const fields = internalFields.value
|
|
4239
|
-
.map((field) => {
|
|
4240
|
-
// add child predicates as part of its own predicates
|
|
4241
|
-
if (isSpanningField(field)) {
|
|
4242
|
-
return field.spanning.fields;
|
|
4243
|
-
}
|
|
4244
|
-
else
|
|
4245
|
-
return [field];
|
|
4246
|
-
})
|
|
4247
|
-
.reduce(flatten$1, []);
|
|
4248
|
-
return success({
|
|
4249
|
-
type: 'spanning',
|
|
4250
|
-
apiName,
|
|
4251
|
-
fields,
|
|
4252
|
-
alias,
|
|
4253
|
-
});
|
|
4254
|
-
}
|
|
4255
|
-
function childRecordQuery(selection, relationshipInfo, parentAlias, input) {
|
|
4256
|
-
const { relationshipName, childObjectApiName, fieldName } = relationshipInfo;
|
|
4257
|
-
const alias = `${parentAlias}.${relationshipName}`;
|
|
4258
|
-
const apiName = childObjectApiName;
|
|
4259
|
-
//parent predicate
|
|
4260
|
-
let additionalPredicates = [referencePredicate(alias, parentAlias, fieldName)];
|
|
4261
|
-
return recordQuery(selection, apiName, alias, additionalPredicates, input);
|
|
4262
|
-
}
|
|
4263
|
-
function parseFirst(arg) {
|
|
4264
|
-
if (arg === undefined) {
|
|
4265
|
-
return success(undefined);
|
|
4266
|
-
}
|
|
4267
|
-
if (arg.value.kind !== 'IntValue') {
|
|
4268
|
-
return failure(message('first type should be an IntValue.'));
|
|
4269
|
-
}
|
|
4270
|
-
return success(parseInt(arg.value.value));
|
|
4271
|
-
}
|
|
4272
|
-
function recordQuery(selection, apiName, alias, predicates, input) {
|
|
4273
|
-
const args = selection.arguments || [];
|
|
4274
|
-
const whereArg = args.filter(named('where'))[0];
|
|
4275
|
-
const scopeArg = args.filter(named('scope'))[0];
|
|
4276
|
-
const orderByArg = args.filter(named('orderBy'))[0];
|
|
4277
|
-
const firstArg = args.filter(named('first'))[0];
|
|
4278
|
-
const joinsGroup = [];
|
|
4279
|
-
const firstResult = parseFirst(firstArg);
|
|
4280
|
-
const orderByJoins = [];
|
|
4281
|
-
const orderByResult = parseOrderBy(orderByArg, alias, apiName, input.objectInfoMap, orderByJoins);
|
|
4282
|
-
const filterJoins = [];
|
|
4283
|
-
const whereResult = recordFilter(whereArg, alias, apiName, input.objectInfoMap, filterJoins, input.draftFunctions);
|
|
4284
|
-
const scopeJoins = [];
|
|
4285
|
-
const scopeResult = scopeFilter(scopeArg, alias, apiName, input, scopeJoins);
|
|
4286
|
-
let additionalPredicates = [];
|
|
4287
|
-
if (orderByResult.isSuccess === false) {
|
|
4288
|
-
return failure(orderByResult.error);
|
|
4289
|
-
}
|
|
4290
|
-
if (whereResult.isSuccess === false) {
|
|
4291
|
-
return failure(whereResult.error);
|
|
4292
|
-
}
|
|
4293
|
-
if (scopeResult.isSuccess === false) {
|
|
4294
|
-
return failure([scopeResult.error]);
|
|
4295
|
-
}
|
|
4296
|
-
if (firstResult.isSuccess === false) {
|
|
4297
|
-
return failure([firstResult.error]);
|
|
4298
|
-
}
|
|
4299
|
-
if (scopeResult.value !== undefined) {
|
|
4300
|
-
additionalPredicates.push(scopeResult.value);
|
|
4301
|
-
}
|
|
4302
|
-
if (whereResult.value !== undefined) {
|
|
4303
|
-
const { predicate } = whereResult.value;
|
|
4304
|
-
//it is possible that all the predicates falls into 'joins'
|
|
4305
|
-
if (predicate !== undefined) {
|
|
4306
|
-
additionalPredicates.push(predicate);
|
|
4307
|
-
}
|
|
4308
|
-
}
|
|
4309
|
-
//make our way down to the field-containing ast node
|
|
4310
|
-
const node = [selection]
|
|
4311
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4312
|
-
.filter(isObjectFieldSelection)
|
|
4313
|
-
.filter(named('edges'))
|
|
4314
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4315
|
-
.filter(isCustomFieldNode)
|
|
4316
|
-
.filter(named('node'))[0];
|
|
4317
|
-
const queryJoins = [];
|
|
4318
|
-
//look for scalar fields, parent fields or children
|
|
4319
|
-
const internalFields = recordFields(node.luvioSelections || [], ['node'], apiName, alias, input, queryJoins);
|
|
4320
|
-
joinsGroup.push(filterJoins);
|
|
4321
|
-
joinsGroup.push(orderByJoins);
|
|
4322
|
-
joinsGroup.push(queryJoins);
|
|
4323
|
-
joinsGroup.push(scopeJoins);
|
|
4324
|
-
const allJoins = joinsGroup.reduce((accumulatedJoins, curJoins) => {
|
|
4325
|
-
return mergeJoins(accumulatedJoins, curJoins);
|
|
4326
|
-
}, []);
|
|
4327
|
-
const extract = { type: ValueType.Extract, jsonAlias: alias, field: 'drafts' };
|
|
4328
|
-
const draftsField = { type: FieldType.Scalar, extract, path: 'node._drafts' };
|
|
4329
|
-
const idExtract = { type: ValueType.Extract, jsonAlias: alias, field: 'Id' };
|
|
4330
|
-
const idField = { type: FieldType.Scalar, extract: idExtract, path: 'node.Id' };
|
|
4331
|
-
// When the exclude stale records gate is open and there is a root timestamp
|
|
4332
|
-
// in the parser input, inject an additional predicate to limit the search
|
|
4333
|
-
// to records that either have drafts associated to them or were ingested at
|
|
4334
|
-
// least as recently as the query.
|
|
4335
|
-
if (excludeStaleRecordsGate.isOpen({ fallback: false }) && input.rootTimestamp !== undefined) {
|
|
4336
|
-
const timestampCheck = {
|
|
4337
|
-
type: PredicateType$1.comparison,
|
|
4338
|
-
left: {
|
|
4339
|
-
type: ValueType.Extract,
|
|
4340
|
-
jsonAlias: alias,
|
|
4341
|
-
field: 'ingestionTimestamp',
|
|
4342
|
-
metadata: true,
|
|
4343
|
-
},
|
|
4344
|
-
operator: ComparisonOperator.gte,
|
|
4345
|
-
right: { type: ValueType.IntLiteral, value: input.rootTimestamp },
|
|
4346
|
-
};
|
|
4347
|
-
const isDraft = {
|
|
4348
|
-
type: PredicateType$1.nullComparison,
|
|
4349
|
-
left: { type: ValueType.Extract, jsonAlias: alias, field: 'drafts' },
|
|
4350
|
-
operator: NullComparisonOperator.isNot,
|
|
4351
|
-
};
|
|
4352
|
-
predicates.push({
|
|
4353
|
-
type: PredicateType$1.compound,
|
|
4354
|
-
operator: CompoundOperator.or,
|
|
4355
|
-
children: [timestampCheck, isDraft],
|
|
4356
|
-
});
|
|
4357
|
-
}
|
|
4358
|
-
return queryContainer(internalFields, alias, apiName, predicates).map((result) => {
|
|
4359
|
-
const { fields, predicates } = result;
|
|
4360
|
-
const allFields = removeDuplicateFields(fields.concat(...[draftsField, idField]));
|
|
4361
|
-
// predicate for the top level 'recordQuery'
|
|
4362
|
-
const predicate = combinePredicates([...additionalPredicates, ...predicates].filter(isDefined), CompoundOperator.and);
|
|
4363
|
-
const first = firstResult.value;
|
|
4364
|
-
const orderBy = orderByResult.value === undefined
|
|
4365
|
-
? []
|
|
4366
|
-
: orderByResult.value.map((result) => result.orderBy);
|
|
4367
|
-
return {
|
|
4368
|
-
fields: allFields,
|
|
4369
|
-
first,
|
|
4370
|
-
orderBy,
|
|
4371
|
-
apiName,
|
|
4372
|
-
alias,
|
|
4373
|
-
predicate,
|
|
4374
|
-
joins: allJoins,
|
|
4375
|
-
};
|
|
4376
|
-
});
|
|
4377
|
-
}
|
|
4378
|
-
// merge 2 joins. it honors join type from the 2nd param.
|
|
4379
|
-
function mergeJoins(lJoins, rJoins) {
|
|
4380
|
-
const mergedJoins = [...lJoins];
|
|
4381
|
-
for (const join of rJoins) {
|
|
4382
|
-
const existingJoin = mergedJoins.find((newJoin) => join.name === newJoin.name && join.to === newJoin.to);
|
|
4383
|
-
if (existingJoin !== undefined) {
|
|
4384
|
-
const existingConditions = existingJoin.conditions;
|
|
4385
|
-
const allCondition = [...existingConditions, ...join.conditions];
|
|
4386
|
-
const mergedJoin = {
|
|
4387
|
-
name: join.name,
|
|
4388
|
-
to: join.to,
|
|
4389
|
-
type: join.type,
|
|
4390
|
-
conditions: removeDuplicatePredicates(allCondition),
|
|
4391
|
-
};
|
|
4392
|
-
mergedJoins.splice(mergedJoins.indexOf(existingJoin), 1, mergedJoin);
|
|
4393
|
-
}
|
|
4394
|
-
else {
|
|
4395
|
-
mergedJoins.push(join);
|
|
4396
|
-
}
|
|
4397
|
-
}
|
|
4398
|
-
return mergedJoins;
|
|
4399
|
-
}
|
|
4400
|
-
function rootRecordQuery(selection, input) {
|
|
4401
|
-
const alias = selection.name;
|
|
4402
|
-
const apiName = selection.name;
|
|
4403
|
-
if (input.objectInfoMap[alias] === undefined) {
|
|
4404
|
-
return failure([missingObjectInfo(apiName)]);
|
|
4405
|
-
}
|
|
4406
|
-
// When the exclude stale records gate is open and the query has an
|
|
4407
|
-
// ingestion timestamp in its cache metadata, associate that with the input
|
|
4408
|
-
// so it can later be used to limit the search to records were ingested at
|
|
4409
|
-
// least as recently as the query.
|
|
4410
|
-
if (excludeStaleRecordsGate.isOpen({ fallback: false })) {
|
|
4411
|
-
const key = input.connectionKeyBuilder(selection, input.config.variables);
|
|
4412
|
-
const queryMetadata = input.metadata[key];
|
|
4413
|
-
// If there is no metadata for this query or it somehow lacks a timestamp
|
|
4414
|
-
// skip setting the root timestamp
|
|
4415
|
-
if (queryMetadata !== undefined && queryMetadata.ingestionTimestamp !== undefined) {
|
|
4416
|
-
const timestamp = Number(queryMetadata.ingestionTimestamp);
|
|
4417
|
-
if (!isNaN(timestamp)) {
|
|
4418
|
-
input.rootTimestamp = timestamp;
|
|
4419
|
-
}
|
|
4420
|
-
}
|
|
4421
|
-
}
|
|
4422
|
-
return recordQuery(selection, alias, apiName, [], input);
|
|
4423
|
-
}
|
|
4424
|
-
function rootQuery(recordNodes, input) {
|
|
4425
|
-
const results = recordNodes.map((record) => rootRecordQuery(record, input));
|
|
4426
|
-
const connections = results.filter(isSuccess).map(values$5);
|
|
4427
|
-
const fails = results.filter(isFailure).reduce(flatMap(errors), []);
|
|
4428
|
-
if (fails.length > 0) {
|
|
4429
|
-
return failure(fails);
|
|
4430
|
-
}
|
|
4431
|
-
return success({ type: 'root', connections, queryKeys: input.queryKeys });
|
|
4432
|
-
}
|
|
4433
|
-
/**
|
|
4434
|
-
* Given a connection array of LuvioSelectionCustomFieldNode
|
|
4435
|
-
* it will go through to find a nested connection and check that it has arguments
|
|
4436
|
-
* if it has no arguments or no luvioSelections it will return the next level as an empty arrray
|
|
4437
|
-
* @param {LuvioSelectionCustomFieldNode[]} previousLevel
|
|
4438
|
-
* @returns LuvioSelectionCustomFieldNode[]
|
|
4439
|
-
*/
|
|
4440
|
-
function getNextLevelOfArguments(previousLevel) {
|
|
4441
|
-
return previousLevel
|
|
4442
|
-
.map((node) => {
|
|
4443
|
-
if (node.luvioSelections) {
|
|
4444
|
-
const records = node.luvioSelections
|
|
4445
|
-
.filter(isObjectFieldSelection)
|
|
4446
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4447
|
-
.filter(isCustomFieldNode)
|
|
4448
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4449
|
-
.filter(isCustomFieldNode)
|
|
4450
|
-
.filter(hasArguments);
|
|
4451
|
-
return records;
|
|
4452
|
-
}
|
|
4453
|
-
return [];
|
|
4454
|
-
})
|
|
4455
|
-
.reduce(flatten$1);
|
|
4456
|
-
}
|
|
4457
|
-
/**
|
|
4458
|
-
* Return all LuvioSelectionCustomFieldNodes that have arguments on them
|
|
4459
|
-
* @param {LuvioDocumentNode} document
|
|
4460
|
-
* @returns LuvioSelectionCustomFieldNode[]
|
|
4461
|
-
*/
|
|
4462
|
-
function findAllRecordsThatHaveArugments(document) {
|
|
4463
|
-
const topLevelRecords = findRecordsWithVariableArguments(document);
|
|
4464
|
-
let allRecordsWithVariables = [topLevelRecords];
|
|
4465
|
-
let lastLevel = topLevelRecords;
|
|
4466
|
-
while (lastLevel.length > 0) {
|
|
4467
|
-
lastLevel = getNextLevelOfArguments(lastLevel);
|
|
4468
|
-
if (lastLevel.length > 0) {
|
|
4469
|
-
allRecordsWithVariables.push(lastLevel);
|
|
4470
|
-
}
|
|
4471
|
-
}
|
|
4472
|
-
return allRecordsWithVariables.reduce(flatten$1);
|
|
4473
|
-
}
|
|
4474
|
-
/**
|
|
4475
|
-
* Given an AST Document and GQL Variables it will transform the argument nodes
|
|
4476
|
-
* and the variables listed in them to a parseable gql string that can be ran throug
|
|
4477
|
-
* @param {LuvioDocumentNode} document
|
|
4478
|
-
* @param {Record<string, any>} variables
|
|
4479
|
-
* @returns string
|
|
4480
|
-
*/
|
|
4481
|
-
function generateVariableGQLQuery(document, variables) {
|
|
4482
|
-
return (findAllRecordsThatHaveArugments(document)
|
|
4483
|
-
.map((node) => {
|
|
4484
|
-
const gqlArguments = node.arguments
|
|
4485
|
-
? node.arguments
|
|
4486
|
-
.map((args) => {
|
|
4487
|
-
const { value, name } = args;
|
|
4488
|
-
//The variable is on the top level, for example `where: ${filter}`
|
|
4489
|
-
if (is(value, 'Variable')) {
|
|
4490
|
-
return generateVariableNodeQuery(value, name, name, variables);
|
|
4491
|
-
}
|
|
4492
|
-
else if (isObjectValueNode$1(value)) {
|
|
4493
|
-
return generateVariableSubQuery(value, name, name, variables);
|
|
4494
|
-
}
|
|
4495
|
-
})
|
|
4496
|
-
.filter(Boolean)
|
|
4497
|
-
: undefined;
|
|
4498
|
-
return gqlArguments
|
|
4499
|
-
? `${node.name} ${gqlArguments.length > 0 ? '(' + gqlArguments.join(',') + ')' : ''} { Id }`
|
|
4500
|
-
: '';
|
|
4501
|
-
})
|
|
4502
|
-
//remove empty strings
|
|
4503
|
-
.filter(Boolean)
|
|
4504
|
-
//insert record qurries into brackets
|
|
4505
|
-
.reduce((accu, query) => {
|
|
4506
|
-
const length = accu.length;
|
|
4507
|
-
return [accu.slice(0, 1), query, accu.slice(1, length)].join('');
|
|
4508
|
-
}, '{ }'));
|
|
4509
|
-
}
|
|
4510
|
-
/**
|
|
4511
|
-
* Given a LuvioValueNode, generates a sql with its variable node replaced with actual value.
|
|
4512
|
-
* @param valueNode G
|
|
4513
|
-
* @param name
|
|
4514
|
-
* @param type
|
|
4515
|
-
* @param variables
|
|
4516
|
-
* @returns
|
|
4517
|
-
*/
|
|
4518
|
-
function generateVariableSubQuery(valueNode, name, type, variables) {
|
|
4519
|
-
switch (valueNode.kind) {
|
|
4520
|
-
case Kind$1.OBJECT: {
|
|
4521
|
-
// For example, `{ Id: { eq: $draftId } }` is a `ObjectValueNode`, which has field keys 'Id'
|
|
4522
|
-
const resultQuery = keys$7(valueNode.fields)
|
|
4523
|
-
.map((key) => generateVariableSubQuery(valueNode.fields[key], key, type, variables))
|
|
4524
|
-
.filter((subquery) => subquery.length > 0)
|
|
4525
|
-
.join(',');
|
|
4526
|
-
if (resultQuery.length > 0) {
|
|
4527
|
-
return `${name}: {${resultQuery}}`;
|
|
4528
|
-
}
|
|
4529
|
-
return resultQuery;
|
|
4530
|
-
}
|
|
4531
|
-
case Kind$1.VARIABLE:
|
|
4532
|
-
return generateVariableNodeQuery(valueNode, name, type, variables);
|
|
4533
|
-
default:
|
|
4534
|
-
return '';
|
|
4535
|
-
}
|
|
4536
|
-
}
|
|
4537
|
-
// Generate a sql for the variable node with its actual value.
|
|
4538
|
-
function generateVariableNodeQuery(value, name, type, variables) {
|
|
4539
|
-
const variable = variables[value.name];
|
|
4540
|
-
if (variable) {
|
|
4541
|
-
const jsonString = JSON.stringify(variable);
|
|
4542
|
-
const buildRecordQueryString = (name, query, transform) => {
|
|
4543
|
-
return `${name}: ${transform(query)}`;
|
|
4544
|
-
};
|
|
4545
|
-
switch (type) {
|
|
4546
|
-
case 'scope':
|
|
4547
|
-
case 'orderBy':
|
|
4548
|
-
return buildRecordQueryString(name, jsonString, removeAllQuotations);
|
|
4549
|
-
default:
|
|
4550
|
-
return buildRecordQueryString(name, jsonString, removeQuotationsFromKeys);
|
|
4551
|
-
}
|
|
4552
|
-
}
|
|
4553
|
-
return '';
|
|
4554
|
-
}
|
|
4555
|
-
/**
|
|
4556
|
-
* Given an AST with variables
|
|
4557
|
-
* Swap out the LuvioArgumentNodes on the original AST with ones generated from its variables.
|
|
4558
|
-
* @param {LuvioDocumentNode} document
|
|
4559
|
-
* @param {Record<string, any>} variables
|
|
4560
|
-
*/
|
|
4561
|
-
function swapVariableArguments(document, variables) {
|
|
4562
|
-
// dont run if no variables exist
|
|
4563
|
-
if (Object.keys(variables).length > 0) {
|
|
4564
|
-
const variablesGqlString = generateVariableGQLQuery(document, variables);
|
|
4565
|
-
const gqlParsedVariables = parseAndVisit(variablesGqlString);
|
|
4566
|
-
const variableNodes = findTopLevelVariableDocuments(gqlParsedVariables);
|
|
4567
|
-
const allArgumentRecords = findAllRecordsThatHaveArugments(document);
|
|
4568
|
-
// replace each variable argument node with the generated one of the same name
|
|
4569
|
-
variableNodes.forEach((node) => {
|
|
4570
|
-
const { name } = node;
|
|
4571
|
-
const first = allArgumentRecords.find((n) => n.name === name);
|
|
4572
|
-
if (first) {
|
|
4573
|
-
const swappedArgments = swapArgumentWithVariableNodes(node.arguments, first.arguments);
|
|
4574
|
-
first.arguments = swappedArgments ? swappedArgments : [];
|
|
4575
|
-
}
|
|
4576
|
-
});
|
|
4577
|
-
}
|
|
4578
|
-
}
|
|
4579
|
-
// Replaces the variable node in original LuvioArgumentNode with the actual value in the swapped node with the same path.
|
|
4580
|
-
function swapArgumentWithVariableNodes(swapped, original) {
|
|
4581
|
-
if (swapped === undefined || original === undefined) {
|
|
4582
|
-
return original;
|
|
4583
|
-
}
|
|
4584
|
-
return original.map((x) => {
|
|
4585
|
-
const targetNode = swapped.find((y) => y.name === x.name);
|
|
4586
|
-
if (targetNode === undefined) {
|
|
4587
|
-
return x;
|
|
4588
|
-
}
|
|
4589
|
-
if (x.value.kind === 'Variable') {
|
|
4590
|
-
return targetNode;
|
|
4591
|
-
}
|
|
4592
|
-
swapValueNodeWithVariableNodes(x.value, targetNode.value);
|
|
4593
|
-
return x;
|
|
4594
|
-
});
|
|
4595
|
-
}
|
|
4596
|
-
function swapValueNodeWithVariableNodes(original, swapped) {
|
|
4597
|
-
if (original.kind === Kind$1.OBJECT) {
|
|
4598
|
-
for (const key of keys$7(original.fields)) {
|
|
4599
|
-
if (isObjectValueNode$1(swapped) && swapped.fields[key]) {
|
|
4600
|
-
if (is(original.fields[key], 'Variable')) {
|
|
4601
|
-
original.fields[key] = swapped.fields[key];
|
|
4602
|
-
}
|
|
4603
|
-
else {
|
|
4604
|
-
swapValueNodeWithVariableNodes(original.fields[key], swapped.fields[key]);
|
|
4605
|
-
}
|
|
4606
|
-
}
|
|
4607
|
-
}
|
|
4608
|
-
}
|
|
4609
|
-
}
|
|
4610
|
-
//find top level record queries
|
|
4611
|
-
function findRecordSelections(document) {
|
|
4612
|
-
return document.definitions
|
|
4613
|
-
.filter(isOperationDefinition)
|
|
4614
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4615
|
-
.filter(isObjectFieldSelection)
|
|
4616
|
-
.filter(named('uiapi'))
|
|
4617
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4618
|
-
.filter(isObjectFieldSelection)
|
|
4619
|
-
.filter(named('query'))
|
|
4620
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4621
|
-
.filter(isCustomFieldNode);
|
|
4622
|
-
}
|
|
4623
|
-
function findRecordsWithVariableArguments(document) {
|
|
4624
|
-
return findRecordSelections(document).filter(hasArguments);
|
|
4625
|
-
}
|
|
4626
|
-
function findTopLevelVariableDocuments(document) {
|
|
4627
|
-
return document.definitions
|
|
4628
|
-
.filter(isOperationDefinition)
|
|
4629
|
-
.reduce(flatMap(luvioSelections), [])
|
|
4630
|
-
.filter(isObjectFieldSelection);
|
|
4631
|
-
}
|
|
4632
|
-
function transform(document, input) {
|
|
4633
|
-
const recordNodes = findRecordSelections(document);
|
|
4634
|
-
return rootQuery(recordNodes, input);
|
|
4635
|
-
}
|
|
4636
|
-
|
|
4637
|
-
class StoreEvalPreconditioner {
|
|
4638
|
-
/**
|
|
4639
|
-
* Given an AST, a mechanism to discover missing object info references and a required userId,
|
|
4640
|
-
* produce an intermediate representation that can be successfully transformed to SQL for evaluation.
|
|
4641
|
-
* Missing object info records triggers the objectInfoService (same instance shared with Drafts code), to
|
|
4642
|
-
* use the getObjectInfos adapter to efficiently fetch the necessary records.
|
|
4643
|
-
*/
|
|
4644
|
-
async createRootQuery(config, objectInfoService, userId, draftFunctions, connectionKeyBuilder, sqliteStore) {
|
|
4645
|
-
const { query: ast, variables } = config;
|
|
4646
|
-
swapVariableArguments(ast, variables);
|
|
4647
|
-
// Parse out top-level record queries types we know we will need, since spanning fields will
|
|
4648
|
-
// require at least this top level record present to resolve relationship lookups
|
|
4649
|
-
const recordSelections = findRecordSelections(ast);
|
|
4650
|
-
let metadata = {};
|
|
4651
|
-
const queryKeys = recordSelections.map((rs) => connectionKeyBuilder(rs, variables));
|
|
4652
|
-
if (excludeStaleRecordsGate.isOpen({ fallback: false })) {
|
|
4653
|
-
let sqlResult = await sqliteStore.query(`select key, metadata from lds_data where key in (${queryKeys
|
|
4654
|
-
.map(() => '?')
|
|
4655
|
-
.join(',')})`, queryKeys);
|
|
4656
|
-
metadata = sqlResult.rows.reduce((metadata, row) => {
|
|
4657
|
-
metadata[row[0]] = JSON.parse(row[1]);
|
|
4658
|
-
return metadata;
|
|
4659
|
-
}, {});
|
|
4660
|
-
}
|
|
4661
|
-
const topLevelNeededRecords = recordSelections.map((selection) => selection.name);
|
|
4662
|
-
// Seed the initial list of things to fetch
|
|
4663
|
-
const neededObjectInfos = new Set(topLevelNeededRecords);
|
|
4664
|
-
// Seed the list of things we've _tried_ to fetch. As we iterate and
|
|
4665
|
-
// continue to find things we know we need, we're keeping track of things we've tried
|
|
4666
|
-
// to make sure that a query with unknown types (from the server's perspective) don't
|
|
4667
|
-
// trigger an infinite loop of ObjectInfo record lookups.
|
|
4668
|
-
const objectInfoAttemptedFetches = new Set(topLevelNeededRecords);
|
|
4669
|
-
// We'll build up this dictionary along the way with the necessary ObjectInfo records
|
|
4670
|
-
// we discovery
|
|
4671
|
-
let objectInfoMap = {};
|
|
4672
|
-
// Captures query parsing success/fail
|
|
4673
|
-
let astTransformResult;
|
|
4674
|
-
// Guarding against unknown unknowns that could (theoretically) get stuck in an infinite loop while
|
|
4675
|
-
// resolving ObjectInfo records.
|
|
4676
|
-
let depthGuard = 0;
|
|
4677
|
-
// Most of the depths we'll see for real use cases are on the order 2-8
|
|
4678
|
-
const depthLimit = 50;
|
|
4679
|
-
// The idea here is to keep attempting to parse the AST into something offline
|
|
4680
|
-
// eval understands until we've fetched all the necessary types and are successful or we
|
|
4681
|
-
// have irrecoverable errors.
|
|
4682
|
-
while (neededObjectInfos.size > 0) {
|
|
4683
|
-
// safety net in case there are 🐲's
|
|
4684
|
-
if (depthGuard++ > depthLimit) {
|
|
4685
|
-
// eslint-disable-next-line
|
|
4686
|
-
throw new Error('Maximum ObjectInfo fetching depth exceeded');
|
|
4687
|
-
}
|
|
4688
|
-
// Fetch the current list of needed records
|
|
4689
|
-
const fetchedObjectInfos = await objectInfoService.getObjectInfos([
|
|
4690
|
-
...neededObjectInfos,
|
|
4691
|
-
]);
|
|
4692
|
-
// Clear the list of records needed; this will get rebuilt with each iteration
|
|
4693
|
-
neededObjectInfos.clear();
|
|
4694
|
-
// Enrich the dictionary with the new information
|
|
4695
|
-
objectInfoMap = {
|
|
4696
|
-
...objectInfoMap,
|
|
4697
|
-
...fetchedObjectInfos,
|
|
4698
|
-
};
|
|
4699
|
-
// Attempt to parse the AST given what we know already...
|
|
4700
|
-
astTransformResult = transform(ast, {
|
|
4701
|
-
config,
|
|
4702
|
-
userId,
|
|
4703
|
-
objectInfoMap,
|
|
4704
|
-
draftFunctions,
|
|
4705
|
-
connectionKeyBuilder,
|
|
4706
|
-
metadata,
|
|
4707
|
-
queryKeys,
|
|
4708
|
-
});
|
|
4709
|
-
if (astTransformResult.isSuccess === false) {
|
|
4710
|
-
for (const error of astTransformResult.error) {
|
|
4711
|
-
// The only kind of error we can recover from is missing object info, so
|
|
4712
|
-
// if something else is wrong, bail early.
|
|
4713
|
-
if (error.type !== 'MissingObjectInfoError')
|
|
4714
|
-
throw concatenatePredicateErrors(astTransformResult.error);
|
|
4715
|
-
// If we've tried to fetch this before and failed, don't re-add it to the
|
|
4716
|
-
// list of things to try and fetch
|
|
4717
|
-
if (objectInfoAttemptedFetches.has(error.object))
|
|
4718
|
-
continue;
|
|
4719
|
-
// If there are errors specifically about missing object info, add the
|
|
4720
|
-
// newly discovered necessity to the list of things to fetch...
|
|
4721
|
-
neededObjectInfos.add(error.object);
|
|
4722
|
-
// and track the attempt
|
|
4723
|
-
objectInfoAttemptedFetches.add(error.object);
|
|
1543
|
+
...storeEntry.metadata,
|
|
1544
|
+
expirationTimestamp: now,
|
|
1545
|
+
};
|
|
4724
1546
|
}
|
|
1547
|
+
needWriteBack = true;
|
|
4725
1548
|
}
|
|
4726
1549
|
}
|
|
4727
|
-
|
|
4728
|
-
|
|
4729
|
-
// the future when we support non-record queries (related list metadata queries?), we'll adjust for this
|
|
4730
|
-
if (astTransformResult === undefined) {
|
|
4731
|
-
// eslint-disable-next-line
|
|
4732
|
-
throw new Error('Unable to transform AST to intermediate representation');
|
|
4733
|
-
}
|
|
4734
|
-
else if (astTransformResult.isSuccess === false) {
|
|
4735
|
-
// After we've exhausted the errors related to missing records, if we still have errors
|
|
4736
|
-
// we'll throw to exit early, otherwise we can proceed to evaluate
|
|
4737
|
-
throw concatenatePredicateErrors(astTransformResult.error);
|
|
4738
|
-
}
|
|
4739
|
-
else {
|
|
4740
|
-
return astTransformResult.value;
|
|
4741
|
-
}
|
|
4742
|
-
}
|
|
4743
|
-
}
|
|
4744
|
-
|
|
4745
|
-
/**
|
|
4746
|
-
* Checks a GraphQL-shaped network response for errors, returning true if it does
|
|
4747
|
-
* and false otherwise
|
|
4748
|
-
*/
|
|
4749
|
-
function hasGraphQlErrors$1(response) {
|
|
4750
|
-
return (response !== undefined &&
|
|
4751
|
-
response.errors !== undefined &&
|
|
4752
|
-
isArray$4(response.errors) &&
|
|
4753
|
-
response.errors.length > 0);
|
|
4754
|
-
}
|
|
4755
|
-
|
|
4756
|
-
const GRAPHQL_EVAL_NAMESPACE$1 = 'graphql-eval';
|
|
4757
|
-
|
|
4758
|
-
function isStoreEvalSnapshot(snapshot) {
|
|
4759
|
-
return 'rebuildWithStoreEval' in snapshot;
|
|
4760
|
-
}
|
|
4761
|
-
/**
|
|
4762
|
-
* Evaluates the SQL. Assumes all preconditions are met. NOTE: this function
|
|
4763
|
-
* will throw if it fails to evaluate the SQL, callers should protect against that.
|
|
4764
|
-
*/
|
|
4765
|
-
async function evaluateSqlite(query, eventEmitter, store) {
|
|
4766
|
-
const { sql, bindings } = computeSql(query);
|
|
4767
|
-
const start = Date.now();
|
|
4768
|
-
const result = await store.query(sql, bindings);
|
|
4769
|
-
const rawValue = result.rows[0][0];
|
|
4770
|
-
eventEmitter({ type: 'graphql-db-read', sql, bindings, duration: Date.now() - start });
|
|
4771
|
-
const data = JSON.parse(rawValue);
|
|
4772
|
-
const seenRecords = createSeenRecords$1(data);
|
|
4773
|
-
if (query.queryKeys) {
|
|
4774
|
-
for (const queryKey of query.queryKeys) {
|
|
4775
|
-
seenRecords.add(queryKey);
|
|
1550
|
+
if (needWriteBack) {
|
|
1551
|
+
await durableStore.setEntries(entries, DefaultDurableSegment);
|
|
4776
1552
|
}
|
|
4777
|
-
|
|
4778
|
-
|
|
4779
|
-
|
|
4780
|
-
const wrapStartEndEvents = (storeEval) => {
|
|
4781
|
-
return async (config, nonEvaluatedSnapshotOrPromise, observers, connectionKeyBuilder) => {
|
|
4782
|
-
const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE$1, observers);
|
|
4783
|
-
eventEmitter({ type: 'graphql-eval-start' });
|
|
4784
|
-
const snapshot = await storeEval(config, nonEvaluatedSnapshotOrPromise, observers, connectionKeyBuilder);
|
|
4785
|
-
eventEmitter({ type: 'graphql-eval-end' });
|
|
4786
|
-
return snapshot;
|
|
1553
|
+
// push a notifyStoreUpdateAvailable message with entryKeys as data into messaging segment
|
|
1554
|
+
await durableStore.setEntries({ notifyStoreUpdateAvailable: { data: entryKeys } }, MessagingDurableSegment);
|
|
1555
|
+
return Promise.resolve(undefined);
|
|
4787
1556
|
};
|
|
4788
|
-
|
|
4789
|
-
|
|
4790
|
-
|
|
4791
|
-
|
|
4792
|
-
|
|
4793
|
-
|
|
4794
|
-
|
|
4795
|
-
|
|
4796
|
-
|
|
4797
|
-
}, (query, _context, eventEmitter) => evaluateSqlite(query, eventEmitter, sqliteStore), draftFunctions, sqliteStore);
|
|
4798
|
-
}
|
|
4799
|
-
async function noopStoreEval(_config, nonEvaluatedSnapshotOrPromise, _observers) {
|
|
4800
|
-
return nonEvaluatedSnapshotOrPromise;
|
|
4801
|
-
}
|
|
4802
|
-
/**
|
|
4803
|
-
* If we can't reach the server or if we get back a 504 (which means only-if-cached
|
|
4804
|
-
* cache policy was used but this query hasn't been cached) then we still want to
|
|
4805
|
-
* attempt an eval. All other errors should be returned to caller.
|
|
4806
|
-
*/
|
|
4807
|
-
function isErrorSnapshotThatShouldGetReturnedToCaller$1(snapshot) {
|
|
4808
|
-
return ((snapshot.state === 'Error' &&
|
|
4809
|
-
snapshot.error.errorType === 'fetchResponse' &&
|
|
4810
|
-
snapshot.error.status !== 504) ||
|
|
4811
|
-
hasGraphQlErrors$1(snapshot.data));
|
|
4812
|
-
}
|
|
4813
|
-
function generateUniqueRecordId$1() {
|
|
4814
|
-
return `${GRAPHQL_ROOT_KEY$1}${Date.now() + Math.random().toFixed(5).split('.')[1]}`;
|
|
4815
|
-
}
|
|
4816
|
-
function makeStoreEval(preconditioner, objectInfoService, userId, contextProvider, queryEvaluator, draftFunctions, sqliteStore) {
|
|
4817
|
-
const storeEval = async (config, nonEvaluatedSnapshotOrPromise, observers, connectionKeyBuilder) => {
|
|
4818
|
-
const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE$1, observers);
|
|
4819
|
-
// the non-eval'ed input could either be a snapshot or promise of one so
|
|
4820
|
-
// await it here to normalize the input to a snapshot
|
|
4821
|
-
const nonEvaluatedSnapshot = await nonEvaluatedSnapshotOrPromise;
|
|
4822
|
-
// if the non-eval result has errors we want to return to caller
|
|
4823
|
-
const nonEvaluatedGQLSnapshot = nonEvaluatedSnapshot;
|
|
4824
|
-
if (isErrorSnapshotThatShouldGetReturnedToCaller$1(nonEvaluatedGQLSnapshot)) {
|
|
4825
|
-
const { data: gqlData } = nonEvaluatedGQLSnapshot;
|
|
4826
|
-
if (hasGraphQlErrors$1(gqlData) && gqlData !== undefined) {
|
|
4827
|
-
return {
|
|
4828
|
-
...nonEvaluatedSnapshot,
|
|
4829
|
-
data: undefined,
|
|
4830
|
-
state: 'Error',
|
|
4831
|
-
error: {
|
|
4832
|
-
errorType: 'adapterError',
|
|
4833
|
-
error: gqlData.errors,
|
|
4834
|
-
},
|
|
4835
|
-
};
|
|
1557
|
+
const reviveSnapshotWrapper = function (unavailableSnapshot, buildL1Snapshot) {
|
|
1558
|
+
let revivingStore = undefined;
|
|
1559
|
+
if (useRevivingStore) {
|
|
1560
|
+
// NOTE: `store` is private, there doesn't seem to be a better,
|
|
1561
|
+
// cleaner way of accessing it from a derived environment.
|
|
1562
|
+
let baseStore = environment.store;
|
|
1563
|
+
// If we're rebuilding during an ingest, the existing staging store should be the base store.
|
|
1564
|
+
if (stagingStore) {
|
|
1565
|
+
baseStore = stagingStore;
|
|
4836
1566
|
}
|
|
4837
|
-
|
|
1567
|
+
let revivingStore = buildRevivingStagingStore(baseStore);
|
|
1568
|
+
revivingStores.add(revivingStore);
|
|
4838
1569
|
}
|
|
4839
|
-
|
|
4840
|
-
|
|
4841
|
-
|
|
4842
|
-
|
|
4843
|
-
|
|
4844
|
-
|
|
4845
|
-
|
|
4846
|
-
|
|
1570
|
+
return reviveSnapshot(environment, durableStore, unavailableSnapshot, durableStoreErrorHandler, () => {
|
|
1571
|
+
const tempStore = stagingStore;
|
|
1572
|
+
const result = buildL1Snapshot();
|
|
1573
|
+
stagingStore = tempStore;
|
|
1574
|
+
return result;
|
|
1575
|
+
}, revivingStore).finally(() => {
|
|
1576
|
+
});
|
|
1577
|
+
};
|
|
1578
|
+
const expirePossibleStaleRecords = async function (keys$1, config, refresh) {
|
|
1579
|
+
validateNotDisposed();
|
|
1580
|
+
const metadataKeys = keys$1.map(serializeStructuredKey);
|
|
1581
|
+
const now = Date.now();
|
|
1582
|
+
const entries = await durableStore.getMetadata(metadataKeys, DefaultDurableSegment);
|
|
1583
|
+
if (entries === undefined || keys$7(entries).length === 0) {
|
|
1584
|
+
return environment.expirePossibleStaleRecords(keys$1);
|
|
4847
1585
|
}
|
|
4848
|
-
|
|
4849
|
-
|
|
4850
|
-
|
|
4851
|
-
|
|
4852
|
-
|
|
4853
|
-
|
|
4854
|
-
|
|
4855
|
-
...nonEvaluatedSnapshot,
|
|
4856
|
-
data: undefined,
|
|
4857
|
-
state: 'Error',
|
|
4858
|
-
error: {
|
|
4859
|
-
status: 500,
|
|
4860
|
-
statusText: 'PRECONDITIONER_ERROR',
|
|
4861
|
-
body: {
|
|
4862
|
-
message: error.message,
|
|
4863
|
-
},
|
|
4864
|
-
errorType: 'fetchResponse',
|
|
4865
|
-
headers: {},
|
|
4866
|
-
ok: false,
|
|
4867
|
-
},
|
|
4868
|
-
};
|
|
1586
|
+
let metaDataChanged = false;
|
|
1587
|
+
const metadataEntries = metadataKeys.reduce((accu, key) => {
|
|
1588
|
+
const metadataEntry = entries[key];
|
|
1589
|
+
if (metadataEntry.metadata !== undefined) {
|
|
1590
|
+
const metadata = { ...metadataEntry.metadata, expirationTimestamp: now };
|
|
1591
|
+
accu[key] = { metadata };
|
|
1592
|
+
metaDataChanged = true;
|
|
4869
1593
|
}
|
|
4870
|
-
return
|
|
4871
|
-
}
|
|
4872
|
-
|
|
4873
|
-
|
|
4874
|
-
try {
|
|
4875
|
-
const { data, seenRecords } = await queryEvaluator(rootQuery, context, eventEmitter);
|
|
4876
|
-
const rebuildWithStoreEval = ((originalSnapshot) => {
|
|
4877
|
-
return storeEval(config, originalSnapshot, observers, connectionKeyBuilder).then((rebuiltSnapshot) => {
|
|
4878
|
-
return objectsDeepEqual(originalSnapshot.data, rebuiltSnapshot.data)
|
|
4879
|
-
? originalSnapshot
|
|
4880
|
-
: rebuiltSnapshot;
|
|
4881
|
-
});
|
|
4882
|
-
});
|
|
4883
|
-
const recordId = generateUniqueRecordId$1();
|
|
4884
|
-
// if the non-eval'ed snapshot was an error then we return a synthetic
|
|
4885
|
-
// Fulfilled snapshot (this only happens in this code path if
|
|
4886
|
-
// the error is network error or 504), otherwise we spread over
|
|
4887
|
-
// the non-eval'ed snapshot (which will be either Fulfilled or Stale)
|
|
4888
|
-
return nonEvaluatedSnapshot.state === 'Error'
|
|
4889
|
-
? createStoreEvalSnapshot(data, seenRecords, rebuildWithStoreEval, recordId)
|
|
4890
|
-
: {
|
|
4891
|
-
...nonEvaluatedSnapshot,
|
|
4892
|
-
data: data,
|
|
4893
|
-
recordId,
|
|
4894
|
-
seenRecords: seenRecords,
|
|
4895
|
-
rebuildWithStoreEval,
|
|
4896
|
-
};
|
|
1594
|
+
return accu;
|
|
1595
|
+
}, {});
|
|
1596
|
+
if (metaDataChanged) {
|
|
1597
|
+
await durableStore.setMetadata(metadataEntries, DefaultDurableSegment);
|
|
4897
1598
|
}
|
|
4898
|
-
|
|
4899
|
-
|
|
4900
|
-
type: 'graphql-create-snapshot-error',
|
|
4901
|
-
error,
|
|
4902
|
-
});
|
|
4903
|
-
return nonEvaluatedSnapshot;
|
|
1599
|
+
if (config !== undefined && refresh !== undefined) {
|
|
1600
|
+
return environment.refreshPossibleStaleRecords(config, refresh);
|
|
4904
1601
|
}
|
|
1602
|
+
return Promise.resolve();
|
|
4905
1603
|
};
|
|
4906
|
-
|
|
1604
|
+
// set the default cache policy of the base environment
|
|
1605
|
+
environment.setDefaultCachePolicy({
|
|
1606
|
+
type: 'stale-while-revalidate',
|
|
1607
|
+
staleDurationSeconds: Number.MAX_SAFE_INTEGER,
|
|
1608
|
+
});
|
|
1609
|
+
return create$7(environment, {
|
|
1610
|
+
publishStoreMetadata: { value: publishStoreMetadata },
|
|
1611
|
+
storeIngest: { value: storeIngest },
|
|
1612
|
+
storeIngestError: { value: storeIngestError },
|
|
1613
|
+
storeBroadcast: { value: storeBroadcast },
|
|
1614
|
+
storeLookup: { value: storeLookup },
|
|
1615
|
+
storeEvict: { value: storeEvict },
|
|
1616
|
+
wrapNormalizedGraphNode: { value: wrapNormalizedGraphNode },
|
|
1617
|
+
getNode: { value: getNode },
|
|
1618
|
+
rebuildSnapshot: { value: rebuildSnapshot },
|
|
1619
|
+
withContext: { value: withContext },
|
|
1620
|
+
storeSetTTLOverride: { value: storeSetTTLOverride },
|
|
1621
|
+
storeSetDefaultTTLOverride: { value: storeSetDefaultTTLOverride },
|
|
1622
|
+
storePublish: { value: storePublish },
|
|
1623
|
+
storeRedirect: { value: storeRedirect },
|
|
1624
|
+
dispose: { value: dispose },
|
|
1625
|
+
publishChangesToDurableStore: { value: publishChangesToDurableStore },
|
|
1626
|
+
getDurableTTLOverrides: { value: getDurableTTLOverrides },
|
|
1627
|
+
dispatchResourceRequest: { value: dispatchResourceRequest },
|
|
1628
|
+
applyCachePolicy: { value: applyCachePolicy },
|
|
1629
|
+
getIngestStagingStoreRecords: { value: getIngestStagingStoreRecords },
|
|
1630
|
+
getIngestStagingStoreMetadata: { value: getIngestStagingStoreMetadata },
|
|
1631
|
+
getIngestStagingStore: { value: getIngestStagingStore },
|
|
1632
|
+
handleSuccessResponse: { value: handleSuccessResponse },
|
|
1633
|
+
handleErrorResponse: { value: handleErrorResponse },
|
|
1634
|
+
getNotifyChangeStoreEntries: { value: getNotifyChangeStoreEntries },
|
|
1635
|
+
notifyStoreUpdateAvailable: { value: notifyStoreUpdateAvailable },
|
|
1636
|
+
expirePossibleStaleRecords: { value: expirePossibleStaleRecords },
|
|
1637
|
+
});
|
|
4907
1638
|
}
|
|
4908
1639
|
|
|
4909
1640
|
/**
|
|
@@ -5262,7 +1993,7 @@ function getRetryAfterInMs(headers) {
|
|
|
5262
1993
|
return delayUntilDateTime - Date.now();
|
|
5263
1994
|
}
|
|
5264
1995
|
|
|
5265
|
-
function buildLuvioOverrideForDraftAdapters(luvio, handler, extractTargetIdFromCacheKey
|
|
1996
|
+
function buildLuvioOverrideForDraftAdapters(luvio, handler, extractTargetIdFromCacheKey) {
|
|
5266
1997
|
// override this to create and enqueue a new draft action, and return synthetic response
|
|
5267
1998
|
const dispatchResourceRequest = async function (resourceRequest, _context) {
|
|
5268
1999
|
const resourceRequestCopy = clone$1(resourceRequest);
|
|
@@ -5270,13 +2001,6 @@ function buildLuvioOverrideForDraftAdapters(luvio, handler, extractTargetIdFromC
|
|
|
5270
2001
|
if (handler.hasIdempotencySupport()) {
|
|
5271
2002
|
resourceRequestCopy.headers[HTTP_HEADER_IDEMPOTENCY_KEY] = uuidv4();
|
|
5272
2003
|
}
|
|
5273
|
-
// enable return extra fields for record creation and record update http call
|
|
5274
|
-
if (typeof resourceRequest.basePath === 'string' &&
|
|
5275
|
-
resourceRequest.basePath.startsWith('/ui-api/records') &&
|
|
5276
|
-
(resourceRequest.method === 'post' || resourceRequest.method === 'patch')) {
|
|
5277
|
-
resourceRequestCopy.queryParams = resourceRequestCopy.queryParams || {};
|
|
5278
|
-
resourceRequestCopy.queryParams['includeFieldsInBody'] = true;
|
|
5279
|
-
}
|
|
5280
2004
|
const { data } = await handler.enqueue(resourceRequestCopy).catch((err) => {
|
|
5281
2005
|
throw transformErrorToDraftSynthesisError(err);
|
|
5282
2006
|
});
|
|
@@ -5297,24 +2021,6 @@ function buildLuvioOverrideForDraftAdapters(luvio, handler, extractTargetIdFromC
|
|
|
5297
2021
|
ttl: Number.MAX_SAFE_INTEGER,
|
|
5298
2022
|
});
|
|
5299
2023
|
};
|
|
5300
|
-
if (options.forDeleteAdapter === true) {
|
|
5301
|
-
// delete adapters attempt to evict the record on successful network response,
|
|
5302
|
-
// since draft-aware delete adapters do soft-delete (record stays in cache
|
|
5303
|
-
// with a "drafts.deleted" property) we just want to let the environment know so it can
|
|
5304
|
-
// decrement its ref count for the record
|
|
5305
|
-
const storeEvict = function (key) {
|
|
5306
|
-
const softEvict = luvio.environment.softEvict;
|
|
5307
|
-
if (softEvict === undefined) {
|
|
5308
|
-
throw Error('DraftAwareEnvironment not configured correctly');
|
|
5309
|
-
}
|
|
5310
|
-
softEvict(key);
|
|
5311
|
-
};
|
|
5312
|
-
return create$6(luvio, {
|
|
5313
|
-
dispatchResourceRequest: { value: dispatchResourceRequest },
|
|
5314
|
-
publishStoreMetadata: { value: publishStoreMetadata },
|
|
5315
|
-
storeEvict: { value: storeEvict },
|
|
5316
|
-
});
|
|
5317
|
-
}
|
|
5318
2024
|
return create$6(luvio, {
|
|
5319
2025
|
dispatchResourceRequest: { value: dispatchResourceRequest },
|
|
5320
2026
|
publishStoreMetadata: { value: publishStoreMetadata },
|
|
@@ -6333,14 +3039,7 @@ class AbstractResourceRequestActionHandler {
|
|
|
6333
3039
|
version: '242.0.0',
|
|
6334
3040
|
};
|
|
6335
3041
|
}
|
|
6336
|
-
async handleActionEnqueued(
|
|
6337
|
-
const { method } = action.data;
|
|
6338
|
-
// delete adapters don't get a value back to ingest so
|
|
6339
|
-
// we ingest it for them here
|
|
6340
|
-
if (method === 'delete') {
|
|
6341
|
-
await this.reingestRecord(action);
|
|
6342
|
-
}
|
|
6343
|
-
}
|
|
3042
|
+
async handleActionEnqueued(_action) { }
|
|
6344
3043
|
handleActionRemoved(action) {
|
|
6345
3044
|
return this.reingestRecord(action);
|
|
6346
3045
|
}
|
|
@@ -7090,6 +3789,40 @@ function makeEnvironmentDraftAware(luvio, env, durableStore, handlers, draftQueu
|
|
|
7090
3789
|
*/
|
|
7091
3790
|
|
|
7092
3791
|
|
|
3792
|
+
function isStoreKeyRecordId$1(key) {
|
|
3793
|
+
return key.indexOf(RECORD_ID_PREFIX) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION) === -1;
|
|
3794
|
+
}
|
|
3795
|
+
function objectsDeepEqual(lhs, rhs) {
|
|
3796
|
+
if (lhs === rhs)
|
|
3797
|
+
return true;
|
|
3798
|
+
if (typeof lhs !== 'object' || typeof rhs !== 'object' || lhs === null || rhs === null)
|
|
3799
|
+
return false;
|
|
3800
|
+
const lhsKeys = Object.keys(lhs);
|
|
3801
|
+
const rhsKeys = Object.keys(rhs);
|
|
3802
|
+
if (lhsKeys.length !== rhsKeys.length)
|
|
3803
|
+
return false;
|
|
3804
|
+
for (let key of lhsKeys) {
|
|
3805
|
+
if (!rhsKeys.includes(key))
|
|
3806
|
+
return false;
|
|
3807
|
+
if (typeof lhs[key] === 'function' || typeof rhs[key] === 'function') {
|
|
3808
|
+
if (lhs[key].toString() !== rhs[key].toString())
|
|
3809
|
+
return false;
|
|
3810
|
+
}
|
|
3811
|
+
else {
|
|
3812
|
+
if (!objectsDeepEqual(lhs[key], rhs[key]))
|
|
3813
|
+
return false;
|
|
3814
|
+
}
|
|
3815
|
+
}
|
|
3816
|
+
return true;
|
|
3817
|
+
}
|
|
3818
|
+
|
|
3819
|
+
/**
|
|
3820
|
+
* Copyright (c) 2022, Salesforce, Inc.,
|
|
3821
|
+
* All rights reserved.
|
|
3822
|
+
* For full license text, see the LICENSE.txt file
|
|
3823
|
+
*/
|
|
3824
|
+
|
|
3825
|
+
|
|
7093
3826
|
const { keys: keys$5, values: values$3, create: create$5, assign: assign$5, freeze: freeze$1, entries: entries$5 } = Object;
|
|
7094
3827
|
const { stringify: stringify$4, parse: parse$4 } = JSON;
|
|
7095
3828
|
const { shift } = Array.prototype;
|
|
@@ -7113,7 +3846,6 @@ function getDenormalizedRecord(recordKey, durableStore) {
|
|
|
7113
3846
|
if (denormalizedEntry === undefined) {
|
|
7114
3847
|
return undefined;
|
|
7115
3848
|
}
|
|
7116
|
-
// don't include link information
|
|
7117
3849
|
const denormalizedRecord = denormalizedEntry.data;
|
|
7118
3850
|
if (isStoreRecordError(denormalizedRecord)) {
|
|
7119
3851
|
return undefined;
|
|
@@ -7212,6 +3944,11 @@ function filterOutReferenceNonScalarFields(record) {
|
|
|
7212
3944
|
delete filteredRecords.links;
|
|
7213
3945
|
return filteredRecords;
|
|
7214
3946
|
}
|
|
3947
|
+
function filterOutPrivateProperties(record) {
|
|
3948
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
3949
|
+
const { eTag, weakEtag, ...rest } = record;
|
|
3950
|
+
return rest;
|
|
3951
|
+
}
|
|
7215
3952
|
|
|
7216
3953
|
/**
|
|
7217
3954
|
* Checks if a resource request is a GET method on the record endpoint
|
|
@@ -8119,6 +4856,9 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
|
|
|
8119
4856
|
}
|
|
8120
4857
|
return super.handleActionCompleted(completedAction, queueOperations, allHandlers);
|
|
8121
4858
|
}
|
|
4859
|
+
async handleActionEnqueued(action) {
|
|
4860
|
+
await this.reingestRecord(action);
|
|
4861
|
+
}
|
|
8122
4862
|
async fetchReferenceRecord(referenceFields) {
|
|
8123
4863
|
const promises = referenceFields.map(async (referenceFieldInfo) => {
|
|
8124
4864
|
const apiName = await this.identifyApiName(referenceFieldInfo.id, referenceFieldInfo.field);
|
|
@@ -8631,6 +5371,15 @@ class UiApiDraftRecordService {
|
|
|
8631
5371
|
const luvio = this.getLuvio();
|
|
8632
5372
|
const key = keyBuilderFromTypeRecordRepresentation(luvio, record);
|
|
8633
5373
|
luvio.storeIngest(key, ingestRecord, record);
|
|
5374
|
+
if (record.drafts && record.drafts.created) {
|
|
5375
|
+
luvio.publishStoreMetadata(key, {
|
|
5376
|
+
namespace: 'UiApi',
|
|
5377
|
+
representationName: 'RecordRepresentation',
|
|
5378
|
+
ttl: Number.MAX_SAFE_INTEGER,
|
|
5379
|
+
version: RecordRepresentationVersion,
|
|
5380
|
+
ingestionTimestamp: Date.now(),
|
|
5381
|
+
});
|
|
5382
|
+
}
|
|
8634
5383
|
}
|
|
8635
5384
|
getUserId() {
|
|
8636
5385
|
return this.userId;
|
|
@@ -9421,10 +6170,23 @@ function getDescriptionFromResourceRequest(request) {
|
|
|
9421
6170
|
* @param actionHandler The UIAPI Record action handler. NOTE: this adapter doesn't
|
|
9422
6171
|
* register it with the DraftQueue, runtime should set that up
|
|
9423
6172
|
*/
|
|
9424
|
-
function createRecordDraftAdapterFactory(
|
|
9425
|
-
|
|
9426
|
-
|
|
9427
|
-
|
|
6173
|
+
function createRecordDraftAdapterFactory(actionHandler, durableStore) {
|
|
6174
|
+
return async function createRecordDraftAdapter(config, createResourceRequest) {
|
|
6175
|
+
const request = createResourceRequest(config);
|
|
6176
|
+
request.headers['Idempotency-Key'] = uuidv4();
|
|
6177
|
+
request.queryParams = request.queryParams || {};
|
|
6178
|
+
request.queryParams['includeFieldsInBody'] = true;
|
|
6179
|
+
const result = await actionHandler.enqueue(request).catch((err) => {
|
|
6180
|
+
throw transformErrorToDraftSynthesisError(err);
|
|
6181
|
+
});
|
|
6182
|
+
const record = await getDenormalizedRecord(result.action.tag, durableStore);
|
|
6183
|
+
if (record) {
|
|
6184
|
+
return {
|
|
6185
|
+
state: 'Fulfilled',
|
|
6186
|
+
data: filterOutPrivateProperties(filterOutReferenceNonScalarFields(record)),
|
|
6187
|
+
};
|
|
6188
|
+
}
|
|
6189
|
+
throw createDraftSynthesisErrorResponse();
|
|
9428
6190
|
};
|
|
9429
6191
|
}
|
|
9430
6192
|
|
|
@@ -9434,10 +6196,23 @@ function createRecordDraftAdapterFactory(luvio, actionHandler) {
|
|
|
9434
6196
|
* @param actionHandler The UIAPI Record action handler. NOTE: this adapter doesn't
|
|
9435
6197
|
* register it with the DraftQueue, runtime should set that up
|
|
9436
6198
|
*/
|
|
9437
|
-
function updateRecordDraftAdapterFactory(
|
|
9438
|
-
|
|
9439
|
-
|
|
9440
|
-
|
|
6199
|
+
function updateRecordDraftAdapterFactory(actionHandler, durableStore) {
|
|
6200
|
+
return async function createRecordDraftAdapter(config, createResourceRequest) {
|
|
6201
|
+
const request = createResourceRequest(config);
|
|
6202
|
+
request.headers['Idempotency-Key'] = uuidv4();
|
|
6203
|
+
request.queryParams = request.queryParams || {};
|
|
6204
|
+
request.queryParams['includeFieldsInBody'] = true;
|
|
6205
|
+
const result = await actionHandler.enqueue(request).catch((err) => {
|
|
6206
|
+
throw transformErrorToDraftSynthesisError(err);
|
|
6207
|
+
});
|
|
6208
|
+
const record = await getDenormalizedRecord(result.action.tag, durableStore);
|
|
6209
|
+
if (record) {
|
|
6210
|
+
return {
|
|
6211
|
+
state: 'Fulfilled',
|
|
6212
|
+
data: filterOutPrivateProperties(filterOutReferenceNonScalarFields(record)),
|
|
6213
|
+
};
|
|
6214
|
+
}
|
|
6215
|
+
throw createDraftSynthesisErrorResponse();
|
|
9441
6216
|
};
|
|
9442
6217
|
}
|
|
9443
6218
|
|
|
@@ -9447,10 +6222,9 @@ function updateRecordDraftAdapterFactory(luvio, actionHandler) {
|
|
|
9447
6222
|
* @param actionHandler The UIAPI Record action handler. NOTE: this adapter doesn't
|
|
9448
6223
|
* register it with the DraftQueue, runtime should set that up
|
|
9449
6224
|
*/
|
|
9450
|
-
function deleteRecordDraftAdapterFactory(
|
|
9451
|
-
|
|
9452
|
-
|
|
9453
|
-
return buildNetworkSnapshot(luvioWithDraftOverrides, config);
|
|
6225
|
+
function deleteRecordDraftAdapterFactory(actionHandler) {
|
|
6226
|
+
return async function deleteRecordDraftAdapter(config, buildResourceRequest) {
|
|
6227
|
+
await actionHandler.enqueue(buildResourceRequest(config));
|
|
9454
6228
|
};
|
|
9455
6229
|
}
|
|
9456
6230
|
|
|
@@ -10841,7 +7615,14 @@ function singlePredicateToSql(predicate, defaultAlias, isChildNotPredicate = fal
|
|
|
10841
7615
|
// SQLite is case sensitive by default, SOQL is case in-sensitive by default
|
|
10842
7616
|
// For pick list includes or excludeds, prefix and suffix the field value with ';' to guarantee the query accuracy.
|
|
10843
7617
|
if (dataType === 'MultiPicklist' && (operator === 'LIKE' || operator === 'NOT LIKE')) {
|
|
10844
|
-
|
|
7618
|
+
// to include nulls in NOT LIKE operators we need to still return a value for NULL
|
|
7619
|
+
// calling the COALESCE function with the extracted value and empty string will make it an empty string
|
|
7620
|
+
// instead of NULL in the function return and can be compared
|
|
7621
|
+
const coalesce = (sql) => {
|
|
7622
|
+
return `COALESCE(${sql}, '')`;
|
|
7623
|
+
};
|
|
7624
|
+
const extract = `json_extract("${alias}".data, '${leftPath}')`;
|
|
7625
|
+
sql = `'${MultiPickListValueSeparator}' || ${operator === 'NOT LIKE' ? coalesce(extract) : extract} || '${MultiPickListValueSeparator}' ${operator} ${questionSql} COLLATE NOCASE`;
|
|
10845
7626
|
}
|
|
10846
7627
|
else {
|
|
10847
7628
|
sql = `json_extract("${alias}".data, '${leftPath}') ${operator} ${questionSql}${isCaseSensitive === true ? '' : ` COLLATE NOCASE`}`;
|
|
@@ -17571,12 +14352,6 @@ function isUnfulfilledSnapshot(cachedSnapshotResult) {
|
|
|
17571
14352
|
function makeEnvironmentGraphqlAware(environment) {
|
|
17572
14353
|
//TODO: [W-12734162] - rebuild non-evaluated snapshot when graph rebuild is triggered. The dependency work on luvio needs to be done.
|
|
17573
14354
|
const rebuildSnapshot = function (snapshot, onRebuild) {
|
|
17574
|
-
if (isStoreEvalSnapshot(snapshot)) {
|
|
17575
|
-
snapshot.rebuildWithStoreEval(snapshot).then((rebuilt) => {
|
|
17576
|
-
onRebuild(rebuilt);
|
|
17577
|
-
});
|
|
17578
|
-
return;
|
|
17579
|
-
}
|
|
17580
14355
|
if (isLocalEvalSnapshot(snapshot)) {
|
|
17581
14356
|
snapshot.rebuildWithLocalEval(snapshot).then((rebuilt) => {
|
|
17582
14357
|
onRebuild(rebuilt);
|
|
@@ -18164,7 +14939,13 @@ class PrimingSession extends EventEmitter {
|
|
|
18164
14939
|
processFetchedRecords(result, abortController) {
|
|
18165
14940
|
if (result.ok === false) {
|
|
18166
14941
|
const { error } = result;
|
|
18167
|
-
|
|
14942
|
+
let primingError = 'unknown';
|
|
14943
|
+
if (error === 'service-protection-error') {
|
|
14944
|
+
primingError = 'service-protection-error';
|
|
14945
|
+
}
|
|
14946
|
+
else if (error === 'network-error') {
|
|
14947
|
+
primingError = 'service-unavailable';
|
|
14948
|
+
}
|
|
18168
14949
|
this.emit('error', {
|
|
18169
14950
|
ids: result.missingIds,
|
|
18170
14951
|
code: primingError,
|
|
@@ -18300,6 +15081,17 @@ class PrimingSession extends EventEmitter {
|
|
|
18300
15081
|
}
|
|
18301
15082
|
}
|
|
18302
15083
|
|
|
15084
|
+
class PrimingNetworkError extends Error {
|
|
15085
|
+
constructor(message, httpCode) {
|
|
15086
|
+
super(message);
|
|
15087
|
+
this.httpCode = httpCode;
|
|
15088
|
+
}
|
|
15089
|
+
isServiceProtectionError() {
|
|
15090
|
+
// When CSP kicks-in, we get 429/503 errors back - https://salesforce.quip.com/20HZA5BN5xgx
|
|
15091
|
+
return this.httpCode === 429 || this.httpCode === 503;
|
|
15092
|
+
}
|
|
15093
|
+
}
|
|
15094
|
+
|
|
18303
15095
|
const requiredPrefix = `required_`;
|
|
18304
15096
|
const requiredFieldMap = {
|
|
18305
15097
|
ApiName: 'ApiName',
|
|
@@ -18321,12 +15113,22 @@ class RecordLoaderGraphQL {
|
|
|
18321
15113
|
return this.generateFetchResult(rep, batch);
|
|
18322
15114
|
}
|
|
18323
15115
|
catch (e) {
|
|
18324
|
-
|
|
18325
|
-
|
|
18326
|
-
|
|
18327
|
-
|
|
18328
|
-
|
|
18329
|
-
|
|
15116
|
+
if (e instanceof PrimingNetworkError && e.isServiceProtectionError()) {
|
|
15117
|
+
return {
|
|
15118
|
+
ok: false,
|
|
15119
|
+
error: 'service-protection-error',
|
|
15120
|
+
messages: ['Service Protection Error'],
|
|
15121
|
+
missingIds: batch.ids,
|
|
15122
|
+
};
|
|
15123
|
+
}
|
|
15124
|
+
else {
|
|
15125
|
+
return {
|
|
15126
|
+
ok: false,
|
|
15127
|
+
error: 'network-error',
|
|
15128
|
+
messages: ['Network Error'],
|
|
15129
|
+
missingIds: batch.ids,
|
|
15130
|
+
};
|
|
15131
|
+
}
|
|
18330
15132
|
}
|
|
18331
15133
|
}
|
|
18332
15134
|
async batchFetchRecordData(batchs, abortController) {
|
|
@@ -18342,14 +15144,26 @@ class RecordLoaderGraphQL {
|
|
|
18342
15144
|
const missingIds = batchs
|
|
18343
15145
|
.map((batch) => batch.ids)
|
|
18344
15146
|
.reduce((prev, curr) => prev.concat(curr), []);
|
|
18345
|
-
|
|
18346
|
-
|
|
18347
|
-
|
|
18348
|
-
|
|
18349
|
-
|
|
18350
|
-
|
|
18351
|
-
|
|
18352
|
-
|
|
15147
|
+
if (e instanceof PrimingNetworkError && e.isServiceProtectionError()) {
|
|
15148
|
+
return [
|
|
15149
|
+
{
|
|
15150
|
+
ok: false,
|
|
15151
|
+
error: 'service-protection-error',
|
|
15152
|
+
messages: [e.message],
|
|
15153
|
+
missingIds,
|
|
15154
|
+
},
|
|
15155
|
+
];
|
|
15156
|
+
}
|
|
15157
|
+
else {
|
|
15158
|
+
return [
|
|
15159
|
+
{
|
|
15160
|
+
ok: false,
|
|
15161
|
+
error: 'network-error',
|
|
15162
|
+
messages: ['Network Error'],
|
|
15163
|
+
missingIds,
|
|
15164
|
+
},
|
|
15165
|
+
];
|
|
15166
|
+
}
|
|
18353
15167
|
}
|
|
18354
15168
|
}
|
|
18355
15169
|
generateFetchResult(repResult, batchInput) {
|
|
@@ -18560,13 +15374,16 @@ class NimbusPrimingNetworkAdapter {
|
|
|
18560
15374
|
body: JSON.stringify({
|
|
18561
15375
|
batchQuery: configs,
|
|
18562
15376
|
}),
|
|
18563
|
-
headers: {
|
|
15377
|
+
headers: {
|
|
15378
|
+
// This header is needed to get back 429/503 error code when CSP kicks-in - https://salesforce.quip.com/20HZA5BN5xgx
|
|
15379
|
+
Cos: '0x04',
|
|
15380
|
+
},
|
|
18564
15381
|
queryParams: {},
|
|
18565
15382
|
priority: 'background',
|
|
18566
15383
|
observabilityContext: {},
|
|
18567
15384
|
}, abortController, (response) => {
|
|
18568
15385
|
if (response.status < 200 || response.status > 299) {
|
|
18569
|
-
reject(new
|
|
15386
|
+
reject(new PrimingNetworkError(response.body || 'Network error', response.status));
|
|
18570
15387
|
return;
|
|
18571
15388
|
}
|
|
18572
15389
|
try {
|
|
@@ -18606,7 +15423,7 @@ class NimbusPrimingNetworkAdapter {
|
|
|
18606
15423
|
observabilityContext: {},
|
|
18607
15424
|
}, abortController, (response) => {
|
|
18608
15425
|
if (response.status < 200 || response.status > 299) {
|
|
18609
|
-
reject(new
|
|
15426
|
+
reject(new PrimingNetworkError(response.body || 'Network error', response.status));
|
|
18610
15427
|
return;
|
|
18611
15428
|
}
|
|
18612
15429
|
try {
|
|
@@ -18638,7 +15455,7 @@ class NimbusPrimingNetworkAdapter {
|
|
|
18638
15455
|
observabilityContext: {},
|
|
18639
15456
|
}, abortController, (response) => {
|
|
18640
15457
|
if (response.status < 200 || response.status > 299) {
|
|
18641
|
-
reject(new
|
|
15458
|
+
reject(new PrimingNetworkError(response.body || 'Network error', response.status));
|
|
18642
15459
|
return;
|
|
18643
15460
|
}
|
|
18644
15461
|
try {
|
|
@@ -18674,7 +15491,7 @@ class NimbusPrimingNetworkAdapter {
|
|
|
18674
15491
|
observabilityContext: {},
|
|
18675
15492
|
}, abortController, (response) => {
|
|
18676
15493
|
if (response.status < 200 || response.status > 299) {
|
|
18677
|
-
reject(new
|
|
15494
|
+
reject(new PrimingNetworkError(response.body || 'Network error', response.status));
|
|
18678
15495
|
return;
|
|
18679
15496
|
}
|
|
18680
15497
|
try {
|
|
@@ -19131,20 +15948,6 @@ function getRuntime() {
|
|
|
19131
15948
|
setupMobileInstrumentation(lazyLuvio, store);
|
|
19132
15949
|
// If the observer nimbus plugin is configured, observation is enabled otherwise this is a no-op
|
|
19133
15950
|
setupObserver();
|
|
19134
|
-
// set storeEval function for lds-adapters-graghql to use
|
|
19135
|
-
withRegistration('@salesforce/lds-adapters-graphql', (registration) => {
|
|
19136
|
-
const { configuration: { setStoreEval, setDraftFunctions }, } = registration;
|
|
19137
|
-
const getCanonicalId = (id) => {
|
|
19138
|
-
var _a;
|
|
19139
|
-
return ((_a = extractRecordIdFromStoreKey(lazyLuvio.storeGetCanonicalKey(RECORD_ID_PREFIX + id))) !== null && _a !== void 0 ? _a : id);
|
|
19140
|
-
};
|
|
19141
|
-
const draftFuncs = {
|
|
19142
|
-
isDraftId: isGenerated,
|
|
19143
|
-
getCanonicalId,
|
|
19144
|
-
};
|
|
19145
|
-
setStoreEval(sqliteStoreEvalFactory(userId, lazyDurableStore, lazyObjectInfoService, draftFuncs));
|
|
19146
|
-
setDraftFunctions(draftFuncs);
|
|
19147
|
-
});
|
|
19148
15951
|
// on core the graphql configuration is split so we need to set configureUIAPIGraphQL both in the
|
|
19149
15952
|
// graphql registration and the uiapi for off core
|
|
19150
15953
|
const configureUIAPIGraphQL = (registration) => {
|
|
@@ -19163,9 +15966,9 @@ function getRuntime() {
|
|
|
19163
15966
|
setEnvironmentAwareGraphQLBatchAdapter(environmentAwareGraphQLBatchAdapter);
|
|
19164
15967
|
};
|
|
19165
15968
|
const draftAwareCreateContentDocumentAndVersionAdapter = createContentDocumentAndVersionDraftAdapterFactory(lazyLuvio, NimbusBinaryStore, contentDocumentCompositeActionHandler);
|
|
19166
|
-
const draftAwareCreateRecord = createRecordDraftAdapterFactory(
|
|
19167
|
-
const draftAwareUpdateRecord = updateRecordDraftAdapterFactory(
|
|
19168
|
-
const draftAwareDeleteRecord = deleteRecordDraftAdapterFactory(
|
|
15969
|
+
const draftAwareCreateRecord = createRecordDraftAdapterFactory(uiApiRecordHandler, lazyDurableStore);
|
|
15970
|
+
const draftAwareUpdateRecord = updateRecordDraftAdapterFactory(uiApiRecordHandler, lazyDurableStore);
|
|
15971
|
+
const draftAwareDeleteRecord = deleteRecordDraftAdapterFactory(uiApiRecordHandler);
|
|
19169
15972
|
withRegistration('@salesforce/lds-adapters-uiapi-mobile', (registration) => {
|
|
19170
15973
|
setLdsAdaptersUiapiInstrumentation(registration);
|
|
19171
15974
|
registration.configuration.setDraftAwareCreateContentDocumentAndVersionAdapter(draftAwareCreateContentDocumentAndVersionAdapter);
|
|
@@ -19209,4 +16012,4 @@ register({
|
|
|
19209
16012
|
});
|
|
19210
16013
|
|
|
19211
16014
|
export { O11Y_NAMESPACE_LDS_MOBILE, getRuntime, registerReportObserver, reportGraphqlQueryParseError };
|
|
19212
|
-
// version: 1.
|
|
16015
|
+
// version: 1.313.0-bf88d762e3
|