@warp-drive/legacy 5.8.0-alpha.4 → 5.8.0-alpha.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +14 -27
- package/declarations/adapter/error.d.ts +5 -5
- package/declarations/adapter/json-api.d.ts +6 -8
- package/declarations/adapter/rest.d.ts +5 -8
- package/declarations/adapter.d.ts +4 -6
- package/declarations/compat/-private.d.ts +1 -1
- package/declarations/compat/builders/find-all.d.ts +6 -6
- package/declarations/compat/builders/find-record.d.ts +8 -8
- package/declarations/compat/builders/query.d.ts +12 -12
- package/declarations/compat/extensions.d.ts +1 -1
- package/declarations/compat/legacy-network-handler/minimum-adapter-interface.d.ts +7 -9
- package/declarations/compat/legacy-network-handler/minimum-serializer-interface.d.ts +20 -30
- package/declarations/compat/utils.d.ts +17 -17
- package/declarations/compat.d.ts +35 -11
- package/declarations/index.d.ts +70 -0
- package/declarations/model/-private/attr.d.ts +5 -6
- package/declarations/model/-private/belongs-to.d.ts +4 -5
- package/declarations/model/-private/has-many.d.ts +4 -5
- package/declarations/model/-private/hooks.d.ts +1 -1
- package/declarations/model/-private/legacy-relationships-support.d.ts +2 -2
- package/declarations/model/-private/model.d.ts +8 -8
- package/declarations/model/-private/record-state.d.ts +1 -1
- package/declarations/model/-private/references/belongs-to.d.ts +5 -5
- package/declarations/model/-private/references/has-many.d.ts +3 -3
- package/declarations/model/migration-support.d.ts +46 -21
- package/declarations/model-fragments/extensions/fragment-array.d.ts +16 -0
- package/declarations/model-fragments/extensions/fragment.d.ts +15 -0
- package/declarations/model-fragments/hooks/model-for.d.ts +20 -0
- package/declarations/model-fragments/index.d.ts +5 -0
- package/declarations/model-fragments/instance-initializers/fragment-extensions.d.ts +9 -0
- package/declarations/model-fragments/utilities/with-array-defaults.d.ts +15 -0
- package/declarations/model-fragments/utilities/with-fragment-array-defaults.d.ts +20 -0
- package/declarations/model-fragments/utilities/with-fragment-defaults.d.ts +19 -0
- package/declarations/model-fragments/utilities/with-legacy.d.ts +3 -0
- package/declarations/model-fragments.d.ts +9 -0
- package/declarations/model.d.ts +2 -2
- package/declarations/serializer/-private/embedded-records-mixin.d.ts +1 -6
- package/declarations/serializer/-private/transforms/boolean.d.ts +2 -2
- package/declarations/serializer/-private/transforms/date.d.ts +2 -2
- package/declarations/serializer/-private/transforms/number.d.ts +1 -1
- package/declarations/serializer/-private/transforms/string.d.ts +1 -1
- package/declarations/serializer/json-api.d.ts +7 -9
- package/declarations/serializer/json.d.ts +7 -9
- package/declarations/serializer/rest.d.ts +4 -6
- package/declarations/serializer.d.ts +9 -12
- package/dist/{-private-8UmnAf9J.js → -private-B1pSSN52.js} +1 -1
- package/dist/adapter/-private.js +1 -1
- package/dist/adapter/error.js +12 -13
- package/dist/adapter/json-api.js +4 -1
- package/dist/adapter/rest.js +6 -9
- package/dist/adapter.js +4 -6
- package/dist/compat/-private.js +1 -1
- package/dist/compat/builders.js +26 -26
- package/dist/compat/utils.js +17 -18
- package/dist/compat.js +58 -41
- package/dist/{errors-8kD2mSe_.js → errors-CIGPcDvd.js} +87 -65
- package/dist/hooks-QqRnX108.js +74 -0
- package/dist/index.js +195 -0
- package/dist/{json-DziiodPf.js → json-BNrV8EYG.js} +12 -16
- package/dist/model/-private.js +1 -1
- package/dist/model/migration-support.js +54 -24
- package/dist/model-for-CqXsIKws.js +221 -0
- package/dist/model-fragments.js +76 -0
- package/dist/model.js +18 -90
- package/dist/{schema-provider-DQu4Rjco.js → schema-provider-g5MfTj8n.js} +18 -20
- package/dist/{serialize-into-hash-CS0MIv4F.js → serialize-into-hash-BnYvPex3.js} +1 -1
- package/dist/serializer/json-api.js +17 -44
- package/dist/serializer/json.js +1 -1
- package/dist/serializer/rest.js +14 -21
- package/dist/serializer/transform.js +15 -6
- package/dist/serializer.js +9 -13
- package/dist/store.js +3 -0
- package/dist/unpkg/dev/-private-DbaSCSym.js +1205 -0
- package/dist/unpkg/dev/adapter/-private.js +1 -0
- package/dist/unpkg/dev/adapter/error.js +335 -0
- package/dist/unpkg/dev/adapter/json-api.js +271 -0
- package/dist/unpkg/dev/adapter/rest.js +1255 -0
- package/dist/unpkg/dev/adapter.js +1252 -0
- package/dist/unpkg/dev/compat/-private.js +1 -0
- package/dist/unpkg/dev/compat/builders.js +275 -0
- package/dist/unpkg/dev/compat/extensions.js +242 -0
- package/dist/unpkg/dev/compat/utils.js +223 -0
- package/dist/unpkg/dev/compat.js +1146 -0
- package/dist/unpkg/dev/errors-DD96TBEs.js +2591 -0
- package/dist/unpkg/dev/hooks-CqWjNWeL.js +73 -0
- package/dist/unpkg/dev/index.js +197 -0
- package/dist/unpkg/dev/json-CCU-ZQ4b.js +1269 -0
- package/dist/unpkg/dev/model/-private.js +1 -0
- package/dist/unpkg/dev/model/migration-support.js +551 -0
- package/dist/unpkg/dev/model-for-CqXsIKws.js +221 -0
- package/dist/unpkg/dev/model-fragments.js +76 -0
- package/dist/unpkg/dev/model.js +678 -0
- package/dist/unpkg/dev/runtime-BPCpkOf1-BKOwiRJp.js +65 -0
- package/dist/unpkg/dev/schema-provider-B8jiJOYC.js +2229 -0
- package/dist/unpkg/dev/serialize-into-hash-CPAZXrQU.js +259 -0
- package/dist/unpkg/dev/serializer/json-api.js +648 -0
- package/dist/unpkg/dev/serializer/json.js +4 -0
- package/dist/unpkg/dev/serializer/rest.js +1242 -0
- package/dist/unpkg/dev/serializer/transform.js +278 -0
- package/dist/unpkg/dev/serializer.js +248 -0
- package/dist/unpkg/dev/store.js +636 -0
- package/dist/unpkg/dev/util-DvanW33H.js +20 -0
- package/dist/unpkg/dev/utils-BhvS1iTS.js +8 -0
- package/dist/unpkg/dev-deprecated/-private-DbaSCSym.js +1205 -0
- package/dist/unpkg/dev-deprecated/adapter/-private.js +1 -0
- package/dist/unpkg/dev-deprecated/adapter/error.js +335 -0
- package/dist/unpkg/dev-deprecated/adapter/json-api.js +271 -0
- package/dist/unpkg/dev-deprecated/adapter/rest.js +1255 -0
- package/dist/unpkg/dev-deprecated/adapter.js +1252 -0
- package/dist/unpkg/dev-deprecated/compat/-private.js +1 -0
- package/dist/unpkg/dev-deprecated/compat/builders.js +275 -0
- package/dist/unpkg/dev-deprecated/compat/extensions.js +242 -0
- package/dist/unpkg/dev-deprecated/compat/utils.js +223 -0
- package/dist/unpkg/dev-deprecated/compat.js +1146 -0
- package/dist/unpkg/dev-deprecated/errors-DEnabIZj.js +2594 -0
- package/dist/unpkg/dev-deprecated/hooks-CAll-Ets.js +73 -0
- package/dist/unpkg/dev-deprecated/index.js +196 -0
- package/dist/unpkg/dev-deprecated/json-CCU-ZQ4b.js +1269 -0
- package/dist/unpkg/dev-deprecated/model/-private.js +1 -0
- package/dist/unpkg/dev-deprecated/model/migration-support.js +568 -0
- package/dist/unpkg/dev-deprecated/model-for-CqXsIKws.js +221 -0
- package/dist/unpkg/dev-deprecated/model-fragments.js +76 -0
- package/dist/unpkg/dev-deprecated/model.js +682 -0
- package/dist/unpkg/dev-deprecated/runtime-BPCpkOf1-BKOwiRJp.js +65 -0
- package/dist/unpkg/dev-deprecated/schema-provider-k2qXQTBg.js +2254 -0
- package/dist/unpkg/dev-deprecated/serialize-into-hash-CPAZXrQU.js +259 -0
- package/dist/unpkg/dev-deprecated/serializer/json-api.js +648 -0
- package/dist/unpkg/dev-deprecated/serializer/json.js +4 -0
- package/dist/unpkg/dev-deprecated/serializer/rest.js +1242 -0
- package/dist/unpkg/dev-deprecated/serializer/transform.js +278 -0
- package/dist/unpkg/dev-deprecated/serializer.js +248 -0
- package/dist/unpkg/dev-deprecated/store.js +636 -0
- package/dist/unpkg/dev-deprecated/util-CWr5WQOT.js +24 -0
- package/dist/unpkg/dev-deprecated/utils-C9PJehtL.js +12 -0
- package/dist/unpkg/prod/-private-Cvf_97EG.js +970 -0
- package/dist/unpkg/prod/adapter/-private.js +1 -0
- package/dist/unpkg/prod/adapter/error.js +330 -0
- package/dist/unpkg/prod/adapter/json-api.js +266 -0
- package/dist/unpkg/prod/adapter/rest.js +1218 -0
- package/dist/unpkg/prod/adapter.js +1219 -0
- package/dist/unpkg/prod/compat/-private.js +1 -0
- package/dist/unpkg/prod/compat/builders.js +210 -0
- package/dist/unpkg/prod/compat/extensions.js +232 -0
- package/dist/unpkg/prod/compat/utils.js +218 -0
- package/dist/unpkg/prod/compat.js +726 -0
- package/dist/unpkg/prod/errors-CXnfnBfQ.js +2343 -0
- package/dist/unpkg/prod/hooks-DvyWhLNg.js +41 -0
- package/dist/unpkg/prod/index.js +151 -0
- package/dist/unpkg/prod/json-BYrUP8ao.js +1256 -0
- package/dist/unpkg/prod/model/-private.js +1 -0
- package/dist/unpkg/prod/model/migration-support.js +544 -0
- package/dist/unpkg/prod/model-for-CqXsIKws.js +221 -0
- package/dist/unpkg/prod/model-fragments.js +76 -0
- package/dist/unpkg/prod/model.js +593 -0
- package/dist/unpkg/prod/runtime-BPCpkOf1-BKOwiRJp.js +65 -0
- package/dist/unpkg/prod/schema-provider-CHujJvA9.js +1904 -0
- package/dist/unpkg/prod/serialize-into-hash-DYU2egXl.js +215 -0
- package/dist/unpkg/prod/serializer/json-api.js +591 -0
- package/dist/unpkg/prod/serializer/json.js +4 -0
- package/dist/unpkg/prod/serializer/rest.js +1210 -0
- package/dist/unpkg/prod/serializer/transform.js +278 -0
- package/dist/unpkg/prod/serializer.js +248 -0
- package/dist/unpkg/prod/store.js +504 -0
- package/dist/unpkg/prod/util-DvanW33H.js +20 -0
- package/dist/unpkg/prod/utils-BhvS1iTS.js +8 -0
- package/dist/unpkg/prod-deprecated/-private-Cvf_97EG.js +970 -0
- package/dist/unpkg/prod-deprecated/adapter/-private.js +1 -0
- package/dist/unpkg/prod-deprecated/adapter/error.js +330 -0
- package/dist/unpkg/prod-deprecated/adapter/json-api.js +266 -0
- package/dist/unpkg/prod-deprecated/adapter/rest.js +1218 -0
- package/dist/unpkg/prod-deprecated/adapter.js +1219 -0
- package/dist/unpkg/prod-deprecated/compat/-private.js +1 -0
- package/dist/unpkg/prod-deprecated/compat/builders.js +210 -0
- package/dist/unpkg/prod-deprecated/compat/extensions.js +232 -0
- package/dist/unpkg/prod-deprecated/compat/utils.js +218 -0
- package/dist/unpkg/prod-deprecated/compat.js +726 -0
- package/dist/unpkg/prod-deprecated/errors-CG1SPYVg.js +2346 -0
- package/dist/unpkg/prod-deprecated/hooks-BIUBiNGR.js +41 -0
- package/dist/unpkg/prod-deprecated/index.js +150 -0
- package/dist/unpkg/prod-deprecated/json-BYrUP8ao.js +1256 -0
- package/dist/unpkg/prod-deprecated/model/-private.js +1 -0
- package/dist/unpkg/prod-deprecated/model/migration-support.js +561 -0
- package/dist/unpkg/prod-deprecated/model-for-CqXsIKws.js +221 -0
- package/dist/unpkg/prod-deprecated/model-fragments.js +76 -0
- package/dist/unpkg/prod-deprecated/model.js +596 -0
- package/dist/unpkg/prod-deprecated/runtime-BPCpkOf1-BKOwiRJp.js +65 -0
- package/dist/unpkg/prod-deprecated/schema-provider-BJ4TWnZf.js +1947 -0
- package/dist/unpkg/prod-deprecated/serialize-into-hash-DYU2egXl.js +215 -0
- package/dist/unpkg/prod-deprecated/serializer/json-api.js +591 -0
- package/dist/unpkg/prod-deprecated/serializer/json.js +4 -0
- package/dist/unpkg/prod-deprecated/serializer/rest.js +1210 -0
- package/dist/unpkg/prod-deprecated/serializer/transform.js +278 -0
- package/dist/unpkg/prod-deprecated/serializer.js +248 -0
- package/dist/unpkg/prod-deprecated/store.js +504 -0
- package/dist/unpkg/prod-deprecated/util-B6cn-i93.js +23 -0
- package/dist/unpkg/prod-deprecated/utils-BUWwQwCh.js +11 -0
- package/logos/README.md +2 -2
- package/logos/logo-yellow-slab.svg +1 -0
- package/logos/word-mark-black.svg +1 -0
- package/logos/word-mark-white.svg +1 -0
- package/package.json +14 -6
- package/logos/NCC-1701-a-blue.svg +0 -4
- package/logos/NCC-1701-a-gold.svg +0 -4
- package/logos/NCC-1701-a-gold_100.svg +0 -1
- package/logos/NCC-1701-a-gold_base-64.txt +0 -1
- package/logos/NCC-1701-a.svg +0 -4
- package/logos/docs-badge.svg +0 -2
- package/logos/ember-data-logo-dark.svg +0 -12
- package/logos/ember-data-logo-light.svg +0 -12
- package/logos/social1.png +0 -0
- package/logos/social2.png +0 -0
- package/logos/warp-drive-logo-dark.svg +0 -4
- package/logos/warp-drive-logo-gold.svg +0 -4
|
@@ -0,0 +1,1146 @@
|
|
|
1
|
+
import { getOwner } from '@ember/application';
|
|
2
|
+
import { recordIdentifierFor } from '@warp-drive/core';
|
|
3
|
+
import { assertPrivateStore, waitFor, _deprecatingNormalize } from '@warp-drive/core/store/-private';
|
|
4
|
+
import '@warp-drive/core/reactive/-private';
|
|
5
|
+
import { p as payloadIsNotBlank, n as normalizeResponseHelper, i as iterateData, F as FetchManager, S as SaveOp, a as assertIdentifierHasId, b as SnapshotRecordArray } from "./-private-DbaSCSym.js";
|
|
6
|
+
function _findHasMany(adapter, store, identifier, link, relationship, options) {
|
|
7
|
+
const promise = Promise.resolve().then(() => {
|
|
8
|
+
const snapshot = store._fetchManager.createSnapshot(identifier, options);
|
|
9
|
+
const useLink = !link || typeof link === 'string';
|
|
10
|
+
const relatedLink = useLink ? link : link.href;
|
|
11
|
+
(test => {
|
|
12
|
+
if (!test) {
|
|
13
|
+
throw new Error(`Attempted to load a hasMany relationship from a specified 'link' in the original payload, but the specified link is empty. You must provide a valid 'link' in the original payload to use 'findHasMany'`);
|
|
14
|
+
}
|
|
15
|
+
})(relatedLink);
|
|
16
|
+
(test => {
|
|
17
|
+
if (!test) {
|
|
18
|
+
throw new Error(`Expected the adapter to implement 'findHasMany' but it does not`);
|
|
19
|
+
}
|
|
20
|
+
})(typeof adapter.findHasMany === 'function');
|
|
21
|
+
return adapter.findHasMany(store, snapshot, relatedLink, relationship);
|
|
22
|
+
});
|
|
23
|
+
return promise.then(adapterPayload => {
|
|
24
|
+
(test => {
|
|
25
|
+
if (!test) {
|
|
26
|
+
throw new Error(`You made a 'findHasMany' request for a ${identifier.type}'s '${relationship.name}' relationship, using link '${JSON.stringify(link)}' , but the adapter's response did not have any data`);
|
|
27
|
+
}
|
|
28
|
+
})(payloadIsNotBlank(adapterPayload));
|
|
29
|
+
const modelClass = store.modelFor(relationship.type);
|
|
30
|
+
const serializer = store.serializerFor(relationship.type);
|
|
31
|
+
let payload = normalizeResponseHelper(serializer, store, modelClass, adapterPayload, null, 'findHasMany');
|
|
32
|
+
(test => {
|
|
33
|
+
if (!test) {
|
|
34
|
+
throw new Error(`fetched the hasMany relationship '${relationship.name}' for ${identifier.type}:${identifier.id} with link '${JSON.stringify(link)}', but no data member is present in the response. If no data exists, the response should set { data: [] }`);
|
|
35
|
+
}
|
|
36
|
+
})('data' in payload && Array.isArray(payload.data));
|
|
37
|
+
payload = syncRelationshipDataFromLink(store, payload, identifier, relationship);
|
|
38
|
+
return store._push(payload, true);
|
|
39
|
+
}, null);
|
|
40
|
+
}
|
|
41
|
+
function _findBelongsTo(store, identifier, link, relationship, options) {
|
|
42
|
+
const promise = Promise.resolve().then(() => {
|
|
43
|
+
const adapter = store.adapterFor(identifier.type);
|
|
44
|
+
(test => {
|
|
45
|
+
if (!test) {
|
|
46
|
+
throw new Error(`You tried to load a belongsTo relationship but you have no adapter (for ${identifier.type})`);
|
|
47
|
+
}
|
|
48
|
+
})(adapter);
|
|
49
|
+
(test => {
|
|
50
|
+
if (!test) {
|
|
51
|
+
throw new Error(`You tried to load a belongsTo relationship from a specified 'link' in the original payload but your adapter does not implement 'findBelongsTo'`);
|
|
52
|
+
}
|
|
53
|
+
})(typeof adapter.findBelongsTo === 'function');
|
|
54
|
+
const snapshot = store._fetchManager.createSnapshot(identifier, options);
|
|
55
|
+
const useLink = !link || typeof link === 'string';
|
|
56
|
+
const relatedLink = useLink ? link : link.href;
|
|
57
|
+
(test => {
|
|
58
|
+
if (!test) {
|
|
59
|
+
throw new Error(`Attempted to load a belongsTo relationship from a specified 'link' in the original payload, but the specified link is empty. You must provide a valid 'link' in the original payload to use 'findBelongsTo'`);
|
|
60
|
+
}
|
|
61
|
+
})(relatedLink);
|
|
62
|
+
return adapter.findBelongsTo(store, snapshot, relatedLink, relationship);
|
|
63
|
+
});
|
|
64
|
+
return promise.then(adapterPayload => {
|
|
65
|
+
const modelClass = store.modelFor(relationship.type);
|
|
66
|
+
const serializer = store.serializerFor(relationship.type);
|
|
67
|
+
let payload = normalizeResponseHelper(serializer, store, modelClass, adapterPayload, null, 'findBelongsTo');
|
|
68
|
+
(test => {
|
|
69
|
+
if (!test) {
|
|
70
|
+
throw new Error(`fetched the belongsTo relationship '${relationship.name}' for ${identifier.type}:${identifier.id} with link '${JSON.stringify(link)}', but no data member is present in the response. If no data exists, the response should set { data: null }`);
|
|
71
|
+
}
|
|
72
|
+
})('data' in payload && (payload.data === null || typeof payload.data === 'object' && !Array.isArray(payload.data)));
|
|
73
|
+
if (!payload.data && !payload.links && !payload.meta) {
|
|
74
|
+
return null;
|
|
75
|
+
}
|
|
76
|
+
payload = syncRelationshipDataFromLink(store, payload, identifier, relationship);
|
|
77
|
+
return store._push(payload, true);
|
|
78
|
+
}, null);
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// sync
|
|
82
|
+
// iterate over records in payload.data
|
|
83
|
+
// for each record
|
|
84
|
+
// assert that record.relationships[inverse] is either undefined (so we can fix it)
|
|
85
|
+
// or provide a data: {id, type} that matches the record that requested it
|
|
86
|
+
// return the relationship data for the parent
|
|
87
|
+
function syncRelationshipDataFromLink(store, payload, parentIdentifier, relationship) {
|
|
88
|
+
// ensure the right hand side (incoming payload) points to the parent record that
|
|
89
|
+
// requested this relationship
|
|
90
|
+
const relationshipData = payload.data ? iterateData(payload.data, (data, index) => {
|
|
91
|
+
const {
|
|
92
|
+
id,
|
|
93
|
+
type
|
|
94
|
+
} = data;
|
|
95
|
+
ensureRelationshipIsSetToParent(data, parentIdentifier, store, relationship, index);
|
|
96
|
+
return {
|
|
97
|
+
id,
|
|
98
|
+
type
|
|
99
|
+
};
|
|
100
|
+
}) : null;
|
|
101
|
+
const relatedDataHash = {};
|
|
102
|
+
if ('meta' in payload) {
|
|
103
|
+
relatedDataHash.meta = payload.meta;
|
|
104
|
+
}
|
|
105
|
+
if ('links' in payload) {
|
|
106
|
+
relatedDataHash.links = payload.links;
|
|
107
|
+
}
|
|
108
|
+
if ('data' in payload) {
|
|
109
|
+
relatedDataHash.data = relationshipData;
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// now, push the left hand side (the parent record) to ensure things are in sync, since
|
|
113
|
+
// the payload will be pushed with store._push
|
|
114
|
+
const parentPayload = {
|
|
115
|
+
id: parentIdentifier.id,
|
|
116
|
+
type: parentIdentifier.type,
|
|
117
|
+
relationships: {
|
|
118
|
+
[relationship.name]: relatedDataHash
|
|
119
|
+
}
|
|
120
|
+
};
|
|
121
|
+
if (!Array.isArray(payload.included)) {
|
|
122
|
+
payload.included = [];
|
|
123
|
+
}
|
|
124
|
+
payload.included.push(parentPayload);
|
|
125
|
+
return payload;
|
|
126
|
+
}
|
|
127
|
+
function ensureRelationshipIsSetToParent(payload, parentIdentifier, store, parentRelationship, index) {
|
|
128
|
+
const {
|
|
129
|
+
id,
|
|
130
|
+
type
|
|
131
|
+
} = payload;
|
|
132
|
+
if (!payload.relationships) {
|
|
133
|
+
payload.relationships = {};
|
|
134
|
+
}
|
|
135
|
+
const {
|
|
136
|
+
relationships
|
|
137
|
+
} = payload;
|
|
138
|
+
const inverse = getInverse(store, parentIdentifier, parentRelationship, type);
|
|
139
|
+
if (inverse) {
|
|
140
|
+
const {
|
|
141
|
+
inverseKey,
|
|
142
|
+
kind
|
|
143
|
+
} = inverse;
|
|
144
|
+
const relationshipData = relationships[inverseKey]?.data;
|
|
145
|
+
{
|
|
146
|
+
if (typeof relationshipData !== 'undefined' && !relationshipDataPointsToParent(relationshipData, parentIdentifier)) {
|
|
147
|
+
const inspect = function inspect(thing) {
|
|
148
|
+
return `'${JSON.stringify(thing)}'`;
|
|
149
|
+
};
|
|
150
|
+
const quotedType = inspect(type);
|
|
151
|
+
const quotedInverse = inspect(inverseKey);
|
|
152
|
+
const expected = inspect({
|
|
153
|
+
id: parentIdentifier.id,
|
|
154
|
+
type: parentIdentifier.type
|
|
155
|
+
});
|
|
156
|
+
const expectedModel = `${parentIdentifier.type}:${parentIdentifier.id}`;
|
|
157
|
+
const got = inspect(relationshipData);
|
|
158
|
+
const prefix = typeof index === 'number' ? `data[${index}]` : `data`;
|
|
159
|
+
const path = `${prefix}.relationships.${inverseKey}.data`;
|
|
160
|
+
const data = Array.isArray(relationshipData) ? relationshipData[0] : relationshipData;
|
|
161
|
+
const other = data ? `<${data.type}:${data.id}>` : null;
|
|
162
|
+
const relationshipFetched = `${expectedModel}.${parentRelationship.kind}("${parentRelationship.name}")`;
|
|
163
|
+
const includedRecord = `<${type}:${id}>`;
|
|
164
|
+
const message = [`Encountered mismatched relationship: Ember Data expected ${path} in the payload from ${relationshipFetched} to include ${expected} but got ${got} instead.\n`, `The ${includedRecord} record loaded at ${prefix} in the payload specified ${other} as its ${quotedInverse}, but should have specified ${expectedModel} (the record the relationship is being loaded from) as its ${quotedInverse} instead.`, `This could mean that the response for ${relationshipFetched} may have accidentally returned ${quotedType} records that aren't related to ${expectedModel} and could be related to a different ${parentIdentifier.type} record instead.`, `Ember Data has corrected the ${includedRecord} record's ${quotedInverse} relationship to ${expectedModel} so that ${relationshipFetched} will include ${includedRecord}.`, `Please update the response from the server or change your serializer to either ensure that the response for only includes ${quotedType} records that specify ${expectedModel} as their ${quotedInverse}, or omit the ${quotedInverse} relationship from the response.`].join('\n');
|
|
165
|
+
(test => {
|
|
166
|
+
{
|
|
167
|
+
throw new Error(message);
|
|
168
|
+
}
|
|
169
|
+
})();
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
if (kind !== 'hasMany' || typeof relationshipData !== 'undefined') {
|
|
173
|
+
relationships[inverseKey] = relationships[inverseKey] || {};
|
|
174
|
+
relationships[inverseKey].data = fixRelationshipData(relationshipData ?? null, kind, parentIdentifier);
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
function inverseForRelationship(store, identifier, key) {
|
|
179
|
+
const definition = store.schema.fields(identifier).get(key);
|
|
180
|
+
if (!definition) {
|
|
181
|
+
return null;
|
|
182
|
+
}
|
|
183
|
+
(test => {
|
|
184
|
+
if (!test) {
|
|
185
|
+
throw new Error(`Expected the field definition to be a relationship`);
|
|
186
|
+
}
|
|
187
|
+
})(definition.kind === 'hasMany' || definition.kind === 'belongsTo');
|
|
188
|
+
(test => {
|
|
189
|
+
if (!test) {
|
|
190
|
+
throw new Error(`Expected the relationship defintion to specify the inverse type or null.`);
|
|
191
|
+
}
|
|
192
|
+
})(definition.options?.inverse === null || typeof definition.options?.inverse === 'string' && definition.options.inverse.length > 0);
|
|
193
|
+
return definition.options.inverse;
|
|
194
|
+
}
|
|
195
|
+
function getInverse(store, parentIdentifier, parentRelationship, type) {
|
|
196
|
+
const {
|
|
197
|
+
name: lhs_relationshipName
|
|
198
|
+
} = parentRelationship;
|
|
199
|
+
const {
|
|
200
|
+
type: parentType
|
|
201
|
+
} = parentIdentifier;
|
|
202
|
+
const inverseKey = inverseForRelationship(store, {
|
|
203
|
+
type: parentType
|
|
204
|
+
}, lhs_relationshipName);
|
|
205
|
+
if (inverseKey) {
|
|
206
|
+
const definition = store.schema.fields({
|
|
207
|
+
type
|
|
208
|
+
}).get(inverseKey);
|
|
209
|
+
(test => {
|
|
210
|
+
if (!test) {
|
|
211
|
+
throw new Error(`Expected the field definition to be a relationship`);
|
|
212
|
+
}
|
|
213
|
+
})(definition && (definition.kind === 'hasMany' || definition.kind === 'belongsTo'));
|
|
214
|
+
return {
|
|
215
|
+
inverseKey,
|
|
216
|
+
kind: definition.kind
|
|
217
|
+
};
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
function relationshipDataPointsToParent(relationshipData, identifier) {
|
|
221
|
+
if (relationshipData === null) {
|
|
222
|
+
return false;
|
|
223
|
+
}
|
|
224
|
+
if (Array.isArray(relationshipData)) {
|
|
225
|
+
if (relationshipData.length === 0) {
|
|
226
|
+
return false;
|
|
227
|
+
}
|
|
228
|
+
for (let i = 0; i < relationshipData.length; i++) {
|
|
229
|
+
const entry = relationshipData[i];
|
|
230
|
+
if (validateRelationshipEntry(entry, identifier)) {
|
|
231
|
+
return true;
|
|
232
|
+
}
|
|
233
|
+
}
|
|
234
|
+
} else {
|
|
235
|
+
return validateRelationshipEntry(relationshipData, identifier);
|
|
236
|
+
}
|
|
237
|
+
return false;
|
|
238
|
+
}
|
|
239
|
+
function fixRelationshipData(relationshipData, relationshipKind, {
|
|
240
|
+
id,
|
|
241
|
+
type
|
|
242
|
+
}) {
|
|
243
|
+
const parentRelationshipData = {
|
|
244
|
+
id,
|
|
245
|
+
type
|
|
246
|
+
};
|
|
247
|
+
let payload = null;
|
|
248
|
+
if (relationshipKind === 'hasMany') {
|
|
249
|
+
const relData = relationshipData || [];
|
|
250
|
+
if (relationshipData) {
|
|
251
|
+
(test => {
|
|
252
|
+
if (!test) {
|
|
253
|
+
throw new Error('expected the relationship data to be an array');
|
|
254
|
+
}
|
|
255
|
+
})(Array.isArray(relationshipData));
|
|
256
|
+
// these arrays could be massive so this is better than filter
|
|
257
|
+
// Note: this is potentially problematic if type/id are not in the
|
|
258
|
+
// same state of normalization.
|
|
259
|
+
const found = relationshipData.find(v => {
|
|
260
|
+
return v.type === parentRelationshipData.type && v.id === parentRelationshipData.id;
|
|
261
|
+
});
|
|
262
|
+
if (!found) {
|
|
263
|
+
relData.push(parentRelationshipData);
|
|
264
|
+
}
|
|
265
|
+
} else {
|
|
266
|
+
relData.push(parentRelationshipData);
|
|
267
|
+
}
|
|
268
|
+
payload = relData;
|
|
269
|
+
} else {
|
|
270
|
+
const relData = relationshipData || {};
|
|
271
|
+
Object.assign(relData, parentRelationshipData);
|
|
272
|
+
payload = relData;
|
|
273
|
+
}
|
|
274
|
+
return payload;
|
|
275
|
+
}
|
|
276
|
+
function validateRelationshipEntry({
|
|
277
|
+
id
|
|
278
|
+
}, {
|
|
279
|
+
id: parentModelID
|
|
280
|
+
}) {
|
|
281
|
+
return !!id && id.toString() === parentModelID;
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
/*
|
|
285
|
+
These are the runtime implementations for the javascript macros that have
|
|
286
|
+
runtime implementations.
|
|
287
|
+
|
|
288
|
+
Not every macro has a runtime implementation, some only make sense in the
|
|
289
|
+
build and always run there.
|
|
290
|
+
|
|
291
|
+
Even when we have runtime implementations, we are still careful to emit static
|
|
292
|
+
errors during the build wherever possible, and runtime errors when necessary,
|
|
293
|
+
so that you're not surprised when you switch from runtime-mode to compile-time
|
|
294
|
+
mode.
|
|
295
|
+
*/
|
|
296
|
+
|
|
297
|
+
// This is here as a compile target for `getConfig` and `getOwnConfig` when
|
|
298
|
+
// we're in runtime mode. This is not public API to call from your own code.
|
|
299
|
+
function config(packageRoot) {
|
|
300
|
+
return runtimeConfig.packages[packageRoot];
|
|
301
|
+
}
|
|
302
|
+
function getGlobalConfig() {
|
|
303
|
+
return runtimeConfig.global;
|
|
304
|
+
}
|
|
305
|
+
const runtimeConfig = initializeRuntimeMacrosConfig();
|
|
306
|
+
|
|
307
|
+
// this exists to be targeted by our babel plugin
|
|
308
|
+
function initializeRuntimeMacrosConfig() {
|
|
309
|
+
return {
|
|
310
|
+
"packages": {},
|
|
311
|
+
"global": {
|
|
312
|
+
"@embroider/macros": {
|
|
313
|
+
"isTesting": false
|
|
314
|
+
},
|
|
315
|
+
"WarpDrive": {
|
|
316
|
+
"debug": {
|
|
317
|
+
"DEBUG_RELATIONSHIP_NOTIFICATIONS": false,
|
|
318
|
+
"LOG_CACHE": false,
|
|
319
|
+
"LOG_CACHE_POLICY": false,
|
|
320
|
+
"LOG_GRAPH": false,
|
|
321
|
+
"LOG_IDENTIFIERS": false,
|
|
322
|
+
"LOG_INSTANCE_CACHE": false,
|
|
323
|
+
"LOG_METRIC_COUNTS": false,
|
|
324
|
+
"LOG_MUTATIONS": false,
|
|
325
|
+
"LOG_NOTIFICATIONS": false,
|
|
326
|
+
"LOG_OPERATIONS": false,
|
|
327
|
+
"LOG_PAYLOADS": false,
|
|
328
|
+
"LOG_REACT_SIGNAL_INTEGRATION": false,
|
|
329
|
+
"LOG_REQUESTS": false,
|
|
330
|
+
"LOG_REQUEST_STATUS": false,
|
|
331
|
+
"__INTERNAL_LOG_NATIVE_MAP_SET_COUNTS": false
|
|
332
|
+
},
|
|
333
|
+
"polyfillUUID": false,
|
|
334
|
+
"includeDataAdapter": true,
|
|
335
|
+
"compatWith": null,
|
|
336
|
+
"deprecations": {
|
|
337
|
+
"DEPRECATE_CATCH_ALL": true,
|
|
338
|
+
"DEPRECATE_COMPUTED_CHAINS": true,
|
|
339
|
+
"DEPRECATE_EMBER_INFLECTOR": true,
|
|
340
|
+
"DEPRECATE_LEGACY_IMPORTS": true,
|
|
341
|
+
"DEPRECATE_MANY_ARRAY_DUPLICATES": true,
|
|
342
|
+
"DEPRECATE_NON_STRICT_ID": true,
|
|
343
|
+
"DEPRECATE_NON_STRICT_TYPES": true,
|
|
344
|
+
"DEPRECATE_NON_UNIQUE_PAYLOADS": true,
|
|
345
|
+
"DEPRECATE_RELATIONSHIP_REMOTE_UPDATE_CLEARING_LOCAL_STATE": true,
|
|
346
|
+
"DEPRECATE_STORE_EXTENDS_EMBER_OBJECT": true,
|
|
347
|
+
"DEPRECATE_TRACKING_PACKAGE": true,
|
|
348
|
+
"DISABLE_7X_DEPRECATIONS": true,
|
|
349
|
+
"ENABLE_LEGACY_REQUEST_METHODS": true,
|
|
350
|
+
"ENABLE_LEGACY_SCHEMA_SERVICE": true
|
|
351
|
+
},
|
|
352
|
+
"features": {
|
|
353
|
+
"ENFORCE_STRICT_RESOURCE_FINALIZATION": false,
|
|
354
|
+
"JSON_API_CACHE_VALIDATION_ERRORS": false,
|
|
355
|
+
"SAMPLE_FEATURE_FLAG": null
|
|
356
|
+
},
|
|
357
|
+
"activeLogging": {
|
|
358
|
+
"DEBUG_RELATIONSHIP_NOTIFICATIONS": true,
|
|
359
|
+
"LOG_CACHE": true,
|
|
360
|
+
"LOG_CACHE_POLICY": true,
|
|
361
|
+
"LOG_GRAPH": true,
|
|
362
|
+
"LOG_IDENTIFIERS": true,
|
|
363
|
+
"LOG_INSTANCE_CACHE": true,
|
|
364
|
+
"LOG_METRIC_COUNTS": true,
|
|
365
|
+
"LOG_MUTATIONS": true,
|
|
366
|
+
"LOG_NOTIFICATIONS": true,
|
|
367
|
+
"LOG_OPERATIONS": true,
|
|
368
|
+
"LOG_PAYLOADS": true,
|
|
369
|
+
"LOG_REACT_SIGNAL_INTEGRATION": true,
|
|
370
|
+
"LOG_REQUESTS": true,
|
|
371
|
+
"LOG_REQUEST_STATUS": true,
|
|
372
|
+
"__INTERNAL_LOG_NATIVE_MAP_SET_COUNTS": true
|
|
373
|
+
},
|
|
374
|
+
"env": {
|
|
375
|
+
"TESTING": true,
|
|
376
|
+
"PRODUCTION": false,
|
|
377
|
+
"DEBUG": true,
|
|
378
|
+
"IS_RECORDING": false,
|
|
379
|
+
"IS_CI": true,
|
|
380
|
+
"SHOULD_RECORD": false
|
|
381
|
+
}
|
|
382
|
+
}
|
|
383
|
+
}
|
|
384
|
+
};
|
|
385
|
+
}
|
|
386
|
+
function updaterMethods() {
|
|
387
|
+
return {
|
|
388
|
+
config,
|
|
389
|
+
getGlobalConfig,
|
|
390
|
+
setConfig(packageRoot, value) {
|
|
391
|
+
runtimeConfig.packages[packageRoot] = value;
|
|
392
|
+
},
|
|
393
|
+
setGlobalConfig(key, value) {
|
|
394
|
+
runtimeConfig.global[key] = value;
|
|
395
|
+
}
|
|
396
|
+
};
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
// this is how runtime config can get injected at boot. I'm not sure yet if this
|
|
400
|
+
// should be public API, but we certainly need it internally to set things like
|
|
401
|
+
// the global fastboot.isRunning.
|
|
402
|
+
//
|
|
403
|
+
// consumers of this API push a function onto
|
|
404
|
+
// window._embroider_macros_runtime_config. The function is given four methods
|
|
405
|
+
// which allow it to read and write the per-package and global configs. The
|
|
406
|
+
// reason for allowing both read & write is that merging strategies are up to
|
|
407
|
+
// each consumers -- read first, then merge, then write.
|
|
408
|
+
//
|
|
409
|
+
// For an example user of this API, see where we generate
|
|
410
|
+
// embroider_macros_fastboot_init.js' in @embroider/core.
|
|
411
|
+
let updaters = typeof window !== 'undefined' ? window._embroider_macros_runtime_config : undefined;
|
|
412
|
+
if (updaters) {
|
|
413
|
+
let methods = updaterMethods();
|
|
414
|
+
for (let updater of updaters) {
|
|
415
|
+
updater(methods);
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
// The store operations this handler knows how to service via the legacy
// adapter/serializer flow. Anything else is forwarded down the handler chain.
const PotentialLegacyOperations = new Set(['findRecord', 'findAll', 'query', 'queryRecord', 'findBelongsTo', 'findHasMany', 'updateRecord', 'createRecord', 'deleteRecord']);

/**
 * A request handler that services "legacy" store operations through the
 * adapter/serializer infrastructure. Requests carrying an explicit url,
 * lacking an op, or using an unrecognized op are passed to `next` untouched.
 */
const LegacyNetworkHandler = {
  request(context, next) {
    const { request } = context;

    // not a legacy request? move on
    if (request.url || !request.op || !PotentialLegacyOperations.has(request.op)) {
      return next(request);
    }
    const { store } = request;
    if (!store._fetchManager) {
      store._fetchManager = new FetchManager(store);
    }
    switch (request.op) {
      case 'findRecord':
        return findRecord(context);
      case 'findAll':
        return findAll(context);
      case 'query':
        return query(context);
      case 'queryRecord':
        return queryRecord(context);
      case 'findBelongsTo':
        return findBelongsTo(context);
      case 'findHasMany':
        return findHasMany(context);
      // all three mutation ops share a single save path
      case 'updateRecord':
      case 'createRecord':
      case 'deleteRecord':
        return saveRecord(context);
      default:
        return next(request);
    }
  }
};
|
|
455
|
+
/**
 * Services a legacy `findBelongsTo` request.
 *
 * Two shapes are supported:
 * - link-based (`useLink`): delegates to `_findBelongsTo` using the
 *   relationship's `related` link.
 * - identifier-based: fetches the single related identifier through the
 *   FetchManager, respecting `options.reload` and deduping in-flight fetches.
 */
function findBelongsTo(context) {
  const {
    store,
    data,
    records: identifiers
  } = context.request;
  const {
    options,
    record,
    links,
    useLink,
    field
  } = data;
  // a belongsTo has at most one related identifier
  const identifier = identifiers?.[0];

  // short circuit if we are already loading
  const pendingRequest = identifier && store._fetchManager.getPendingFetch(identifier, options);
  if (pendingRequest) {
    return pendingRequest;
  }
  if (useLink) {
    // inlined assertion: a link-based fetch requires a `related` link
    (test => {
      if (!test) {
        throw new Error(`Expected a related link when calling store.findBelongsTo, found ${String(links)}`);
      }
    })(links && links.related);
    return _findBelongsTo(store, record, links.related, field, options);
  }
  // inlined assertion: identifier-based fetch requires exactly one identifier
  (test => {
    if (!test) {
      throw new Error(`Expected an identifier`);
    }
  })(Array.isArray(identifiers) && identifiers.length === 1);
  const manager = store._fetchManager;
  assertIdentifierHasId(identifier);
  // reload forces a fresh fetch; otherwise only fetch when data is needed
  return options.reload ? manager.scheduleFetch(identifier, options, context.request) : manager.fetchDataIfNeededForIdentifier(identifier, options, context.request);
}
|
|
492
|
+
/**
 * Services a legacy `findHasMany` request.
 *
 * Link-based relationships delegate to the adapter's `findHasMany` via
 * `_findHasMany`; identifier-based relationships fetch each member through
 * the FetchManager and resolve once all member fetches settle.
 */
function findHasMany(context) {
  const {
    store,
    data,
    records: identifiers
  } = context.request;
  const {
    options,
    record,
    links,
    useLink,
    field
  } = data;

  // link case
  if (useLink) {
    const adapter = store.adapterFor(record.type);
    /*
      If a relationship was originally populated by the adapter as a link
      (as opposed to a list of IDs), this method is called when the
      relationship is fetched.
      The link (which is usually a URL) is passed through unchanged, so the
      adapter can make whatever request it wants.
      The usual use-case is for the server to register a URL as a link, and
      then use that URL in the future to make a request for the relationship.
    */
    (test => {
      if (!test) {
        throw new Error(`You tried to load a hasMany relationship but you have no adapter (for ${record.type})`);
      }
    })(adapter);
    (test => {
      if (!test) {
        throw new Error(`You tried to load a hasMany relationship from a specified 'link' in the original payload but your adapter does not implement 'findHasMany'`);
      }
    })(typeof adapter.findHasMany === 'function');
    (test => {
      if (!test) {
        throw new Error(`Expected a related link when calling store.findHasMany, found ${String(links)}`);
      }
    })(links && links.related);
    return _findHasMany(adapter, store, record, links.related, field, options);
  }

  // identifiers case
  (test => {
    if (!test) {
      throw new Error(`Expected an array of identifiers to fetch`);
    }
  })(Array.isArray(identifiers));
  const fetches = new Array(identifiers.length);
  const manager = store._fetchManager;
  for (let i = 0; i < identifiers.length; i++) {
    const identifier = identifiers[i];
    // TODO we probably can be lenient here and return from cache for the isNew case
    assertIdentifierHasId(identifier);
    // reload forces a fresh fetch per member; otherwise fetch only if needed
    fetches[i] = options.reload ? manager.scheduleFetch(identifier, options, context.request) : manager.fetchDataIfNeededForIdentifier(identifier, options, context.request);
  }
  return Promise.all(fetches);
}
|
|
552
|
+
/**
 * Services the legacy `createRecord` / `updateRecord` / `deleteRecord`
 * operations: marks the cache as committing, schedules the save through the
 * FetchManager, pushes the resulting payload into the cache on success, and
 * routes failures through `adapterDidInvalidate` before rethrowing.
 */
function saveRecord(context) {
  const {
    store,
    data,
    op: operation
  } = context.request;
  const {
    options,
    record: identifier
  } = data;
  assertPrivateStore(store);
  // let the cache know a commit for this identifier is in flight
  store.cache.willCommit(identifier, context);
  // tag the options with the originating operation so the FetchManager
  // knows which adapter method to invoke
  const saveOptions = Object.assign({
    [SaveOp]: operation
  }, options);
  const fetchManagerPromise = store._fetchManager.scheduleSave(identifier, saveOptions);
  return fetchManagerPromise.then(payload => {
    let result;
    store._join(() => {
      // @ts-expect-error we don't have access to a response in legacy
      result = store.cache.didCommit(identifier, {
        request: context.request,
        content: payload
      });
    });

    // blatantly lie if we were a createRecord request
    // to give some semblance of cache-control to the
    // CachePolicy while legacy is still around
    if (store.lifetimes?.didRequest && operation === 'createRecord') {
      store.lifetimes.didRequest(context.request, {
        status: 201
      }, null, store);
    }
    return store.peekRecord(result.data);
  }).catch(e => {
    // normalize non-Error rejections into real Errors before surfacing them
    let err = e;
    if (!e) {
      err = new Error(`Unknown Error Occurred During Request`);
    } else if (typeof e === 'string') {
      err = new Error(e);
    }
    adapterDidInvalidate(store, identifier, err);
    throw err;
  });
}
|
|
598
|
+
/**
 * Handles a failed save: for adapter `InvalidError`s, extracts JSON:API
 * style error objects (via the serializer's deprecated `extractErrors` hook
 * when present) and notifies the cache that the commit was rejected.
 */
function adapterDidInvalidate(store, identifier, error) {
  if (error && error.isAdapterError === true && error.code === 'InvalidError') {
    const serializer = store.serializerFor(identifier.type);

    // TODO @deprecate extractErrors being called
    // TODO remove extractErrors from the default serializers.
    if (serializer && typeof serializer.extractErrors === 'function') {
      const errorsHash = serializer.extractErrors(store, store.modelFor(identifier.type), error, identifier.id);
      error.errors = errorsHashToArray(errorsHash);
    }
  }
  const cache = store.cache;
  if (error.errors) {
    (test => {
      if (!test) {
        throw new Error(`Expected the cache in use by resource ${String(identifier)} to have a getErrors(identifier) method for retrieving errors.`);
      }
    })(typeof cache.getErrors === 'function');
    let jsonApiErrors = error.errors;
    // guarantee at least one error object so the rejection is recorded
    if (jsonApiErrors.length === 0) {
      jsonApiErrors = [{
        title: 'Invalid Error',
        detail: '',
        source: {
          pointer: '/data'
        }
      }];
    }
    cache.commitWasRejected(identifier, jsonApiErrors);
  } else {
    cache.commitWasRejected(identifier);
  }
}
|
|
631
|
+
/**
 * Wraps a non-array value in a single-element array; arrays pass through
 * unchanged (same reference).
 */
function makeArray(value) {
  if (Array.isArray(value)) {
    return value;
  }
  return [value];
}

// errors keyed under this name apply to the document itself rather than a
// specific attribute, so they point at `/data` instead of an attribute path
const PRIMARY_ATTRIBUTE_KEY = 'base';

/**
 * Converts a legacy errors hash (attribute name -> message or messages)
 * into an array of JSON:API error objects with `source.pointer` set to the
 * offending attribute (or to the document for `base` errors).
 */
function errorsHashToArray(errors) {
  const result = [];
  if (!errors) {
    return result;
  }
  for (const key of Object.keys(errors)) {
    const messages = makeArray(errors[key]);
    const isDocumentError = key === PRIMARY_ATTRIBUTE_KEY;
    const title = isDocumentError ? 'Invalid Document' : 'Invalid Attribute';
    const pointer = isDocumentError ? `/data` : `/data/attributes/${key}`;
    for (const detail of messages) {
      result.push({
        title,
        detail,
        source: {
          pointer
        }
      });
    }
  }
  return result;
}
|
|
659
|
+
/**
 * Services a legacy `findRecord` request.
 *
 * Resolution strategy:
 * 1. not yet loaded   -> fetch (deduped through the FetchManager)
 * 2. `options.reload` -> schedule a blocking refetch
 * 3. otherwise        -> consult the adapter's `shouldReloadRecord` /
 *    `shouldBackgroundReloadRecord` hooks, possibly kicking off a blocking
 *    or background refetch, then resolve with the cached record.
 */
function findRecord(context) {
  const {
    store,
    data
  } = context.request;
  const {
    record: identifier,
    options
  } = data;
  assertPrivateStore(store);
  let promise;

  // if not loaded start loading
  if (!store._instanceCache.recordIsLoaded(identifier)) {
    promise = store._fetchManager.fetchDataIfNeededForIdentifier(identifier, options, context.request);

    // Refetch if the reload option is passed
  } else if (options.reload) {
    assertIdentifierHasId(identifier);
    promise = store._fetchManager.scheduleFetch(identifier, options, context.request);
  } else {
    // the snapshot is created lazily (note the inline assignments below):
    // only when an adapter hook actually needs to inspect it
    let snapshot = null;
    const adapter = store.adapterFor(identifier.type);

    // Refetch the record if the adapter thinks the record is stale
    if (typeof options.reload === 'undefined' && adapter.shouldReloadRecord && adapter.shouldReloadRecord(store, snapshot = store._fetchManager.createSnapshot(identifier, options))) {
      assertIdentifierHasId(identifier);
      {
        promise = store._fetchManager.scheduleFetch(identifier, Object.assign({}, options, {
          reload: true
        }), context.request);
      }
    } else {
      // Trigger the background refetch if backgroundReload option is passed
      if (options.backgroundReload !== false && (options.backgroundReload || !adapter.shouldBackgroundReloadRecord || adapter.shouldBackgroundReloadRecord(store, snapshot = snapshot || store._fetchManager.createSnapshot(identifier, options)))) {
        assertIdentifierHasId(identifier);
        {
          // deliberately not awaited: the cached record is returned while
          // the background reload resolves on its own
          void store._fetchManager.scheduleFetch(identifier, Object.assign({}, options, {
            backgroundReload: true
          }), context.request);
        }
      }

      // Return the cached record
      promise = Promise.resolve(identifier);
    }
  }
  return promise.then(i => store.peekRecord(i));
}
|
|
708
|
+
/**
 * Services a legacy `findAll` request: asserts a capable adapter exists,
 * then either performs a blocking reload or resolves with the live record
 * array (optionally kicking off a background reload), depending on
 * `options` and the adapter's `shouldReloadAll` / `shouldBackgroundReloadAll`
 * hooks.
 */
function findAll(context) {
  const {
    store,
    data
  } = context.request;
  const {
    type,
    options
  } = data;
  assertPrivateStore(store);
  const adapter = store.adapterFor(type);
  (test => {
    if (!test) {
      throw new Error(`You tried to load all records but you have no adapter (for ${type})`);
    }
  })(adapter);
  (test => {
    if (!test) {
      throw new Error(`You tried to load all records but your adapter does not implement 'findAll'`);
    }
  })(typeof adapter.findAll === 'function');

  // avoid initializing the liveArray just to set `isUpdating`
  const maybeRecordArray = store.recordArrayManager._live.get(type);
  const snapshotArray = new SnapshotRecordArray(store, type, options);
  // reload when explicitly requested, or (unless explicitly disabled) when
  // the adapter asks for it, or when no adapter hook exists and nothing is
  // cached yet
  const shouldReload = options.reload || options.reload !== false && (adapter.shouldReloadAll && adapter.shouldReloadAll(store, snapshotArray) || !adapter.shouldReloadAll && snapshotArray.length === 0);
  let fetch;
  if (shouldReload) {
    // eslint-disable-next-line @typescript-eslint/no-unused-expressions
    maybeRecordArray && (maybeRecordArray.isUpdating = true);
    fetch = _findAll(adapter, store, type, snapshotArray, context.request, true);
  } else {
    fetch = Promise.resolve(store.peekAll(type));
    if (options.backgroundReload || options.backgroundReload !== false && (!adapter.shouldBackgroundReloadAll || adapter.shouldBackgroundReloadAll(store, snapshotArray))) {
      // eslint-disable-next-line @typescript-eslint/no-unused-expressions
      maybeRecordArray && (maybeRecordArray.isUpdating = true);
      // fire-and-forget: the cached array resolves immediately while the
      // background reload updates it later
      void _findAll(adapter, store, type, snapshotArray, context.request, false);
    }
  }
  return fetch;
}
|
|
749
|
+
/**
 * Invokes the adapter's `findAll`, normalizes the payload through the
 * serializer, pushes it into the store (`isAsyncFlush` is forwarded to
 * `store._push`), and resolves with the live record array. The promise is
 * wrapped in a test waiter unless the request opts out.
 */
function _findAll(adapter, store, type, snapshotArray, request, isAsyncFlush) {
  const schema = store.modelFor(type);
  // Promise.resolve().then(...) shields callers from adapters that throw synchronously
  let promise = Promise.resolve().then(() => adapter.findAll(store, schema, null, snapshotArray));
  promise = promise.then(adapterPayload => {
    (test => {
      if (!test) {
        throw new Error(`You made a 'findAll' request for '${type}' records, but the adapter's response did not have any data`);
      }
    })(payloadIsNotBlank(adapterPayload));
    const serializer = store.serializerFor(type);
    const payload = normalizeResponseHelper(serializer, store, schema, adapterPayload, null, 'findAll');
    store._push(payload, isAsyncFlush);
    snapshotArray._recordArray.isUpdating = false;
    {
      if (getGlobalConfig().WarpDrive.debug.LOG_REQUESTS || globalThis.getWarpDriveRuntimeConfig().debug.LOG_REQUESTS) {
        // eslint-disable-next-line no-console
        console.log(`request: findAll<${type}> background reload complete`);
      }
    }
    return snapshotArray._recordArray;
  });
  {
    if (!request.disableTestWaiter) {
      promise = waitFor(promise);
    }
  }
  return promise;
}
|
|
777
|
+
/**
 * Services a legacy `query` request: delegates to the adapter's `query`,
 * normalizes the response, pushes it into the store, and populates (then
 * resolves with) the managed collection record array.
 */
function query(context) {
  const {
    store,
    data
  } = context.request;
  assertPrivateStore(store);
  let {
    options
  } = data;
  // eslint-disable-next-line @typescript-eslint/no-shadow
  const {
    type,
    query
  } = data;
  const adapter = store.adapterFor(type);
  (test => {
    if (!test) {
      throw new Error(`You tried to make a query but you have no adapter (for ${type})`);
    }
  })(adapter);
  (test => {
    if (!test) {
      throw new Error(`You tried to make a query but your adapter does not implement 'query'`);
    }
  })(typeof adapter.query === 'function');
  // reuse a caller-provided collection when present (e.g. collection.update())
  const recordArray = options._recordArray || store.recordArrayManager.getCollection({
    type,
    query
  });
  {
    // strip the private _recordArray option before handing options to the adapter
    options = Object.assign({}, options);
    delete options._recordArray;
  }
  const schema = store.modelFor(type);
  const promise = Promise.resolve().then(() => adapter.query(store, schema, query, recordArray, options));
  return promise.then(adapterPayload => {
    const serializer = store.serializerFor(type);
    const payload = normalizeResponseHelper(serializer, store, schema, adapterPayload, null, 'query');
    const identifiers = store._push(payload, true);
    (test => {
      if (!test) {
        throw new Error('The response to store.query is expected to be an array but it was a single record. Please wrap your response in an array or use `store.queryRecord` to query for a single record.');
      }
    })(Array.isArray(identifiers));
    store.recordArrayManager.populateManagedArray(recordArray, identifiers, payload);
    return recordArray;
  });
}
|
|
825
|
+
/**
 * Throws when a normalized `queryRecord` payload carries an array as its
 * primary data; a single resource object (or null) is required.
 */
function assertSingleResourceDocument(payload) {
  if (Array.isArray(payload.data)) {
    throw new Error(`Expected the primary data returned by the serializer for a 'queryRecord' response to be a single object or null but instead it was an array.`);
  }
}
|
|
832
|
+
/**
 * Services a legacy `queryRecord` request: delegates to the adapter's
 * `queryRecord`, normalizes the single-resource response, pushes it into
 * the store, and resolves with the record (or null for an empty payload).
 */
function queryRecord(context) {
  const {
    store,
    data
  } = context.request;
  // eslint-disable-next-line @typescript-eslint/no-shadow
  const {
    type,
    query,
    options
  } = data;
  const adapter = store.adapterFor(type);
  (test => {
    if (!test) {
      throw new Error(`You tried to make a query but you have no adapter (for ${type})`);
    }
  })(adapter);
  (test => {
    if (!test) {
      throw new Error(`You tried to make a query but your adapter does not implement 'queryRecord'`);
    }
  })(typeof adapter.queryRecord === 'function');
  const schema = store.modelFor(type);
  const promise = Promise.resolve().then(() => adapter.queryRecord(store, schema, query, options));
  return promise.then(adapterPayload => {
    const serializer = store.serializerFor(type);
    const payload = normalizeResponseHelper(serializer, store, schema, adapterPayload, null, 'queryRecord');
    // queryRecord responses must not have an array as primary data
    assertSingleResourceDocument(payload);
    const identifier = store._push(payload, true);
    return identifier ? store.peekRecord(identifier) : null;
  });
}
|
|
864
|
+
|
|
865
|
+
/**
|
|
866
|
+
* Extends the signature of {@link Store} with additional
|
|
867
|
+
* methods available when using the legacy network layer.
|
|
868
|
+
*
|
|
869
|
+
* @public
|
|
870
|
+
* @noInheritDoc
|
|
871
|
+
* @legacy
|
|
872
|
+
*/
|
|
873
|
+
|
|
874
|
+
/**
|
|
875
|
+
* @deprecated - use {@link LegacyStoreCompat} instead
|
|
876
|
+
*/
|
|
877
|
+
|
|
878
|
+
/**
|
|
879
|
+
Returns an instance of the adapter for a given type. For
|
|
880
|
+
example, `adapterFor('person')` will return an instance of
|
|
881
|
+
the adapter located at `app/adapters/person.js`
|
|
882
|
+
|
|
883
|
+
If no `person` adapter is found, this method will look
|
|
884
|
+
for an `application` adapter (the default adapter for
|
|
885
|
+
your entire application).
|
|
886
|
+
|
|
887
|
+
@public
|
|
888
|
+
@param modelName
|
|
889
|
+
*/
|
|
890
|
+
|
|
891
|
+
/**
 * Returns an instance of the adapter for a given type, looking up
 * `adapter:<modelName>` first and falling back to `adapter:application`.
 * Results are cached per store on `this._adapterCache`.
 *
 * Called with the store as `this`.
 *
 * @param modelName - dasherized model name
 * @param _allowMissing - when truthy, suppresses the "no adapter found"
 *   assertion (the function then returns undefined)
 */
function adapterFor(modelName, _allowMissing) {
  (test => {
    if (!test) {
      throw new Error(`Attempted to call store.adapterFor(), but the store instance has already been destroyed.`);
    }
  })(!(this.isDestroying || this.isDestroyed));
  (test => {
    if (!test) {
      throw new Error(`You need to pass a model name to the store's adapterFor method`);
    }
  })(modelName);
  (test => {
    if (!test) {
      throw new Error(`Passing classes to store.adapterFor has been removed. Please pass a dasherized string instead of ${modelName}`);
    }
  })(typeof modelName === 'string');
  // lazily create the per-store cache
  this._adapterCache = this._adapterCache || Object.create(null);
  const normalizedModelName = _deprecatingNormalize(modelName);
  const {
    _adapterCache
  } = this;
  let adapter = _adapterCache[normalizedModelName];
  if (adapter) {
    return adapter;
  }
  const owner = getOwner(this);

  // name specific adapter
  adapter = owner.lookup(`adapter:${normalizedModelName}`);
  if (adapter !== undefined) {
    _adapterCache[normalizedModelName] = adapter;
    return adapter;
  }

  // no adapter found for the specific name, fallback and check for application adapter
  adapter = _adapterCache.application || owner.lookup('adapter:application');
  if (adapter !== undefined) {
    // cache under both keys so future lookups for any type hit the cache
    _adapterCache[normalizedModelName] = adapter;
    _adapterCache.application = adapter;
    return adapter;
  }
  // when `_allowMissing` is truthy this assertion passes and the function
  // deliberately falls through, returning undefined
  (test => {
    if (!test) {
      throw new Error(`No adapter was found for '${modelName}' and no 'application' adapter was found as a fallback.`);
    }
  })(_allowMissing);
}
|
|
938
|
+
|
|
939
|
+
/**
|
|
940
|
+
Returns an instance of the serializer for a given type. For
|
|
941
|
+
example, `serializerFor('person')` will return an instance of
|
|
942
|
+
`App.PersonSerializer`.
|
|
943
|
+
|
|
944
|
+
If no `App.PersonSerializer` is found, this method will look
|
|
945
|
+
for an `App.ApplicationSerializer` (the default serializer for
|
|
946
|
+
your entire application).
|
|
947
|
+
|
|
948
|
+
If a serializer cannot be found on the adapter, it will fall back
|
|
949
|
+
to an instance of `JSONSerializer`.
|
|
950
|
+
|
|
951
|
+
@public
|
|
952
|
+
@param modelName the record to serialize
|
|
953
|
+
*/
|
|
954
|
+
/**
 * Returns an instance of the serializer for a given type, looking up
 * `serializer:<modelName>` first and falling back to
 * `serializer:application`. Results are cached per store on
 * `this._serializerCache`. Returns null when no serializer can be found.
 *
 * Called with the store as `this`.
 *
 * @param modelName - dasherized model name
 */
function serializerFor(modelName) {
  (test => {
    if (!test) {
      throw new Error(`Attempted to call store.serializerFor(), but the store instance has already been destroyed.`);
    }
  })(!(this.isDestroying || this.isDestroyed));
  (test => {
    if (!test) {
      throw new Error(`You need to pass a model name to the store's serializerFor method`);
    }
  })(modelName);
  (test => {
    if (!test) {
      throw new Error(`Passing classes to store.serializerFor has been removed. Please pass a dasherized string instead of ${modelName}`);
    }
  })(typeof modelName === 'string');
  // lazily create the per-store cache
  this._serializerCache = this._serializerCache || Object.create(null);
  const normalizedModelName = _deprecatingNormalize(modelName);
  const {
    _serializerCache
  } = this;
  let serializer = _serializerCache[normalizedModelName];
  if (serializer) {
    return serializer;
  }

  // by name
  const owner = getOwner(this);
  serializer = owner.lookup(`serializer:${normalizedModelName}`);
  if (serializer !== undefined) {
    _serializerCache[normalizedModelName] = serializer;
    return serializer;
  }

  // no serializer found for the specific model, fallback and check for application serializer
  serializer = _serializerCache.application || owner.lookup('serializer:application');
  if (serializer !== undefined) {
    // cache under both keys so future lookups for any type hit the cache
    _serializerCache[normalizedModelName] = serializer;
    _serializerCache.application = serializer;
    return serializer;
  }
  return null;
}
|
|
997
|
+
|
|
998
|
+
/**
|
|
999
|
+
`normalize` converts a json payload into the normalized form expected by
|
|
1000
|
+
{@link Store.push | push} using the serializer specified by `modelName`
|
|
1001
|
+
|
|
1002
|
+
:::warning
|
|
1003
|
+
Generally it would be better to invoke the serializer yourself directly,
|
|
1004
|
+
or write a more specialized normalization utility.
|
|
1005
|
+
:::
|
|
1006
|
+
|
|
1007
|
+
Example
|
|
1008
|
+
|
|
1009
|
+
```js
|
|
1010
|
+
socket.on('message', function(message) {
|
|
1011
|
+
let modelName = message.model;
|
|
1012
|
+
let data = message.data;
|
|
1013
|
+
store.push(store.normalize(modelName, data));
|
|
1014
|
+
});
|
|
1015
|
+
```
|
|
1016
|
+
|
|
1017
|
+
@legacy
|
|
1018
|
+
@public
|
|
1019
|
+
@param modelName The name of the model type for this payload
|
|
1020
|
+
@return The normalized payload
|
|
1021
|
+
*/
|
|
1022
|
+
// TODO @runspired @deprecate users should call normalize on the associated serializer directly
|
|
1023
|
+
/**
 * Converts a raw json payload into the normalized form expected by
 * `store.push`, using the `normalize` method of the serializer for
 * `modelName`. Requires that serializer to implement `normalize`.
 *
 * Called with the store as `this`.
 *
 * @param modelName The name of the model type for this payload
 * @param payload the raw payload to normalize
 * @return The normalized payload
 */
function normalize(modelName, payload) {
  (test => {
    if (!test) {
      throw new Error(`Attempted to call store.normalize(), but the store instance has already been destroyed.`);
    }
  })(!(this.isDestroying || this.isDestroyed));
  (test => {
    if (!test) {
      throw new Error(`You need to pass a model name to the store's normalize method`);
    }
  })(modelName);
  (test => {
    if (!test) {
      throw new Error(`Passing classes to store methods has been removed. Please pass a dasherized string instead of ${typeof modelName}`);
    }
  })(typeof modelName === 'string');
  const normalizedModelName = _deprecatingNormalize(modelName);
  const serializer = this.serializerFor(normalizedModelName);
  const schema = this.modelFor(normalizedModelName);
  (test => {
    if (!test) {
      throw new Error(`You must define a normalize method in your serializer in order to call store.normalize`);
    }
  })(typeof serializer?.normalize === 'function');
  return serializer.normalize(schema, payload);
}
|
|
1049
|
+
|
|
1050
|
+
/**
|
|
1051
|
+
Push some raw data into the store.
|
|
1052
|
+
|
|
1053
|
+
This method can be used both to push in brand new
|
|
1054
|
+
records, as well as to update existing records. You
|
|
1055
|
+
can push in more than one type of object at once.
|
|
1056
|
+
All objects should be in the format expected by the
|
|
1057
|
+
serializer.
|
|
1058
|
+
|
|
1059
|
+
```js [app/serializers/application.js]
|
|
1060
|
+
import RESTSerializer from '@warp-drive/legacy/serializer/rest';
|
|
1061
|
+
|
|
1062
|
+
export default class ApplicationSerializer extends RESTSerializer {}
|
|
1063
|
+
```
|
|
1064
|
+
|
|
1065
|
+
```js
|
|
1066
|
+
let pushData = {
|
|
1067
|
+
posts: [
|
|
1068
|
+
{ id: 1, postTitle: "Great post", commentIds: [2] }
|
|
1069
|
+
],
|
|
1070
|
+
comments: [
|
|
1071
|
+
{ id: 2, commentBody: "Insightful comment" }
|
|
1072
|
+
]
|
|
1073
|
+
}
|
|
1074
|
+
|
|
1075
|
+
store.pushPayload(pushData);
|
|
1076
|
+
```
|
|
1077
|
+
|
|
1078
|
+
By default, the data will be deserialized using a default
|
|
1079
|
+
serializer (the application serializer if it exists).
|
|
1080
|
+
|
|
1081
|
+
Alternatively, `pushPayload` will accept a model type which
|
|
1082
|
+
will determine which serializer will process the payload.
|
|
1083
|
+
|
|
1084
|
+
```js [app/serializers/application.js]
|
|
1085
|
+
import RESTSerializer from '@warp-drive/legacy/serializer/rest';
|
|
1086
|
+
|
|
1087
|
+
export default class ApplicationSerializer extends RESTSerializer {}
|
|
1088
|
+
```
|
|
1089
|
+
|
|
1090
|
+
```js [app/serializers/post.js]
|
|
1091
|
+
import JSONSerializer from '@warp-drive/legacy/serializer/json';
|
|
1092
|
+
|
|
1093
|
+
export default JSONSerializer;
|
|
1094
|
+
```
|
|
1095
|
+
|
|
1096
|
+
```js
|
|
1097
|
+
store.pushPayload(pushData); // Will use the application serializer
|
|
1098
|
+
store.pushPayload('post', pushData); // Will use the post serializer
|
|
1099
|
+
```
|
|
1100
|
+
|
|
1101
|
+
@public
|
|
1102
|
+
@param modelName Optionally, a model type used to determine which serializer will be used
|
|
1103
|
+
@param inputPayload
|
|
1104
|
+
*/
|
|
1105
|
+
// TODO @runspired @deprecate pushPayload in favor of looking up the serializer
|
|
1106
|
+
/**
 * Pushes a raw payload into the store via the serializer's `pushPayload`
 * hook. The single-argument form (`pushPayload(payload)`) uses the
 * `application` serializer; the two-argument form uses the serializer for
 * the given model name.
 *
 * Called with the store as `this`.
 *
 * @param modelName Optionally, a model type used to determine which serializer will be used
 * @param inputPayload the payload to push (when modelName is provided)
 */
function pushPayload(modelName, inputPayload) {
  (test => {
    if (!test) {
      throw new Error(`Attempted to call store.pushPayload(), but the store instance has already been destroyed.`);
    }
  })(!(this.isDestroying || this.isDestroyed));
  // single-argument form: the first argument is actually the payload
  const payload = inputPayload || modelName;
  const normalizedModelName = inputPayload ? _deprecatingNormalize(modelName) : 'application';
  const serializer = this.serializerFor(normalizedModelName);
  (test => {
    if (!test) {
      throw new Error(`You cannot use 'store.pushPayload(<type>, <payload>)' unless the serializer for '${normalizedModelName}' defines 'pushPayload'`);
    }
  })(serializer && typeof serializer.pushPayload === 'function');
  serializer.pushPayload(this, payload);
}
|
|
1122
|
+
|
|
1123
|
+
// TODO @runspired @deprecate records should implement their own serialization if desired
/**
 * Serializes a record through a freshly-created snapshot using the legacy
 * serializer infrastructure. Lazily instantiates the store's FetchManager.
 * Called with the store as `this`.
 */
function serializeRecord(record, options) {
  // TODO we used to check if the record was destroyed here
  if (!this._fetchManager) {
    this._fetchManager = new FetchManager(this);
  }
  const snapshot = this._fetchManager.createSnapshot(recordIdentifierFor(record));
  return snapshot.serialize(options);
}
|
|
1131
|
+
/**
 * Tears down any adapter/serializer instances this store created, calling
 * each cached instance's `destroy` method when one exists.
 * Called with the store as `this`.
 */
function cleanup() {
  const destroyAll = cache => {
    for (const name in cache) {
      const instance = cache[name];
      if (typeof instance.destroy === 'function') {
        instance.destroy();
      }
    }
  };
  destroyAll(this._adapterCache);
  destroyAll(this._serializerCache);
}
|
|
1146
|
+
export { LegacyNetworkHandler, adapterFor, cleanup, normalize, pushPayload, serializeRecord, serializerFor };
|