@sap/cds 8.3.0 → 8.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/CHANGELOG.md +35 -1
  2. package/bin/serve.js +9 -2
  3. package/lib/auth/ias-auth.js +4 -1
  4. package/lib/auth/jwt-auth.js +4 -1
  5. package/lib/compile/cdsc.js +1 -1
  6. package/lib/compile/etc/_localized.js +1 -0
  7. package/lib/compile/extend.js +23 -23
  8. package/lib/compile/for/lean_drafts.js +5 -0
  9. package/lib/compile/to/srvinfo.js +3 -1
  10. package/lib/{linked → core}/classes.js +8 -6
  11. package/lib/{linked/models.js → core/linked-csn.js} +4 -0
  12. package/lib/env/defaults.js +4 -1
  13. package/lib/i18n/localize.js +2 -2
  14. package/lib/index.js +43 -59
  15. package/lib/log/cds-error.js +21 -21
  16. package/lib/ql/cds-ql.js +5 -5
  17. package/lib/req/cds-context.js +5 -0
  18. package/lib/req/context.js +2 -2
  19. package/lib/req/locale.js +25 -21
  20. package/lib/{linked → req}/validate.js +11 -9
  21. package/lib/srv/cds-serve.js +1 -1
  22. package/lib/srv/middlewares/cds-context.js +1 -1
  23. package/lib/srv/middlewares/errors.js +20 -7
  24. package/lib/srv/protocols/hcql.js +106 -43
  25. package/lib/srv/protocols/http.js +2 -2
  26. package/lib/srv/protocols/index.js +14 -10
  27. package/lib/srv/protocols/odata-v4.js +2 -26
  28. package/lib/srv/protocols/okra.js +24 -0
  29. package/lib/srv/srv-models.js +6 -8
  30. package/lib/{utils → test}/cds-test.js +5 -5
  31. package/lib/utils/check-version.js +8 -15
  32. package/lib/utils/extend.js +20 -0
  33. package/lib/utils/lazify.js +33 -0
  34. package/lib/utils/tar.js +39 -1
  35. package/libx/_runtime/cds-services/adapter/odata-v4/to.js +0 -1
  36. package/libx/_runtime/common/error/frontend.js +18 -4
  37. package/libx/_runtime/common/generic/auth/restrict.js +1 -3
  38. package/libx/_runtime/common/generic/sorting.js +1 -1
  39. package/libx/_runtime/common/utils/compareJson.js +139 -53
  40. package/libx/_runtime/common/utils/resolveView.js +19 -23
  41. package/libx/_runtime/fiori/lean-draft.js +2 -2
  42. package/libx/_runtime/messaging/kafka.js +7 -1
  43. package/libx/_runtime/remote/utils/data.js +30 -24
  44. package/libx/odata/ODataAdapter.js +12 -7
  45. package/libx/odata/middleware/batch.js +3 -0
  46. package/libx/odata/middleware/error.js +6 -0
  47. package/libx/odata/parse/afterburner.js +5 -6
  48. package/libx/odata/parse/multipartToJson.js +12 -8
  49. package/libx/odata/utils/index.js +3 -2
  50. package/libx/odata/utils/metadata.js +31 -1
  51. package/libx/outbox/index.js +5 -1
  52. package/package.json +3 -4
  53. package/server.js +18 -0
  54. package/lib/lazy.js +0 -51
  55. package/lib/test/index.js +0 -2
  56. /package/lib/{linked → core}/entities.js +0 -0
  57. /package/lib/{linked → core}/types.js +0 -0
  58. /package/lib/{utils → test}/axios.js +0 -0
  59. /package/lib/{utils → test}/data.js +0 -0
@@ -51,7 +51,7 @@ const _rewriteError = error => {
51
51
  (code.startsWith('SQLITE_CONSTRAINT') && (message.match(/COMMIT/) || message.match(/FOREIGN KEY/))) ||
52
52
  (code === '155' && message.match(/fk constraint violation/))
53
53
  ) {
54
- // > foreign key constaint violation no sqlite/ hana
54
+ // > foreign key constaint violation on sqlite/ hana
55
55
  error.code = '400'
56
56
  error.message = 'FK_CONSTRAINT_VIOLATION'
57
57
  return
@@ -63,17 +63,33 @@ const _rewriteError = error => {
63
63
  }
64
64
  }
65
65
 
66
+ const _isInHttpResponseCodeRange = errorCode => errorCode >= 300 && errorCode <= 599
67
+
68
+ const BAD_REQUEST_ERRORS = new Set(['ENTITY_ALREADY_EXISTS', 'FK_CONSTRAINT_VIOLATION', 'UNIQUE_CONSTRAINT_VIOLATION'])
69
+
66
70
  const _normalize = (err, locale, formatterFn = _getFiltered) => {
67
71
  // REVISIT: code and message rewriting
68
72
  _rewriteError(err)
69
73
 
74
+ const { message: originalMessage } = err
75
+
70
76
  // message (i18n)
71
77
  err.message = getErrorMessage(err, locale)
72
78
 
73
79
  // ensure code is set and a string
74
80
  err.code = String(err.code || 'null')
75
81
 
76
- let statusCode = err.status || err.statusCode || (_isAllowedError(err.code) && err.code)
82
+ // determine status code from error
83
+ let statusCode = err.status || err.statusCode //> REVISIT: why prefer status over statusCode?
84
+ // well-defined bad request errors
85
+ if (!statusCode && BAD_REQUEST_ERRORS.has(originalMessage)) statusCode = 400
86
+ if (!statusCode && _isInHttpResponseCodeRange(err.code)) {
87
+ if ('sqlState' in err) {
88
+ // HANA/ database error -> don't use code as status code
89
+ } else {
90
+ statusCode = err.code
91
+ }
92
+ }
77
93
 
78
94
  // details
79
95
  if (err.details) {
@@ -95,8 +111,6 @@ const _normalize = (err, locale, formatterFn = _getFiltered) => {
95
111
  return { error, statusCode }
96
112
  }
97
113
 
98
- const _isAllowedError = errorCode => errorCode >= 300 && errorCode < 505
99
-
100
114
  // - for one unique value, we use it
101
115
  // - if at least one 5xx exists, we use 500
102
116
  // - else if at least one 4xx exists, we use 400
@@ -5,8 +5,6 @@ const { reject, getRejectReason, resolveUserAttrs, getAuthRelevantEntity } = req
5
5
  const { DRAFT_EVENTS, MOD_EVENTS } = require('./constants')
6
6
  const { getNormalizedPlainRestrictions } = require('./restrictions')
7
7
 
8
- const { cqn2cqn4sql } = require('../../utils/cqn2cqn4sql')
9
-
10
8
  const _getResolvedApplicables = (applicables, req) => {
11
9
  const resolvedApplicables = []
12
10
 
@@ -189,7 +187,7 @@ const _getRestrictedCount = async (req, model, resolvedApplicables) => {
189
187
  const restrictionForTarget = _getRestrictionForTarget(resolvedApplicables, req.target)
190
188
  if (restrictionForTarget) selectRestricted.where(restrictionForTarget)
191
189
 
192
- const { n } = await dbtx.run(cqn2cqn4sql(selectRestricted, model, { suppressSearch: true }))
190
+ const { n } = await dbtx.run(selectRestricted)
193
191
  return n
194
192
  }
195
193
 
@@ -76,7 +76,7 @@ const commonGenericSorting = function (req) {
76
76
 
77
77
  if (select.from && select.from.SELECT) {
78
78
  // add default sort to root query
79
- if (select.orderBy) _addDefaultSortOrder(req, select)
79
+ _addDefaultSortOrder(req, select)
80
80
 
81
81
  // apply default sort to bottom-most sub-query
82
82
  while (select.from.SELECT) select = select.from.SELECT
@@ -1,3 +1,4 @@
1
+ const cds = require('../../cds')
1
2
  const { DRAFT_COLUMNS_MAP } = require('../constants/draft')
2
3
 
3
4
  const _deepEqual = (val1, val2) => {
@@ -18,12 +19,7 @@ const _getCorrespondingEntryWithSameKeys = (source, entry, keys) => {
18
19
  const _getIdxCorrespondingEntryWithSameKeys = (source, entry, keys) =>
19
20
  source.findIndex(sourceEntry => keys.every(key => _deepEqual(sourceEntry[key], entry[key])))
20
21
 
21
- const _getKeysOfEntity = entity =>
22
- Object.keys(entity.keys).filter(key => !(key in DRAFT_COLUMNS_MAP) && !entity.elements[key].isAssociation)
23
-
24
- const _getCompositionsOfEntity = entity => Object.keys(entity.elements).filter(e => entity.elements[e].isComposition)
25
-
26
- const _createToBeDeletedEntries = (oldEntry, entity, keys, compositions) => {
22
+ const _createToBeDeletedEntries = (oldEntry, entity, keys, compositions, metaCache) => {
27
23
  const toBeDeletedEntry = {
28
24
  _op: 'delete'
29
25
  }
@@ -35,20 +31,18 @@ const _createToBeDeletedEntries = (oldEntry, entity, keys, compositions) => {
35
31
  if (keys.includes(prop)) {
36
32
  toBeDeletedEntry[prop] = oldEntry[prop]
37
33
  } else if (compositions.includes(prop) && oldEntry[prop]) {
34
+ const target = entity.elements[prop]._target
35
+ const cache = metaCache.get(target)
38
36
  toBeDeletedEntry[prop] = entity.elements[prop].is2one
39
37
  ? _createToBeDeletedEntries(
40
38
  oldEntry[prop],
41
39
  entity.elements[prop]._target,
42
- _getKeysOfEntity(entity.elements[prop]._target),
43
- _getCompositionsOfEntity(entity.elements[prop]._target)
40
+ cache.keys,
41
+ cache.compositions,
42
+ metaCache
44
43
  )
45
44
  : oldEntry[prop].map(entry =>
46
- _createToBeDeletedEntries(
47
- entry,
48
- entity.elements[prop]._target,
49
- _getKeysOfEntity(entity.elements[prop]._target),
50
- _getCompositionsOfEntity(entity.elements[prop]._target)
51
- )
45
+ _createToBeDeletedEntries(entry, target, cache.keys, cache.compositions, metaCache)
52
46
  )
53
47
  } else {
54
48
  toBeDeletedEntry._old = toBeDeletedEntry._old || {}
@@ -77,7 +71,7 @@ const _hasOpDeep = (entry, element) => {
77
71
  return false
78
72
  }
79
73
 
80
- const _addCompositionsToResult = (result, entity, prop, newValue, oldValue, opts) => {
74
+ const _addCompositionsToResult = (result, entity, prop, newValue, oldValue, opts, buckets, metaCache) => {
81
75
  /*
82
76
  * REVISIT: the current impl results in {} instead of keeping null for compo to one.
83
77
  * unfortunately, many follow-up errors occur (e.g., prop in null checks) if changed.
@@ -89,9 +83,23 @@ const _addCompositionsToResult = (result, entity, prop, newValue, oldValue, opts
89
83
  !Array.isArray(newValue[prop]) &&
90
84
  Object.keys(newValue[prop]).length === 0
91
85
  ) {
92
- composition = compareJsonDeep(entity.elements[prop]._target, undefined, oldValue && oldValue[prop], opts)
86
+ composition = compareJsonDeep(
87
+ entity.elements[prop]._target,
88
+ undefined,
89
+ oldValue && oldValue[prop],
90
+ opts,
91
+ buckets,
92
+ metaCache
93
+ )
93
94
  } else {
94
- composition = compareJsonDeep(entity.elements[prop]._target, newValue[prop], oldValue && oldValue[prop], opts)
95
+ composition = compareJsonDeep(
96
+ entity.elements[prop]._target,
97
+ newValue[prop],
98
+ oldValue && oldValue[prop],
99
+ opts,
100
+ buckets,
101
+ metaCache
102
+ )
95
103
  }
96
104
  if (composition.some(c => _hasOpDeep(c, entity.elements[prop]))) {
97
105
  result[prop] = entity.elements[prop].is2one ? composition[0] : composition
@@ -118,14 +126,17 @@ const _addKeysToResult = (result, prop, newValue, oldValue) => {
118
126
  }
119
127
  }
120
128
 
121
- const _addToBeDeletedEntriesToResult = (results, entity, keys, newValues, oldValues) => {
129
+ const _addToBeDeletedEntriesToResult = (results, entity, keys, newValues, oldValues, newBucketMap, metaCache) => {
130
+ const cache = metaCache.get(entity)
122
131
  // add to be deleted entries
123
132
  for (const oldEntry of oldValues) {
124
- const entry = _getCorrespondingEntryWithSameKeys(newValues, oldEntry, keys)
133
+ const entry = cds.env.features.diff_optimization
134
+ ? _getCorrespondingEntryWithSameKeysFromBucket(newBucketMap, oldEntry, entity, keys, cache)
135
+ : _getCorrespondingEntryWithSameKeys(newValues, oldEntry, keys)
125
136
 
126
137
  if (!entry) {
127
138
  // prepare to be deleted (deep) entry without manipulating oldData
128
- const toBeDeletedEntry = _createToBeDeletedEntries(oldEntry, entity, keys, _getCompositionsOfEntity(entity))
139
+ const toBeDeletedEntry = _createToBeDeletedEntries(oldEntry, entity, keys, cache.compositions, metaCache)
129
140
  results.push(toBeDeletedEntry)
130
141
  }
131
142
  }
@@ -149,54 +160,115 @@ const _skipToMany = (entity, prop) => {
149
160
  return entity.elements[prop] && entity.elements[prop].is2many && _skip(entity, prop)
150
161
  }
151
162
 
152
- // Returns all property names from the new entry and add missing managed elements
153
- const _propertiesAndManaged = (newEntry, entity) => {
154
- return [
155
- ...Object.getOwnPropertyNames(newEntry),
156
- ...Object.keys(entity.elements).filter(
157
- elementName => newEntry[elementName] === undefined && entity.elements[elementName]['@cds.on.update']
158
- )
159
- ]
160
- }
163
+ const _iteratePropsInNewEntry = (newEntry, keys, result, oldEntry, entity, opts, buckets, metaCache) => {
164
+ const cache = metaCache.get(entity)
161
165
 
162
- const _iteratePropsInNewEntry = (newEntry, keys, result, oldEntry, entity, opts) => {
163
166
  // On app-service layer, generated foreign keys are not enumerable,
164
167
  // include them here too.
165
- for (const prop of _propertiesAndManaged(newEntry, entity)) {
166
- if (keys.includes(prop)) {
168
+ for (const prop of cache.props) {
169
+ if (cache.keys.includes(prop)) {
167
170
  _addKeysToResult(result, prop, newEntry, oldEntry)
168
171
  continue
169
172
  }
170
173
 
171
- // if value did not change --> ignored
172
- if (
173
- newEntry[prop] === (oldEntry && oldEntry[prop]) ||
174
- (oldEntry && entity.elements[prop]?.['@Core.Immutable']) ||
175
- (opts.ignoreDraftColumns && prop in DRAFT_COLUMNS_MAP)
176
- ) {
174
+ if (newEntry[prop] === undefined && !cache.onUpdate.includes(prop)) continue
175
+
176
+ if (cache.compositions.includes(prop)) {
177
+ _addCompositionsToResult(result, entity, prop, newEntry, oldEntry, opts, buckets, metaCache)
177
178
  continue
178
179
  }
179
180
 
180
- if (_skipToMany(entity, prop)) {
181
- continue
181
+ // if value did not change --> ignored
182
+ if (newEntry[prop] === (oldEntry && oldEntry[prop])) continue
183
+
184
+ // existing immutable --> ignored
185
+ if (oldEntry && cache.immutables.includes(prop)) continue
186
+
187
+ _addPrimitiveValuesAndOperatorToResult(result, prop, newEntry, oldEntry)
188
+ }
189
+ }
190
+
191
+ const _isSimpleKey = element => !element._isStructured && element.type != 'cds.Binary'
192
+
193
+ const _getMetaCache = (entity, metaCache, opts) => {
194
+ if (metaCache.get(entity)) return
195
+
196
+ const cache = { keys: [], props: [], compositions: [], immutables: [], onUpdate: [] }
197
+ metaCache.set(entity, cache)
198
+ for (let prop in entity.elements) {
199
+ const element = entity.elements[prop] || {}
200
+ if (prop in entity.keys && !(prop in DRAFT_COLUMNS_MAP) && !element.isAssociation) cache.keys.push(prop)
201
+ if (_skipToMany(entity, prop) || _skipToOne(entity, prop)) continue
202
+ if (opts.ignoreDraftColumns && prop in DRAFT_COLUMNS_MAP) continue
203
+
204
+ if (element?.isComposition) {
205
+ cache.compositions.push(prop)
206
+ _getMetaCache(element._target, metaCache, opts)
182
207
  }
183
208
 
184
- if (_skipToOne(entity, prop)) {
185
- continue
209
+ if (element?.['@Core.Immutable']) cache.immutables.push(prop)
210
+ if (element?.['@cds.on.update']) cache.onUpdate.push(prop)
211
+
212
+ cache.props.push(prop)
213
+ }
214
+
215
+ let getKeyHash
216
+ if (cache.keys.length === 1 && _isSimpleKey(entity.elements[cache.keys[0]])) {
217
+ getKeyHash = (entry, keys) => entry[keys[0]].toString()
218
+ } else if (cache.keys.map(key => entity.elements[key]).every(key => _isSimpleKey(key))) {
219
+ getKeyHash = (entry, keys) => keys.reduce((hash, key) => `${hash},${key}=${entry[key].toString()}`, '')
220
+ } else {
221
+ getKeyHash = (entry, keys) => {
222
+ const keyObj = keys.reduce((hash, key) => {
223
+ hash[key] = entry[key]
224
+ return hash
225
+ }, {})
226
+
227
+ return JSON.stringify(keyObj)
186
228
  }
229
+ }
230
+ cache.getKeyHash = getKeyHash
231
+ }
187
232
 
188
- if (entity.elements[prop] && entity.elements[prop].isComposition) {
189
- _addCompositionsToResult(result, entity, prop, newEntry, oldEntry, opts)
190
- continue
233
+ const _addBucket = (entity, entry, bucketMap, metaCache) => {
234
+ if (!entry) return
235
+ const entries = _normalizeToArray(entry)
236
+ const cache = metaCache.get(entity)
237
+
238
+ entries.forEach(e => {
239
+ const keyHash = cache.getKeyHash(e, cache.keys)
240
+ let entityMap = bucketMap.get(entity)
241
+ if (!entityMap) {
242
+ entityMap = new Map()
243
+ bucketMap.set(entity, entityMap)
191
244
  }
245
+ entityMap.set(keyHash, e)
192
246
 
193
- _addPrimitiveValuesAndOperatorToResult(result, prop, newEntry, oldEntry)
194
- }
247
+ for (const prop of cache.props) {
248
+ if (cache.compositions.includes(prop)) _addBucket(entity.elements[prop]._target, e[prop], bucketMap, metaCache)
249
+ }
250
+ })
251
+ }
252
+
253
+ const _getBucketMap = (value, entity, metaCache) => {
254
+ const bucketMap = new Map()
255
+ _addBucket(entity, value, bucketMap, metaCache)
256
+
257
+ return bucketMap
258
+ }
259
+
260
+ const _getCorrespondingEntryWithSameKeysFromBucket = (bucketMap, entry, entity, keys, cache) => {
261
+ const bucket = bucketMap.get(entity)
262
+ if (!bucket) return
263
+
264
+ const keyHash = cache.getKeyHash(entry, keys)
265
+ return bucket.get(keyHash)
195
266
  }
196
267
 
197
- const compareJsonDeep = (entity, newValue = [], oldValue = [], opts) => {
268
+ const compareJsonDeep = (entity, newValue = [], oldValue = [], opts, buckets, metaCache) => {
198
269
  const resultsArray = []
199
- const keys = _getKeysOfEntity(entity)
270
+ const cache = metaCache.get(entity)
271
+ const keys = cache.keys
200
272
 
201
273
  // normalize input
202
274
  const newValues = _normalizeToArray(newValue)
@@ -205,12 +277,17 @@ const compareJsonDeep = (entity, newValue = [], oldValue = [], opts) => {
205
277
  // add to be created and to be updated entries
206
278
  for (const newEntry of newValues) {
207
279
  const result = {}
208
- const oldEntry = _getCorrespondingEntryWithSameKeys(oldValues, newEntry, keys)
209
- _iteratePropsInNewEntry(newEntry, keys, result, oldEntry, entity, opts)
280
+ let oldEntry
281
+ if (oldValues.length) {
282
+ oldEntry = cds.env.features.diff_optimization
283
+ ? _getCorrespondingEntryWithSameKeysFromBucket(buckets.oldBucketMap, newEntry, entity, keys, cache)
284
+ : _getCorrespondingEntryWithSameKeys(oldValues, newEntry, keys)
285
+ }
286
+ _iteratePropsInNewEntry(newEntry, keys, result, oldEntry, entity, opts, buckets, metaCache)
210
287
  resultsArray.push(result)
211
288
  }
212
289
 
213
- _addToBeDeletedEntriesToResult(resultsArray, entity, keys, newValues, oldValues)
290
+ _addToBeDeletedEntriesToResult(resultsArray, entity, keys, newValues, oldValues, buckets.newBucketMap, metaCache)
214
291
 
215
292
  return resultsArray
216
293
  }
@@ -266,7 +343,16 @@ const compareJsonDeep = (entity, newValue = [], oldValue = [], opts) => {
266
343
  */
267
344
  const compareJson = (newValue, oldValue, entity, opts = {}) => {
268
345
  const options = Object.assign({ ignoreDraftColumns: false }, opts)
269
- const result = compareJsonDeep(entity, newValue, oldValue, options)
346
+
347
+ let newBucketMap,
348
+ oldBucketMap,
349
+ metaCache = new Map()
350
+ _getMetaCache(entity, metaCache, opts)
351
+ if (oldValue && (!Array.isArray(oldValue) || oldValue.length) && cds.env.features.diff_optimization) {
352
+ newBucketMap = _getBucketMap(newValue, entity, metaCache)
353
+ oldBucketMap = _getBucketMap(oldValue, entity, metaCache)
354
+ }
355
+ const result = compareJsonDeep(entity, newValue, oldValue, options, { newBucketMap, oldBucketMap }, metaCache)
270
356
 
271
357
  // in case of batch insert, result is an array
272
358
  // in all other cases it is an array with just one entry
@@ -65,9 +65,7 @@ const revertData = (data, transition, service) => {
65
65
  : _newData(data, inverseTransition, true, service)
66
66
  }
67
67
 
68
- const _newSubData = (newData, key, transition, el, inverse, service) => {
69
- const val = newData[key]
70
-
68
+ const _newSubData = (val, key, transition, el, inverse, service) => {
71
69
  if ((!Array.isArray(val) && typeof val === 'object') || (Array.isArray(val) && val.length !== 0)) {
72
70
  let mapped = transition.mapping.get(key)
73
71
  if (!mapped) {
@@ -81,11 +79,12 @@ const _newSubData = (newData, key, transition, el, inverse, service) => {
81
79
  }
82
80
 
83
81
  if (Array.isArray(val)) {
84
- newData[key] = val.map(singleVal => _newData(singleVal, mapped.transition, inverse, service))
82
+ return val.map(singleVal => _newData(singleVal, mapped.transition, inverse, service))
85
83
  } else {
86
- newData[key] = _newData(val, mapped.transition, inverse, service)
84
+ return _newData(val, mapped.transition, inverse, service)
87
85
  }
88
86
  }
87
+ return val //Case of empty array
89
88
  }
90
89
 
91
90
  const _newNestedData = (queryTarget, newData, ref, value) => {
@@ -112,40 +111,37 @@ const _newData = (data, transition, inverse, service) => {
112
111
  // no transition -> nothing to do
113
112
  if (transition.target && transition.target.name === transition.queryTarget.name) return data
114
113
 
115
- // REVISIT this does not copy deep
116
- const newData = { ...data }
114
+ const newData = {}
117
115
  const queryTarget = transition.queryTarget
118
116
 
119
- for (const key in newData) {
117
+ for (const key in data) {
120
118
  const el = queryTarget && queryTarget?.elements[key]
121
119
  const isAssoc = el && el.isAssociation
122
120
 
123
- if (isAssoc) {
124
- if (newData[key] || (newData[key] === null && service.name === 'db')) {
125
- _newSubData(newData, key, transition, el, inverse, service)
126
- }
127
- }
128
-
129
121
  const mapped = transition.mapping.get(key)
130
122
  if (!mapped) {
131
- // if there is no mapping and no element with the same name in the target, then we don't need the data
132
- if ((typeof newData[key] !== 'object' || newData[key] === null) && !transition.target.elements[key])
133
- delete newData[key]
123
+ //In this condition the data is needed
124
+ if (
125
+ ((typeof data[key] === 'object' && data[key] !== null) || transition.target.elements[key]) &&
126
+ newData[key] === undefined
127
+ )
128
+ newData[key] = data[key]
134
129
  continue
135
130
  }
131
+ let value = data[key]
132
+ if (isAssoc) {
133
+ if (value || (value === null && service.name === 'db')) {
134
+ value = _newSubData(value, key, transition, el, inverse, service)
135
+ }
136
+ }
136
137
 
137
138
  if (!isAssoc && mapped.transition) {
138
- _newSubData(newData, key, transition, el, inverse)
139
- const value = newData[key]
140
- delete newData[key]
139
+ value = _newSubData(value, key, transition, el, inverse)
141
140
  Object.assign(newData, value)
142
141
  }
143
142
 
144
143
  if (mapped.ref) {
145
- const value = newData[key]
146
- delete newData[key]
147
144
  const { ref } = mapped
148
-
149
145
  if (ref.length === 1) {
150
146
  newData[ref[0]] = value
151
147
  if (mapped.alternatives) mapped.alternatives.forEach(({ ref }) => (newData[ref[0]] = value))
@@ -389,7 +389,7 @@ cds.ApplicationService.prototype.handle = async function (req) {
389
389
  return req._messages
390
390
  }
391
391
  })
392
- if (req.tx) _req.tx = req.tx
392
+ if (req.tx && !_req.tx) _req.tx = req.tx
393
393
 
394
394
  return _req
395
395
  }
@@ -1209,7 +1209,7 @@ function _cleanseParams(params, target) {
1209
1209
  if (key === 'IsActiveEntity') {
1210
1210
  const value = params[key]
1211
1211
  delete params[key]
1212
- Object.defineProperty(params, key, { value, enumerable: false })
1212
+ Object.defineProperty(params, key, { value, enumerable: false, writeable: true })
1213
1213
  }
1214
1214
  }
1215
1215
  }
@@ -209,12 +209,18 @@ function _getKeyFn(topicOrEvent) {
209
209
 
210
210
  async function _getConfig(srv) {
211
211
  const caCerts = await _getCaCerts(srv)
212
+
213
+ const allBrokers =
214
+ srv.options.credentials.cluster?.['brokers.client_ssl'] ||
215
+ srv.options.credentials['cluster.public']?.['brokers.client_ssl']
216
+ const brokers = allBrokers.split(',')
217
+
212
218
  return {
213
219
  clientId: srv.appId,
214
220
  // logLevel: 4,
215
221
  connectionTimeout: 15000,
216
222
  authenticationTimeout: 15000,
217
- brokers: srv.options.credentials.cluster?.['brokers.client_ssl'].split(','),
223
+ brokers,
218
224
  ssl: {
219
225
  rejectUnauthorized: true,
220
226
  ca: caCerts,
@@ -1,4 +1,5 @@
1
1
  const { big } = require('@sap/cds-foss')
2
+ const cds = require('../../cds')
2
3
 
3
4
  // Code adopted from @sap/cds-odata-v2-adapter-proxy
4
5
  // https://www.w3.org/TR/xmlschema11-2/#nt-duDTFrag
@@ -68,31 +69,36 @@ const _convertValue = (ieee754Compatible, exponentialDecimals) => (value, elemen
68
69
  if (value == null) return value
69
70
 
70
71
  const type = _elementType(element)
71
- if (type === 'cds.Boolean') {
72
- if (value === 'true') {
73
- value = true
74
- } else if (value === 'false') {
75
- value = false
76
- }
77
- } else if (type === 'cds.Integer' || type === 'cds.UInt8' || type === 'cds.Int16' || type === 'cds.Int32') {
78
- value = parseInt(value, 10)
79
- } else if (
80
- type === 'cds.Decimal' ||
81
- type === 'cds.DecimalFloat' ||
82
- type === 'cds.Integer64' ||
83
- type === 'cds.Int64'
84
- ) {
85
- const bigValue = big(value)
86
- if (ieee754Compatible) {
87
- // TODO test with arrayed => element.items.scale?
88
- value = exponentialDecimals ? bigValue.toExponential(element.scale) : bigValue.toFixed(element.scale)
89
- } else {
90
- // OData V2 does not even mention ieee754Compatible, but V4 requires JSON number if ieee754Compatible=false
91
- value = bigValue.toNumber()
72
+
73
+ if (cds.env.features.odata_v2_result_conversion) {
74
+ cds.utils.deprecated({ old: 'flag cds.env.features.odata_v2_result_conversion' })
75
+ if (type === 'cds.Boolean') {
76
+ if (value === 'true') {
77
+ value = true
78
+ } else if (value === 'false') {
79
+ value = false
80
+ }
81
+ } else if (type === 'cds.Integer' || type === 'cds.UInt8' || type === 'cds.Int16' || type === 'cds.Int32') {
82
+ value = parseInt(value, 10)
83
+ } else if (
84
+ type === 'cds.Decimal' ||
85
+ type === 'cds.DecimalFloat' ||
86
+ type === 'cds.Integer64' ||
87
+ type === 'cds.Int64'
88
+ ) {
89
+ const bigValue = big(value)
90
+ if (ieee754Compatible) {
91
+ // TODO test with arrayed => element.items.scale?
92
+ value = exponentialDecimals ? bigValue.toExponential(element.scale) : bigValue.toFixed(element.scale)
93
+ } else {
94
+ // OData V2 does not even mention ieee754Compatible, but V4 requires JSON number if ieee754Compatible=false
95
+ value = bigValue.toNumber()
96
+ }
97
+ } else if (type === 'cds.Double') {
98
+ value = parseFloat(value)
92
99
  }
93
- } else if (type === 'cds.Double') {
94
- value = parseFloat(value)
95
- } else if (type === 'cds.Time') {
100
+ }
101
+ if (type === 'cds.Time') {
96
102
  const match = value.match(DurationRegex)
97
103
 
98
104
  if (match) {
@@ -73,13 +73,6 @@ class ODataAdapter extends HttpAdapter {
73
73
  if (req.method === 'POST' && req.headers['content-type']?.match(/multipart\/mixed/)) {
74
74
  return next()
75
75
  }
76
- if (req.method in { POST: 1, PUT: 1, PATCH: 1 } && req.headers['content-type']) {
77
- const parts = req.headers['content-type'].split(';')
78
- // header ending with semicolon is not allowed
79
- if (!parts[0].match(/^application\/json$/) || parts[1] === '') {
80
- throw cds.error('415', { statusCode: 415, code: '415' }) // FIXME: use res.status
81
- }
82
- }
83
76
  // POST with empty body is allowed by actions
84
77
  if (req.method in { PUT: 1, PATCH: 1 }) {
85
78
  if (req.headers['content-length'] === '0') {
@@ -87,6 +80,18 @@ class ODataAdapter extends HttpAdapter {
87
80
  return
88
81
  }
89
82
  }
83
+ if (req.method in { POST: 1, PUT: 1, PATCH: 1 }) {
84
+ const contentType = req.headers['content-type'] ?? ''
85
+ let contentLength = req.headers['content-length']
86
+ contentLength = contentLength ? parseInt(contentLength) : 0
87
+
88
+ const parts = contentType.split(';')
89
+ // header ending with semicolon is not allowed
90
+ if ((contentLength && !parts[0].match(/^application\/json$/)) || parts[1] === '') {
91
+ res.status(415).json({ error: { message: 'Unsupported Media Type', statusCode: 415, code: '415' } })
92
+ return
93
+ }
94
+ }
90
95
 
91
96
  return jsonBodyParser(req, res, next)
92
97
  })
@@ -342,6 +342,8 @@ const _processBatch = async (srv, router, req, res, next, body, ct, boundary) =>
342
342
  : {}
343
343
  }
344
344
 
345
+ request.headers['content-type'] ??= req.headers['content-type']
346
+
345
347
  const { atomicityGroup } = request
346
348
 
347
349
  if (!atomicityGroup || atomicityGroup !== previousAtomicityGroup) {
@@ -465,6 +467,7 @@ const _multipartBatch = async (srv, router, req, res, next) => {
465
467
  const { requests } = await multipartToJson(req.body, boundary)
466
468
  _processBatch(srv, router, req, res, next, { requests }, 'MULTIPART', boundary)
467
469
  } catch (e) {
470
+ // REVISIT: (how) handle multipart accepts?
468
471
  next(e)
469
472
  }
470
473
  }
@@ -3,11 +3,17 @@ const cds = require('../../../lib')
3
3
  const _log = require('../../_runtime/common/error/log')
4
4
 
5
5
  const { normalizeError, unwrapMultipleErrors } = require('../../_runtime/common/error/frontend')
6
+ const { isStandardError } = require('../../_runtime/common/error/standardError')
6
7
 
7
8
  module.exports = () => {
8
9
  return function odata_error(err, req, res, next) {
9
10
  if (err == 401 || err.code == 401) return next(err) // speed up logins, at least temporary until we reviewed and eliminated overhead that may be involved below
10
11
 
12
+ // if error already has statusCode, it comes from express, don't throw
13
+ if (!err.statusCode && isStandardError(err) && cds.env.server.shutdown_on_uncaught_errors) {
14
+ return next(err)
15
+ }
16
+
11
17
  // REVISIT: keep?
12
18
  // log the error (4xx -> warn)
13
19
  _log(err)
@@ -504,13 +504,12 @@ function _addKeys(columns, target) {
504
504
  function _removeDuplicateAsterisk(columns) {
505
505
  let hasExpandStar = false
506
506
 
507
- for (let i = columns.length - 1; i > 0; i--) {
508
- const column = columns[i]
509
- if (!hasExpandStar && !column.ref && column?.expand?.[0] === '*') hasExpandStar = true
510
- else if (hasExpandStar && !column.ref && column?.expand[0] === '*') {
511
- columns.splice(i, 1)
507
+ columns.forEach((column, i) => {
508
+ if (!column.ref && column.expand?.[0] === '*') {
509
+ if (hasExpandStar) columns.splice(i, 1)
510
+ hasExpandStar = true
512
511
  }
513
- }
512
+ })
514
513
  }
515
514
 
516
515
  const _structProperty = (ref, target) => {