@openhi/constructs 0.0.104 → 0.0.106

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83) hide show
  1. package/README.md +14 -0
  2. package/lib/chunk-2PM2NGXI.mjs +31 -0
  3. package/lib/chunk-2PM2NGXI.mjs.map +1 -0
  4. package/lib/chunk-AGF3RAAZ.mjs +20 -0
  5. package/lib/chunk-AGF3RAAZ.mjs.map +1 -0
  6. package/lib/chunk-AO3E22CS.mjs +108 -0
  7. package/lib/chunk-AO3E22CS.mjs.map +1 -0
  8. package/lib/chunk-CHPEQRXU.mjs +45 -0
  9. package/lib/chunk-CHPEQRXU.mjs.map +1 -0
  10. package/lib/chunk-JUNL76HF.mjs +428 -0
  11. package/lib/chunk-JUNL76HF.mjs.map +1 -0
  12. package/lib/chunk-L6UAP4KP.mjs +27 -0
  13. package/lib/chunk-L6UAP4KP.mjs.map +1 -0
  14. package/lib/{chunk-3QS3WKRC.mjs → chunk-LZOMFHX3.mjs} +9 -2
  15. package/lib/chunk-QMIOLLAS.mjs +531 -0
  16. package/lib/chunk-QMIOLLAS.mjs.map +1 -0
  17. package/lib/chunk-SYBADQXI.mjs +607 -0
  18. package/lib/chunk-SYBADQXI.mjs.map +1 -0
  19. package/lib/chunk-VXX4I3EF.mjs +19 -0
  20. package/lib/chunk-VXX4I3EF.mjs.map +1 -0
  21. package/lib/{chunk-MLTYFMSE.mjs → chunk-VYDIGFIX.mjs} +74 -29
  22. package/lib/chunk-VYDIGFIX.mjs.map +1 -0
  23. package/lib/chunk-YU2HRNUP.mjs +33 -0
  24. package/lib/chunk-YU2HRNUP.mjs.map +1 -0
  25. package/lib/chunk-YZZDUJHI.mjs +37 -0
  26. package/lib/chunk-YZZDUJHI.mjs.map +1 -0
  27. package/lib/cors-options-lambda.handler.mjs +1 -1
  28. package/lib/data-store-postgres-replication.handler.mjs +1 -1
  29. package/lib/events-BfrkMoBD.d.mts +44 -0
  30. package/lib/events-BfrkMoBD.d.ts +44 -0
  31. package/lib/events-CVA3_eEB.d.mts +23 -0
  32. package/lib/events-CVA3_eEB.d.ts +23 -0
  33. package/lib/events-DGep6C7w.d.mts +207 -0
  34. package/lib/events-DGep6C7w.d.ts +207 -0
  35. package/lib/firehose-archive-transform.handler.mjs +1 -1
  36. package/lib/index.d.mts +508 -29
  37. package/lib/index.d.ts +773 -30
  38. package/lib/index.js +2536 -105
  39. package/lib/index.js.map +1 -1
  40. package/lib/index.mjs +899 -106
  41. package/lib/index.mjs.map +1 -1
  42. package/lib/openhi-context-CaBH8SFo.d.mts +39 -0
  43. package/lib/openhi-context-CaBH8SFo.d.ts +39 -0
  44. package/lib/platform-deploy-bridge.handler.d.mts +14 -0
  45. package/lib/platform-deploy-bridge.handler.d.ts +14 -0
  46. package/lib/platform-deploy-bridge.handler.js +762 -0
  47. package/lib/platform-deploy-bridge.handler.js.map +1 -0
  48. package/lib/platform-deploy-bridge.handler.mjs +134 -0
  49. package/lib/platform-deploy-bridge.handler.mjs.map +1 -0
  50. package/lib/post-authentication.handler.mjs +1 -1
  51. package/lib/post-confirmation.handler.js +50 -904
  52. package/lib/post-confirmation.handler.js.map +1 -1
  53. package/lib/post-confirmation.handler.mjs +37 -112
  54. package/lib/post-confirmation.handler.mjs.map +1 -1
  55. package/lib/pre-token-generation.handler.js +135 -55
  56. package/lib/pre-token-generation.handler.js.map +1 -1
  57. package/lib/pre-token-generation.handler.mjs +25 -32
  58. package/lib/pre-token-generation.handler.mjs.map +1 -1
  59. package/lib/provision-default-workspace.handler.d.mts +13 -0
  60. package/lib/provision-default-workspace.handler.d.ts +13 -0
  61. package/lib/provision-default-workspace.handler.js +1172 -0
  62. package/lib/provision-default-workspace.handler.js.map +1 -0
  63. package/lib/provision-default-workspace.handler.mjs +175 -0
  64. package/lib/provision-default-workspace.handler.mjs.map +1 -0
  65. package/lib/rest-api-lambda.handler.js +114 -59
  66. package/lib/rest-api-lambda.handler.js.map +1 -1
  67. package/lib/rest-api-lambda.handler.mjs +60 -587
  68. package/lib/rest-api-lambda.handler.mjs.map +1 -1
  69. package/lib/seed-demo-data.handler.d.mts +107 -0
  70. package/lib/seed-demo-data.handler.d.ts +107 -0
  71. package/lib/seed-demo-data.handler.js +2037 -0
  72. package/lib/seed-demo-data.handler.js.map +1 -0
  73. package/lib/seed-demo-data.handler.mjs +23 -0
  74. package/lib/seed-demo-data.handler.mjs.map +1 -0
  75. package/lib/seed-system-data.handler.d.mts +64 -0
  76. package/lib/seed-system-data.handler.d.ts +64 -0
  77. package/lib/seed-system-data.handler.js +1631 -0
  78. package/lib/seed-system-data.handler.js.map +1 -0
  79. package/lib/seed-system-data.handler.mjs +135 -0
  80. package/lib/seed-system-data.handler.mjs.map +1 -0
  81. package/package.json +4 -2
  82. package/lib/chunk-MLTYFMSE.mjs.map +0 -1
  83. /package/lib/{chunk-3QS3WKRC.mjs.map → chunk-LZOMFHX3.mjs.map} +0 -0
@@ -0,0 +1,428 @@
1
+ import {
2
+ NotFoundError
3
+ } from "./chunk-YZZDUJHI.mjs";
4
+ import {
5
+ SHARD_COUNT,
6
+ getDynamoControlService
7
+ } from "./chunk-VYDIGFIX.mjs";
8
+
9
// src/data/operations/control/user/user-create-operation.ts
import { extractSummary } from "@openhi/types";

/**
 * Creates (or overwrites) the CURRENT record for a control-plane User.
 *
 * params: { context, body, tableName } where body may carry an optional id
 * and a resource that is either an object or a pre-serialized JSON string.
 * Returns { id, resource, meta: { lastUpdated, versionId } }.
 */
async function createUserOperation(params) {
  const { context, body, tableName } = params;
  const service = getDynamoControlService(tableName);

  // Fall back to a timestamp-derived id when the caller did not supply one.
  const userId = body.id ?? `user-${Date.now()}`;

  // The resource may arrive pre-serialized; normalize it to an object.
  let incoming;
  if (typeof body.resource === "string") {
    incoming = JSON.parse(body.resource);
  } else {
    incoming = body.resource ?? {};
  }

  const lastUpdated = context.date ?? new Date().toISOString();
  // Freshly created users always start at version "1".
  const versionId = `1`;

  // Spread order lets the incoming resource override resourceType/id if it carries them.
  const resource = { resourceType: "User", id: userId, ...incoming };

  await service.entities.user
    .put({
      id: userId,
      resource: JSON.stringify(resource),
      summary: JSON.stringify(extractSummary(resource)),
      vid: versionId,
      lastUpdated
    })
    .go();

  return {
    id: userId,
    resource,
    meta: { lastUpdated, versionId }
  };
}
33
+
34
// src/data/operations/control/user/user-delete-operation.ts

/**
 * Deletes the CURRENT record for a control-plane User.
 * No error is raised when the user does not exist (DynamoDB delete is idempotent).
 */
async function deleteUserOperation(params) {
  const { id, tableName } = params;
  const service = getDynamoControlService(tableName);
  const key = { id, sk: "CURRENT" };
  await service.entities.user.delete(key).go();
}
40
+
41
// src/data/operations/control/user/user-get-by-id-operation.ts

/**
 * Fetches the CURRENT record for a control-plane User by id.
 * Throws NotFoundError when no record exists.
 * Returns { id, resource } with the stored JSON merged over a
 * { resourceType: "User", id } base.
 */
async function getUserByIdOperation(params) {
  const { id, tableName } = params;
  const service = getDynamoControlService(tableName);

  const { data } = await service.entities.user.get({ id, sk: "CURRENT" }).go();
  if (!data) {
    throw new NotFoundError(`User not found: ${id}`);
  }

  // Stored resource is plain JSON for control-plane users (not compressed).
  const stored = JSON.parse(data.resource);
  return {
    id,
    resource: { resourceType: "User", id, ...stored }
  };
}
56
+
57
+ // src/data/operations/data-operations-common.ts
58
+ import { extractSortKey, extractSummary as extractSummary2 } from "@openhi/types";
59
+
60
// src/lib/compression.ts
import {
  gzipSync,
  gunzipSync,
  brotliCompressSync,
  brotliDecompressSync,
  deflateSync,
  inflateSync
} from "zlib";

var ENVELOPE_VERSION = 1;
var COMPRESSION_ALGOS = {
  NONE: "none",
  GZIP: "gzip",
  BROTLI: "brotli",
  DEFLATE: "deflate"
};

/**
 * True when `obj` looks like a compression envelope:
 * { v, algo, payload: string }.
 */
function isEnvelope(obj) {
  return typeof obj === "object" && obj !== null && "v" in obj && "algo" in obj && "payload" in obj && typeof obj.payload === "string";
}

/**
 * Wraps a JSON string in a versioned compression envelope.
 *
 * Fix: previously every non-"none" request was gzip'd regardless of
 * options.algo, even though COMPRESSION_ALGOS advertises brotli/deflate
 * (and decompressResource would then hand back raw base64 for those
 * labels). Now the requested algorithm is honored; unknown algorithms
 * fall back to gzip, and the default remains gzip, so existing callers
 * see identical output.
 *
 * @param jsonString already-serialized resource JSON
 * @param options optional { algo } — one of COMPRESSION_ALGOS values
 * @returns JSON-serialized envelope string
 */
function compressResource(jsonString, options) {
  const algo = options?.algo ?? COMPRESSION_ALGOS.GZIP;
  if (algo === COMPRESSION_ALGOS.NONE) {
    const envelope2 = {
      v: ENVELOPE_VERSION,
      algo: COMPRESSION_ALGOS.NONE,
      payload: jsonString
    };
    return JSON.stringify(envelope2);
  }
  const buf = Buffer.from(jsonString, "utf-8");
  let usedAlgo;
  let compressed;
  if (algo === COMPRESSION_ALGOS.BROTLI) {
    usedAlgo = COMPRESSION_ALGOS.BROTLI;
    compressed = brotliCompressSync(buf);
  } else if (algo === COMPRESSION_ALGOS.DEFLATE) {
    usedAlgo = COMPRESSION_ALGOS.DEFLATE;
    compressed = deflateSync(buf);
  } else {
    // Default and unknown-algo fallback: gzip (pre-fix behavior).
    usedAlgo = COMPRESSION_ALGOS.GZIP;
    compressed = gzipSync(buf);
  }
  const envelope = {
    v: ENVELOPE_VERSION,
    algo: usedAlgo,
    payload: compressed.toString("base64")
  };
  return JSON.stringify(envelope);
}

/**
 * Inverse of compressResource, tolerant of legacy formats:
 *   1. A JSON envelope — decoded per its `algo` field.
 *   2. Bare base64 gzip data (magic bytes 0x1f 0x8b) — gunzipped.
 *   3. Anything else — returned unchanged (assumed raw JSON).
 */
function decompressResource(compressedOrRaw) {
  try {
    const parsed = JSON.parse(compressedOrRaw);
    if (isEnvelope(parsed)) {
      if (parsed.algo === COMPRESSION_ALGOS.GZIP) {
        return gunzipSync(Buffer.from(parsed.payload, "base64")).toString("utf-8");
      }
      if (parsed.algo === COMPRESSION_ALGOS.BROTLI) {
        return brotliDecompressSync(Buffer.from(parsed.payload, "base64")).toString("utf-8");
      }
      if (parsed.algo === COMPRESSION_ALGOS.DEFLATE) {
        return inflateSync(Buffer.from(parsed.payload, "base64")).toString("utf-8");
      }
      if (parsed.algo === COMPRESSION_ALGOS.NONE) {
        return parsed.payload;
      }
      // Unknown algo: hand back the payload as-is (pre-fix behavior).
      return parsed.payload;
    }
  } catch {
  }
  // Legacy path: bare base64-encoded gzip blob without an envelope.
  try {
    const buf = Buffer.from(compressedOrRaw, "base64");
    if (buf.length >= 2 && buf[0] === 31 && buf[1] === 139) {
      return gunzipSync(buf).toString("utf-8");
    }
  } catch {
  }
  return compressedOrRaw;
}
115
+
116
// src/data/audit-meta.ts
var OPENHI_EXT = "http://openhi.org/fhir/StructureDefinition";

/**
 * Merges audit fields into a FHIR-style meta object as extensions.
 *
 * Each non-null audit value becomes an extension entry keyed by URL;
 * an existing extension with the same URL is replaced in place (its
 * original position is preserved), other extensions are untouched.
 *
 * @param meta existing meta object or null/undefined
 * @param audit { createdDate, createdById, createdByName, modifiedDate,
 *                modifiedById, modifiedByName, deletedDate, deletedById,
 *                deletedByName } — any subset may be present
 * @returns a new meta object with the merged extension array
 */
function mergeAuditIntoMeta(meta, audit) {
  const base = meta ?? {};
  const current = Array.isArray(base.extension) ? base.extension : [];
  const merged = new Map(current.map((entry) => [entry.url, entry]));

  // URL suffix, value, and the FHIR value[x] key for each audit field.
  const fields = [
    ["created-date", audit.createdDate, "valueDateTime"],
    ["created-by-id", audit.createdById, "valueString"],
    ["created-by-name", audit.createdByName, "valueString"],
    ["modified-date", audit.modifiedDate, "valueDateTime"],
    ["modified-by-id", audit.modifiedById, "valueString"],
    ["modified-by-name", audit.modifiedByName, "valueString"],
    ["deleted-date", audit.deletedDate, "valueDateTime"],
    ["deleted-by-id", audit.deletedById, "valueString"],
    ["deleted-by-name", audit.deletedByName, "valueString"]
  ];
  for (const [suffix, value, valueKey] of fields) {
    if (value == null) continue;
    const url = `${OPENHI_EXT}/${suffix}`;
    merged.set(url, { url, [valueKey]: value });
  }

  return { ...base, extension: Array.from(merged.values()) };
}
139
+
140
// src/data/operations/data-operations-common.ts
// Sort key shared by all data-plane CURRENT records.
var DATA_ENTITY_SK = "CURRENT";

/**
 * Loads the CURRENT record for a tenant/workspace-scoped data entity.
 *
 * @param entity ElectroDB-style entity with get(...).go()
 * @param resourceLabel human-readable type name used in the error message
 * @throws NotFoundError when no record exists
 * @returns { id, resource } — the decompressed resource with the stored
 *          id forced over whatever id the serialized JSON carries
 */
async function getDataEntityById(entity, tenantId, workspaceId, id, resourceLabel) {
  const key = { tenantId, workspaceId, id, sk: DATA_ENTITY_SK };
  const { data } = await entity.get(key).go();
  if (!data) {
    throw new NotFoundError(`${resourceLabel} ${id} not found`, {
      details: { id }
    });
  }
  // Stored resource may be wrapped in a compression envelope.
  const resource = JSON.parse(decompressResource(data.resource));
  return {
    id: data.id,
    resource: { ...resource, id: data.id }
  };
}
160
/**
 * Deletes the CURRENT record for a tenant/workspace-scoped data entity.
 * Idempotent: no error when the record is already absent.
 */
async function deleteDataEntityById(entity, tenantId, workspaceId, id) {
  const key = { tenantId, workspaceId, id, sk: DATA_ENTITY_SK };
  await entity.delete(key).go();
}
168
+ var BATCH_GET_MAX_ATTEMPTS = 3;
169
+ var BATCH_GET_BASE_BACKOFF_MS = 50;
170
+ async function batchGetWithRetry(entity, keys) {
171
+ if (keys.length === 0) return [];
172
+ const collected = [];
173
+ let pending = keys;
174
+ let attempt = 0;
175
+ while (pending.length > 0) {
176
+ if (attempt > 0) {
177
+ await new Promise(
178
+ (resolve) => setTimeout(resolve, BATCH_GET_BASE_BACKOFF_MS * 2 ** (attempt - 1))
179
+ );
180
+ }
181
+ attempt++;
182
+ const result = await entity.get(pending).go();
183
+ collected.push(...result.data);
184
+ const unprocessed = result.unprocessed ?? [];
185
+ if (unprocessed.length === 0) break;
186
+ if (attempt >= BATCH_GET_MAX_ATTEMPTS) {
187
+ throw new Error(
188
+ `BatchGet exhausted retries: ${unprocessed.length} key(s) still unprocessed after ${BATCH_GET_MAX_ATTEMPTS} attempt(s)`
189
+ );
190
+ }
191
+ pending = unprocessed;
192
+ }
193
+ return collected;
194
+ }
195
+ async function dispatchListMode(mode, shardResults, hooks) {
196
+ if (mode === "count") {
197
+ let total = 0;
198
+ for (const shardResult of shardResults) {
199
+ total += (shardResult.data ?? []).length;
200
+ }
201
+ return { entries: [], total };
202
+ }
203
+ if (mode === "summary") {
204
+ const entries2 = [];
205
+ for (const shardResult of shardResults) {
206
+ for (const item of shardResult.data ?? []) {
207
+ if (typeof item.summary !== "string") continue;
208
+ let parsed;
209
+ try {
210
+ parsed = JSON.parse(item.summary);
211
+ } catch {
212
+ continue;
213
+ }
214
+ entries2.push(hooks.buildSummaryEntry(item.id, parsed));
215
+ }
216
+ }
217
+ return { entries: entries2, total: entries2.length };
218
+ }
219
+ const orderedIds = [];
220
+ for (const shardResult of shardResults) {
221
+ for (const item of shardResult.data ?? []) {
222
+ orderedIds.push(item.id);
223
+ }
224
+ }
225
+ if (orderedIds.length === 0) return { entries: [], total: 0 };
226
+ const items = await hooks.hydrate(orderedIds);
227
+ const byId = new Map(items.map((item) => [hooks.getId(item), item]));
228
+ const entries = [];
229
+ for (const id of orderedIds) {
230
+ const item = byId.get(id);
231
+ if (!item) continue;
232
+ entries.push(hooks.buildEntry(id, item));
233
+ }
234
+ return { entries, total: entries.length };
235
+ }
236
/**
 * Lists all CURRENT records of a data entity within one tenant/workspace
 * by fanning out one gsi1 query per shard (0..SHARD_COUNT-1) in parallel,
 * then dispatching on `mode` ("count" | "summary" | "full", default "full").
 * Full mode hydrates items via batchGetWithRetry and decompresses each
 * stored resource; the record id overrides any id inside the JSON.
 */
async function listDataEntitiesByWorkspace(entity, tenantId, workspaceId, mode = "full") {
  const shardQueries = Array.from({ length: SHARD_COUNT }, (_, shard) =>
    entity.query.gsi1({ tenantId, workspaceId, gsi1Shard: String(shard) }).go()
  );
  const shardResults = await Promise.all(shardQueries);

  const hooks = {
    hydrate: (ids) =>
      batchGetWithRetry(
        entity,
        ids.map((id) => ({ tenantId, workspaceId, id, sk: DATA_ENTITY_SK }))
      ),
    getId: (item) => item.id,
    buildEntry: (id, item) => {
      const resource = JSON.parse(decompressResource(item.resource));
      return { id, resource: { ...resource, id } };
    },
    buildSummaryEntry: (id, parsed) => ({ id, resource: { ...parsed, id } })
  };
  return dispatchListMode(mode, shardResults, hooks);
}
268
/**
 * Persists a data entity's CURRENT record.
 *
 * lastUpdated resolution order: resource.meta.lastUpdated, then
 * fallbackDate, then now. The version id (vid) is the timestamp with
 * separators stripped, truncated to 12 chars; if that yields an empty
 * string, a base36 Date.now() is used instead.
 * The resource is compressed before storage; summary and gsi1sk are
 * derived from the uncompressed resource.
 */
async function createDataEntityRecord(entity, tenantId, workspaceId, id, resourceWithAudit, fallbackDate) {
  const lastUpdated =
    resourceWithAudit.meta?.lastUpdated ?? fallbackDate ?? new Date().toISOString();
  const vid =
    lastUpdated.replace(/[-:T.Z]/g, "").slice(0, 12) || Date.now().toString(36);

  const record = {
    sk: DATA_ENTITY_SK,
    tenantId,
    workspaceId,
    id,
    resource: compressResource(JSON.stringify(resourceWithAudit)),
    summary: JSON.stringify(extractSummary2(resourceWithAudit)),
    vid,
    lastUpdated,
    gsi1sk: extractSortKey(resourceWithAudit)
  };
  await entity.put(record).go();

  return { id, resource: resourceWithAudit };
}
290
/**
 * Builds the updated resource for a PUT-style update: the incoming body
 * with resourceType/id forced, meta.lastUpdated set to `date`,
 * meta.versionId pinned to "2", and modified-* audit extensions merged in.
 *
 * NOTE(review): versionId is a fixed "2" here (it does not increment from
 * the stored version) — confirm against the versioning scheme.
 *
 * @param existingResourceStr stored resource JSON; parsed for its meta
 *        (also validates the stored JSON — a malformed string throws here)
 * @returns { resource, lastUpdated }
 */
function buildUpdatedResourceWithAudit(body, id, date, actorId, actorName, existingResourceStr, resourceType) {
  const existingMeta = JSON.parse(existingResourceStr).meta;

  const versionedMeta = {
    ...(body.meta ?? {}),
    lastUpdated: date,
    versionId: "2"
  };
  // versionedMeta is always defined, so existingMeta only serves as a
  // defensive fallback.
  const auditedMeta = mergeAuditIntoMeta(versionedMeta ?? existingMeta, {
    modifiedDate: date,
    modifiedById: actorId,
    modifiedByName: actorName
  });

  const resource = {
    ...body,
    resourceType,
    id,
    meta: auditedMeta
  };
  return {
    resource,
    lastUpdated: date
  };
}
316
/**
 * Patches the CURRENT record of a data entity.
 *
 * Loads the existing record (NotFoundError when absent), hands the
 * decompressed resource string to `buildPatched`, which returns
 * { resource, lastUpdated }, then writes the re-compressed resource plus
 * freshly derived summary and gsi1sk back via a patch.
 * `context` is accepted for signature parity but unused here.
 */
async function updateDataEntityById(entity, tenantId, workspaceId, id, resourceLabel, context, buildPatched) {
  const key = { tenantId, workspaceId, id, sk: DATA_ENTITY_SK };

  const existing = await entity.get(key).go();
  if (!existing.data) {
    throw new NotFoundError(`${resourceLabel} ${id} not found`, {
      details: { id }
    });
  }

  const currentStr = decompressResource(existing.data.resource);
  const { resource, lastUpdated } = buildPatched(currentStr);

  await entity
    .patch(key)
    .set({
      resource: compressResource(JSON.stringify(resource)),
      summary: JSON.stringify(extractSummary2(resource)),
      lastUpdated,
      gsi1sk: extractSortKey(resource)
    })
    .go();

  return { id, resource };
}
349
+
350
// src/data/operations/control/user/user-list-operation.ts
var SK = "CURRENT";

/**
 * Lists control-plane Users by querying every gsi1 shard in parallel and
 * dispatching on mode ("count" | "summary" | "full", default "full").
 * Full mode hydrates users via batchGetWithRetry; each entry is the stored
 * JSON merged over a { resourceType: "User", id } base.
 */
async function listUsersOperation(params) {
  const { tableName, mode = "full" } = params;
  const service = getDynamoControlService(tableName);
  const userEntity = service.entities.user;

  const shardQueries = Array.from({ length: SHARD_COUNT }, (_, shard) =>
    userEntity.query.gsi1({ gsi1Shard: String(shard) }).go()
  );
  const shardResults = await Promise.all(shardQueries);

  return dispatchListMode(mode, shardResults, {
    hydrate: (ids) =>
      batchGetWithRetry(userEntity, ids.map((id) => ({ id, sk: SK }))),
    getId: (item) => item.id,
    buildEntry: (id, item) => ({
      id,
      resource: { resourceType: "User", id, ...JSON.parse(item.resource) }
    }),
    buildSummaryEntry: (id, parsed) => ({
      id,
      resource: { resourceType: "User", id, ...parsed }
    })
  });
}
381
+
382
// src/data/operations/control/user/user-update-operation.ts
import { extractSummary as extractSummary3 } from "@openhi/types";

/**
 * Replaces the CURRENT record of a control-plane User.
 * Verifies the user exists first (NotFoundError otherwise), then puts a
 * rebuilt record. versionId is a fresh Date.now() timestamp string.
 * Returns { id, resource, meta: { lastUpdated, versionId } }.
 */
async function updateUserOperation(params) {
  const { context, id, body, tableName } = params;
  const service = getDynamoControlService(tableName);

  // Ensure the user exists before overwriting.
  const existing = await service.entities.user.get({ id, sk: "CURRENT" }).go();
  if (!existing.data) {
    throw new NotFoundError(`User not found: ${id}`);
  }

  // The resource may arrive pre-serialized; normalize it to an object.
  let incoming;
  if (typeof body.resource === "string") {
    incoming = JSON.parse(body.resource);
  } else {
    incoming = body.resource ?? {};
  }

  const lastUpdated = context.date ?? new Date().toISOString();
  const versionId = `${Date.now()}`;
  const resource = { resourceType: "User", id, ...incoming };

  await service.entities.user
    .put({
      id,
      resource: JSON.stringify(resource),
      summary: JSON.stringify(extractSummary3(resource)),
      vid: versionId,
      lastUpdated
    })
    .go();

  return {
    id,
    resource,
    meta: { lastUpdated, versionId }
  };
}
409
+
410
+ export {
411
+ compressResource,
412
+ decompressResource,
413
+ mergeAuditIntoMeta,
414
+ getDataEntityById,
415
+ deleteDataEntityById,
416
+ batchGetWithRetry,
417
+ dispatchListMode,
418
+ listDataEntitiesByWorkspace,
419
+ createDataEntityRecord,
420
+ buildUpdatedResourceWithAudit,
421
+ updateDataEntityById,
422
+ createUserOperation,
423
+ deleteUserOperation,
424
+ getUserByIdOperation,
425
+ listUsersOperation,
426
+ updateUserOperation
427
+ };
428
+ //# sourceMappingURL=chunk-JUNL76HF.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/data/operations/control/user/user-create-operation.ts","../src/data/operations/control/user/user-delete-operation.ts","../src/data/operations/control/user/user-get-by-id-operation.ts","../src/data/operations/data-operations-common.ts","../src/lib/compression.ts","../src/data/audit-meta.ts","../src/data/operations/control/user/user-list-operation.ts","../src/data/operations/control/user/user-update-operation.ts"],"sourcesContent":["import { extractSummary, type FhirResourceLike } from \"@openhi/types\";\nimport { getDynamoControlService } from \"../../../dynamo/dynamo-control-service\";\nimport { OpenHiContext } from \"../../../openhi-context\";\n\nexport interface UserCreateParams {\n context: OpenHiContext;\n body: { id?: string; resource?: Record<string, unknown> | string };\n tableName?: string;\n}\n\nexport interface UserCreateResult {\n id: string;\n resource: { resourceType: string; id: string; [key: string]: unknown };\n meta: { lastUpdated: string; versionId: string };\n}\n\nexport async function createUserOperation(\n params: UserCreateParams,\n): Promise<UserCreateResult> {\n const { context, body, tableName } = params;\n const service = getDynamoControlService(tableName);\n\n const id = body.id ?? `user-${Date.now()}`;\n const parsedResource =\n typeof body.resource === \"string\"\n ? (JSON.parse(body.resource) as Record<string, unknown>)\n : (body.resource ?? {});\n\n const lastUpdated = context.date ?? 
new Date().toISOString();\n const vid = `1`;\n\n const resource = { resourceType: \"User\", id, ...parsedResource };\n const summary = JSON.stringify(extractSummary(resource as FhirResourceLike));\n\n await service.entities.user\n .put({\n id,\n resource: JSON.stringify(resource),\n summary,\n vid,\n lastUpdated,\n })\n .go();\n\n return {\n id,\n resource,\n meta: { lastUpdated, versionId: vid },\n };\n}\n","import { getDynamoControlService } from \"../../../dynamo/dynamo-control-service\";\nimport { OpenHiContext } from \"../../../openhi-context\";\n\nexport interface UserDeleteParams {\n context: OpenHiContext;\n id: string;\n tableName?: string;\n}\n\nexport async function deleteUserOperation(\n params: UserDeleteParams,\n): Promise<void> {\n const { id, tableName } = params;\n const service = getDynamoControlService(tableName);\n\n await service.entities.user.delete({ id, sk: \"CURRENT\" }).go();\n}\n","import { getDynamoControlService } from \"../../../dynamo/dynamo-control-service\";\nimport { NotFoundError } from \"../../../errors\";\nimport { OpenHiContext } from \"../../../openhi-context\";\n\nexport interface UserGetByIdParams {\n context: OpenHiContext;\n id: string;\n tableName?: string;\n}\n\nexport interface UserGetByIdResult {\n id: string;\n resource: { resourceType: string; id: string; [key: string]: unknown };\n}\n\nexport async function getUserByIdOperation(\n params: UserGetByIdParams,\n): Promise<UserGetByIdResult> {\n const { id, tableName } = params;\n const service = getDynamoControlService(tableName);\n\n const response = await service.entities.user.get({ id, sk: \"CURRENT\" }).go();\n\n const item = response.data;\n if (!item) {\n throw new NotFoundError(`User not found: ${id}`);\n }\n\n const parsedResource = JSON.parse(item.resource) as Record<string, unknown>;\n\n return {\n id,\n resource: { resourceType: \"User\", id, ...parsedResource },\n };\n}\n","import { extractSortKey, extractSummary } from \"@openhi/types\";\nimport type { 
Meta, FhirResourceLike } from \"@openhi/types\";\nimport { compressResource, decompressResource } from \"../../lib/compression\";\nimport { mergeAuditIntoMeta, type MetaWithExtensions } from \"../audit-meta\";\nimport { SHARD_COUNT } from \"../dynamo/shard\";\nimport { NotFoundError } from \"../errors\";\nimport type { OpenHiContext } from \"../openhi-context\";\n\n/**\n * Sort key for the current record version. Matches Dynamo record index SK default.\n * Use this in get/update/delete (and create where applicable) for data-plane entities.\n */\nexport const DATA_ENTITY_SK = \"CURRENT\" as const;\n\n/** Base params for data-entity operations: context and optional table override. */\nexport interface BaseDataEntityParams {\n context: OpenHiContext;\n /** Optional table name override; resolved by data service from DYNAMO_TABLE_NAME when omitted. */\n tableName?: string;\n}\n\n/** Params for get-by-id and delete (context + id + optional tableName). */\nexport interface GetByIdParams extends BaseDataEntityParams {\n id: string;\n}\n\n/**\n * Mode for list operations introduced by #853 to back FHIR `_summary` opt-ins.\n *\n * - `full` (default): GSI1 fan-out → BatchGet hydration → full FHIR resource per entry.\n * - `summary`: GSI1 fan-out only; the `summary` JSON projected onto GSI1 is parsed and used\n * as `resource` per entry. Skips BatchGet entirely — that's the cost win the FHIR spec\n * intends `_summary=true` to deliver.\n * - `count`: GSI1 fan-out only; entries are dropped, only `total` is populated. 
Routes\n * should pass `total` into `buildSearchsetBundle({ mode: \"count\", total })`.\n *\n * `_elements` is implemented at the route layer as `full` mode + post-hydration pruning,\n * since per-element pruning has to happen after decompression and is FHIR-spec-defined\n * (always retain root-level mandatories — see `prune-resource-by-elements.ts`).\n */\nexport type ListOperationMode = \"full\" | \"summary\" | \"count\";\n\n/** Params for list (context + optional tableName + optional mode for #853 `_summary`). */\nexport interface ListParams extends BaseDataEntityParams {\n /** Defaults to `\"full\"` (current behavior); routes pass other modes for `_summary` opt-ins. */\n mode?: ListOperationMode;\n}\n\n/** Result for create / get-by-id / update: single resource. */\nexport interface SingleResourceResult<T> {\n id: string;\n resource: T;\n}\n\n/** Entry shape for list results. */\nexport interface ListEntry<T> {\n id: string;\n resource: T;\n}\n\n/**\n * Result for list: entries array plus total count.\n *\n * - For `mode === \"full\"` and `mode === \"summary\"`, `total === entries.length`.\n * - For `mode === \"count\"`, `entries` is empty and `total` is the GSI1 fan-out count.\n *\n * Splitting `total` from `entries.length` lets count-mode routes report a true count\n * while skipping any per-entry work.\n */\nexport interface ListResult<T> {\n entries: Array<ListEntry<T>>;\n total: number;\n}\n\n/** Minimal entity shape for get (used by getDataEntityById). */\ninterface EntityWithGet {\n get(params: {\n tenantId: string;\n workspaceId: string;\n id: string;\n sk: string;\n }): { go(): Promise<{ data: { id: string; resource: string } | null }> };\n}\n\n/** Minimal entity shape for delete (used by deleteDataEntityById). 
*/\ninterface EntityWithDelete {\n delete(params: {\n tenantId: string;\n workspaceId: string;\n id: string;\n sk: string;\n }): { go(): Promise<unknown> };\n}\n\n/**\n * Minimal entity shape for list via GSI1 + BatchGet hydration (used by listDataEntitiesByWorkspace).\n * GSI1 is sharded per ADR-011, so listing fans out to each shard and concatenates ids; the\n * `resource` attribute is NOT projected onto GSI1 (per the summary projection in\n * `dynamo-db-data-store.ts`), so the second phase BatchGets the base table for full items.\n *\n * GSI1 INCLUDE projection (per `dynamo-db-data-store.ts`) carries `summary`, `vid`, `lastUpdated`,\n * `createdDate`, `modifiedDate`, `createdById`, `modifiedById` alongside the key attributes.\n * `summary` is what `mode: \"summary\"` returns to the caller without hitting the base table.\n */\ninterface DataEntityWithListAndBatchGet {\n query: {\n gsi1(params: {\n tenantId: string;\n workspaceId: string;\n gsi1Shard: string;\n }): {\n go(): Promise<{\n data: Array<{\n id: string;\n summary?: string;\n vid?: string;\n lastUpdated?: string;\n createdDate?: string;\n modifiedDate?: string;\n createdById?: string;\n modifiedById?: string;\n }> | null;\n }>;\n };\n };\n get(\n keys: Array<{\n tenantId: string;\n workspaceId: string;\n id: string;\n sk: string;\n }>,\n ): {\n go(): Promise<{\n data: Array<{ id: string; resource: string }>;\n unprocessed: Array<{\n tenantId: string;\n workspaceId: string;\n id: string;\n sk: string;\n }>;\n }>;\n };\n}\n\n/** Minimal entity shape for put (used by createDataEntityRecord). */\ninterface EntityWithPut {\n put(attrs: {\n sk: string;\n tenantId: string;\n workspaceId: string;\n id: string;\n resource: string;\n summary: string;\n vid: string;\n lastUpdated: string;\n gsi1sk: string;\n }): { go(): Promise<unknown> };\n}\n\n/** Minimal entity shape for patch (used by updateDataEntityById). 
*/\ninterface EntityWithPatch {\n get(params: {\n tenantId: string;\n workspaceId: string;\n id: string;\n sk: string;\n }): { go(): Promise<{ data: { id: string; resource: string } | null }> };\n patch(params: {\n tenantId: string;\n workspaceId: string;\n id: string;\n sk: string;\n }): {\n set(attrs: {\n resource: string;\n summary: string;\n lastUpdated: string;\n gsi1sk: string;\n }): {\n go(): Promise<unknown>;\n };\n };\n}\n\n/**\n * Get a single data-entity record by id. Decompresses and parses resource; throws NotFoundError if missing.\n * Use from get-by-id operations with the appropriate entity and resource type.\n */\nexport async function getDataEntityById<T>(\n entity: EntityWithGet,\n tenantId: string,\n workspaceId: string,\n id: string,\n resourceLabel: string,\n): Promise<SingleResourceResult<T>> {\n const result = await entity\n .get({\n tenantId,\n workspaceId,\n id,\n sk: DATA_ENTITY_SK,\n })\n .go();\n\n if (!result.data) {\n throw new NotFoundError(`${resourceLabel} ${id} not found`, {\n details: { id },\n });\n }\n\n const parsed = JSON.parse(decompressResource(result.data.resource)) as T & {\n id?: string;\n };\n return {\n id: result.data.id,\n resource: { ...parsed, id: result.data.id } as T,\n };\n}\n\n/**\n * Delete a data-entity record by id. Idempotent (no-op if not found).\n * Use from delete operations with the appropriate entity.\n */\nexport async function deleteDataEntityById(\n entity: EntityWithDelete,\n tenantId: string,\n workspaceId: string,\n id: string,\n): Promise<void> {\n await entity\n .delete({\n tenantId,\n workspaceId,\n id,\n sk: DATA_ENTITY_SK,\n })\n .go();\n}\n\n/** Maximum total attempts (initial + retries) when hydrating list ids via BatchGet. */\nconst BATCH_GET_MAX_ATTEMPTS = 3;\n/** Base backoff in milliseconds applied to BatchGet retries; doubles each attempt. 
*/\nconst BATCH_GET_BASE_BACKOFF_MS = 50;\n\n/** Minimal entity shape for BatchGet hydration on the base table; chunks of 100 are handled by ElectroDB. */\ninterface EntityWithBatchGet<TKey, TItem> {\n get(keys: TKey[]): {\n go(): Promise<{ data: TItem[]; unprocessed: TKey[] }>;\n };\n}\n\n/**\n * BatchGet wrapper that retries `UnprocessedKeys` with exponential backoff. ElectroDB chunks the\n * input keys into groups of 100 internally, but does not retry unprocessed keys — DynamoDB can\n * return some keys unprocessed under throttling or partial failure, and the caller must reissue\n * them. Throws after `BATCH_GET_MAX_ATTEMPTS` if any keys remain unprocessed; intended for list\n * hydration (#854) where partial results would silently truncate the response.\n */\nexport async function batchGetWithRetry<TKey, TItem>(\n entity: EntityWithBatchGet<TKey, TItem>,\n keys: TKey[],\n): Promise<TItem[]> {\n if (keys.length === 0) return [];\n\n const collected: TItem[] = [];\n let pending = keys;\n let attempt = 0;\n\n while (pending.length > 0) {\n if (attempt > 0) {\n await new Promise((resolve) =>\n setTimeout(resolve, BATCH_GET_BASE_BACKOFF_MS * 2 ** (attempt - 1)),\n );\n }\n attempt++;\n const result = await entity.get(pending).go();\n collected.push(...result.data);\n const unprocessed = result.unprocessed ?? [];\n if (unprocessed.length === 0) break;\n if (attempt >= BATCH_GET_MAX_ATTEMPTS) {\n throw new Error(\n `BatchGet exhausted retries: ${unprocessed.length} key(s) still unprocessed after ${BATCH_GET_MAX_ATTEMPTS} attempt(s)`,\n );\n }\n pending = unprocessed;\n }\n\n return collected;\n}\n\n/** GSI1 index item shape — what a sharded `gsi1.query().go()` returns per row. */\nexport interface ShardedListIndexItem {\n id: string;\n summary?: string;\n}\n\n/** Hooks that adapt a generic mode-dispatching list to a specific entity's hydration and entry shape. 
*/\nexport interface DispatchListModeHooks<TItem, TEntry> {\n /** Hydrate the base table for the given ids (typically `batchGetWithRetry(entity, keys)`). */\n hydrate: (orderedIds: string[]) => Promise<TItem[]>;\n /** Extract the canonical id from a hydrated item so it can be matched back to the GSI1 order. */\n getId: (item: TItem) => string;\n /** Build the result entry for `mode === \"full\"` from a hydrated base-table item. */\n buildEntry: (id: string, item: TItem) => TEntry;\n /** Build the result entry for `mode === \"summary\"` from the parsed GSI1 `summary` JSON. */\n buildSummaryEntry: (\n id: string,\n parsedSummary: Record<string, unknown>,\n ) => TEntry;\n}\n\n/**\n * Mode dispatcher shared by data-plane and control-plane list operations (#853).\n *\n * Given pre-fetched `shardResults` from a sharded GSI1 fan-out, returns either:\n * - `mode === \"count\"` — `{ entries: [], total }` where total is the sum of shard row counts.\n * - `mode === \"summary\"` — entries built from each shard row's parsed `summary` JSON; rows with\n * missing or unparseable `summary` are dropped.\n * - `mode === \"full\"` — calls `hydrate(orderedIds)` (typically a BatchGet) and builds entries\n * from hydrated items in per-shard GSI1 sort order; missing items are dropped.\n *\n * Lives here (alongside `listDataEntitiesByWorkspace`) because the same dispatch logic is needed\n * by every list operation that backs a FHIR list/search endpoint, including the seven\n * control-plane peers (User, Role, Tenant, Workspace, Membership, RoleAssignment, Configuration).\n */\nexport async function dispatchListMode<TItem, TEntry>(\n mode: ListOperationMode,\n shardResults: Array<{ data: Array<ShardedListIndexItem> | null }>,\n hooks: DispatchListModeHooks<TItem, TEntry>,\n): Promise<{ entries: TEntry[]; total: number }> {\n if (mode === \"count\") {\n let total = 0;\n for (const shardResult of shardResults) {\n total += (shardResult.data ?? 
[]).length;\n }\n return { entries: [], total };\n }\n\n if (mode === \"summary\") {\n const entries: TEntry[] = [];\n for (const shardResult of shardResults) {\n for (const item of shardResult.data ?? []) {\n if (typeof item.summary !== \"string\") continue;\n let parsed: Record<string, unknown>;\n try {\n parsed = JSON.parse(item.summary) as Record<string, unknown>;\n } catch {\n continue;\n }\n entries.push(hooks.buildSummaryEntry(item.id, parsed));\n }\n }\n return { entries, total: entries.length };\n }\n\n const orderedIds: string[] = [];\n for (const shardResult of shardResults) {\n for (const item of shardResult.data ?? []) {\n orderedIds.push(item.id);\n }\n }\n\n if (orderedIds.length === 0) return { entries: [], total: 0 };\n\n const items = await hooks.hydrate(orderedIds);\n const byId = new Map(items.map((item) => [hooks.getId(item), item]));\n\n const entries: TEntry[] = [];\n for (const id of orderedIds) {\n const item = byId.get(id);\n if (!item) continue;\n entries.push(hooks.buildEntry(id, item));\n }\n\n return { entries, total: entries.length };\n}\n\n/**\n * List data-entity records in a workspace via GSI1.\n *\n * `mode` (default `\"full\"`) selects the read shape — see `dispatchListMode`. The data-plane\n * binding here adds the four-shard fan-out (per ADR-011) and the BatchGet hydration with\n * decompression for `mode === \"full\"`. 
K-way merge by `gsi1sk` is intentionally NOT done here\n * — full server-side natural sort lands with the FHIR list-endpoint plumbing that adds\n * pagination tokens.\n */\nexport async function listDataEntitiesByWorkspace<T>(\n entity: DataEntityWithListAndBatchGet,\n tenantId: string,\n workspaceId: string,\n mode: ListOperationMode = \"full\",\n): Promise<ListResult<T>> {\n const shardResults = await Promise.all(\n Array.from({ length: SHARD_COUNT }, (_, shard) =>\n entity.query\n .gsi1({ tenantId, workspaceId, gsi1Shard: String(shard) })\n .go(),\n ),\n );\n\n return dispatchListMode<{ id: string; resource: string }, ListEntry<T>>(\n mode,\n shardResults,\n {\n hydrate: (orderedIds) =>\n batchGetWithRetry(\n entity,\n orderedIds.map((id) => ({\n tenantId,\n workspaceId,\n id,\n sk: DATA_ENTITY_SK,\n })),\n ),\n getId: (item) => item.id,\n buildEntry: (id, item) => {\n const parsed = JSON.parse(decompressResource(item.resource)) as T & {\n id?: string;\n };\n return { id, resource: { ...parsed, id } as T };\n },\n buildSummaryEntry: (id, parsed) => ({\n id,\n resource: { ...parsed, id } as T,\n }),\n },\n );\n}\n\n/**\n * Create a data-entity record with put. Computes vid from lastUpdated (from resource meta or fallback).\n * Use from create operations (e.g. 
Practitioner, Encounter) that build the resource with audit in meta.\n */\nexport async function createDataEntityRecord<T>(\n entity: EntityWithPut,\n tenantId: string,\n workspaceId: string,\n id: string,\n resourceWithAudit: T & { meta?: { lastUpdated?: string } },\n fallbackDate: string,\n): Promise<SingleResourceResult<T>> {\n const lastUpdated =\n resourceWithAudit.meta?.lastUpdated ??\n fallbackDate ??\n new Date().toISOString();\n const vid =\n lastUpdated.replace(/[-:T.Z]/g, \"\").slice(0, 12) || Date.now().toString(36);\n\n const resourceLike = resourceWithAudit as unknown as FhirResourceLike;\n const summary = JSON.stringify(extractSummary(resourceLike));\n const gsi1sk = extractSortKey(resourceLike);\n\n await entity\n .put({\n sk: DATA_ENTITY_SK,\n tenantId,\n workspaceId,\n id,\n resource: compressResource(JSON.stringify(resourceWithAudit)),\n summary,\n vid,\n lastUpdated,\n gsi1sk,\n })\n .go();\n\n return {\n id,\n resource: resourceWithAudit as T,\n };\n}\n\n/**\n * Build an updated resource with audit in meta for use with updateDataEntityById.\n * Parses existing resource string for existing meta, merges body with id/resourceType/meta (versionId \"2\"),\n * then merges modified audit (modifiedDate, modifiedById, modifiedByName) into meta.\n * Use from update operations (Patient, Encounter, Practitioner) to avoid duplicating this logic.\n */\nexport function buildUpdatedResourceWithAudit<T extends { meta?: Meta }>(\n body: T,\n id: string,\n date: string,\n actorId: string,\n actorName: string,\n existingResourceStr: string,\n resourceType: string,\n): {\n resource: T & { id: string; meta: MetaWithExtensions };\n lastUpdated: string;\n} {\n const existingMeta: MetaWithExtensions | undefined = (\n JSON.parse(existingResourceStr) as { meta?: MetaWithExtensions }\n ).meta;\n\n const bodyWithMeta = body as T & { id?: string; meta?: Meta };\n const resourceWithVersion: T & { id: string; meta?: Meta } = {\n ...body,\n resourceType: resourceType as 
T[\"resourceType\"],\n id,\n meta: {\n ...(bodyWithMeta.meta ?? {}),\n lastUpdated: date,\n versionId: \"2\",\n },\n };\n\n const resourceWithAudit: T & { id: string; meta: MetaWithExtensions } = {\n ...resourceWithVersion,\n meta: mergeAuditIntoMeta(resourceWithVersion.meta ?? existingMeta, {\n modifiedDate: date,\n modifiedById: actorId,\n modifiedByName: actorName,\n }),\n };\n\n return {\n resource: resourceWithAudit,\n lastUpdated: date,\n };\n}\n\n/**\n * Update a data-entity record by id: get existing, throw if not found, then call builder with\n * decompressed existing resource string; builder returns { resource, lastUpdated }; then patch.\n * Use from update operations with the appropriate entity and resource type.\n */\nexport async function updateDataEntityById<T>(\n entity: EntityWithPatch,\n tenantId: string,\n workspaceId: string,\n id: string,\n resourceLabel: string,\n context: OpenHiContext,\n buildPatched: (existingResourceStr: string) => {\n resource: unknown;\n lastUpdated: string;\n },\n): Promise<SingleResourceResult<T>> {\n const existing = await entity\n .get({\n tenantId,\n workspaceId,\n id,\n sk: DATA_ENTITY_SK,\n })\n .go();\n\n if (!existing.data) {\n throw new NotFoundError(`${resourceLabel} ${id} not found`, {\n details: { id },\n });\n }\n\n const existingStr = decompressResource(existing.data.resource);\n const { resource, lastUpdated } = buildPatched(existingStr);\n\n const resourceLike = resource as FhirResourceLike;\n const summary = JSON.stringify(extractSummary(resourceLike));\n const gsi1sk = extractSortKey(resourceLike);\n\n await entity\n .patch({\n tenantId,\n workspaceId,\n id,\n sk: DATA_ENTITY_SK,\n })\n .set({\n resource: compressResource(JSON.stringify(resource)),\n summary,\n lastUpdated,\n gsi1sk,\n })\n .go();\n\n return {\n id,\n resource: resource as T,\n };\n}\n","import { gzipSync, gunzipSync } from \"node:zlib\";\n\n/**\n * @see sites/www-docs/content/packages/@openhi/constructs/lib/compression.md\n */\n\n/** 
Envelope format version. See ADR 2026-02-15-02 (data layer compression). */\nconst ENVELOPE_VERSION = 1;\n\n/**\n * Compression algorithm identifiers supported by the envelope (string values).\n * Only algos that Node.js supports out of the box (zlib): gzip, brotli, deflate.\n * \"none\" = uncompressed payload. zstd was considered in the ADR but requires native addon/WASM.\n */\nexport const COMPRESSION_ALGOS = {\n NONE: \"none\",\n GZIP: \"gzip\",\n BROTLI: \"brotli\",\n DEFLATE: \"deflate\",\n} as const;\n\n/** Algorithm value for envelope `algo`; only gzip and none are implemented today. */\nexport type CompressionAlgo =\n (typeof COMPRESSION_ALGOS)[keyof typeof COMPRESSION_ALGOS];\n\n/** Stored value is a JSON string of this envelope. */\ninterface CompressionEnvelope {\n v: number;\n algo: string;\n payload: string;\n}\n\nfunction isEnvelope(obj: unknown): obj is CompressionEnvelope {\n return (\n typeof obj === \"object\" &&\n obj !== null &&\n \"v\" in obj &&\n \"algo\" in obj &&\n \"payload\" in obj &&\n typeof (obj as CompressionEnvelope).payload === \"string\"\n );\n}\n\n/**\n * Compresses a JSON string (e.g. serialized FHIR resource) for storage in DynamoDB.\n * Uses a versioned envelope: { v, algo, payload } with gzip+base64 in payload.\n * Used by the data layer on write; see REST API docs (compression in data layer).\n * Optional compression: pass `{ algo: COMPRESSION_ALGOS.NONE }` to store in envelope without compressing.\n */\nexport function compressResource(\n jsonString: string,\n options?: { algo?: CompressionAlgo },\n): string {\n const algo = options?.algo ?? 
COMPRESSION_ALGOS.GZIP;\n if (algo === COMPRESSION_ALGOS.NONE) {\n const envelope: CompressionEnvelope = {\n v: ENVELOPE_VERSION,\n algo: COMPRESSION_ALGOS.NONE,\n payload: jsonString,\n };\n return JSON.stringify(envelope);\n }\n const buf = Buffer.from(jsonString, \"utf-8\");\n const payload = gzipSync(buf).toString(\"base64\");\n const envelope: CompressionEnvelope = {\n v: ENVELOPE_VERSION,\n algo: COMPRESSION_ALGOS.GZIP,\n payload,\n };\n return JSON.stringify(envelope);\n}\n\n/**\n * Decompresses a stored value: versioned envelope (v, algo, payload) or legacy gzip+base64 / raw.\n * If the value is not valid envelope JSON, falls back to legacy: try gzip magic on base64, else return as-is.\n */\nexport function decompressResource(compressedOrRaw: string): string {\n try {\n const parsed = JSON.parse(compressedOrRaw) as unknown;\n if (isEnvelope(parsed)) {\n if (parsed.algo === COMPRESSION_ALGOS.GZIP) {\n const buf = Buffer.from(parsed.payload, \"base64\");\n return gunzipSync(buf).toString(\"utf-8\");\n }\n if (parsed.algo === COMPRESSION_ALGOS.NONE) {\n return parsed.payload;\n }\n // Unknown algo: return payload as-is (safe fallback per ADR)\n return parsed.payload;\n }\n } catch {\n // Not valid envelope JSON — legacy path\n }\n\n // Legacy: pre-envelope gzip+base64 or raw\n try {\n const buf = Buffer.from(compressedOrRaw, \"base64\");\n if (buf.length >= 2 && buf[0] === 0x1f && buf[1] === 0x8b) {\n return gunzipSync(buf).toString(\"utf-8\");\n }\n } catch {\n // not base64 or gunzip failed\n }\n return compressedOrRaw;\n}\n","import type { Extension, Meta } from \"@openhi/types\";\n\n/**\n * Shared audit/meta helpers for FHIR resources. Used by data operations and import.\n * OpenHI audit is stored in resource meta.extension (per ADR 2026-01-13-06).\n *\n * @see sites/www-docs/content/packages/@openhi/constructs/data/dynamo/entity-standards.md\n */\n\n/** OpenHI extension URLs for audit in resource meta (per ADR 2026-01-13-06). 
*/\nconst OPENHI_EXT = \"http://openhi.org/fhir/StructureDefinition\";\n\n/** Meta with optional OpenHI audit extensions (created/modified by, etc.). */\nexport type MetaWithExtensions = Meta & { extension?: Array<Extension> };\n\n/** Audit fields stored in FHIR resource meta.extension. */\nexport interface AuditFields {\n createdDate?: string;\n createdById?: string;\n createdByName?: string;\n modifiedDate?: string;\n modifiedById?: string;\n modifiedByName?: string;\n deletedDate?: string;\n deletedById?: string;\n deletedByName?: string;\n}\n\n/** Audit extension entry shape (subset of Extension used by OpenHI audit). */\ntype AuditExtensionEntry = Pick<\n Extension,\n \"url\" | \"valueString\" | \"valueDateTime\"\n>;\n\n/** Builds meta.extension entries for audit; merges with existing meta. */\nexport function mergeAuditIntoMeta(\n meta: MetaWithExtensions | Record<string, unknown> | undefined,\n audit: AuditFields,\n): MetaWithExtensions {\n const existing = (meta ?? {}) as MetaWithExtensions;\n const ext: Array<AuditExtensionEntry> = [\n ...(Array.isArray(existing.extension)\n ? 
(existing.extension as Array<AuditExtensionEntry>)\n : []),\n ];\n const byUrl = new Map(ext.map((e) => [e.url, e]));\n function set(\n url: string,\n value: string | undefined,\n type: \"valueString\" | \"valueDateTime\",\n ) {\n if (value == null) return;\n byUrl.set(url, { url, [type]: value });\n }\n set(`${OPENHI_EXT}/created-date`, audit.createdDate, \"valueDateTime\");\n set(`${OPENHI_EXT}/created-by-id`, audit.createdById, \"valueString\");\n set(`${OPENHI_EXT}/created-by-name`, audit.createdByName, \"valueString\");\n set(`${OPENHI_EXT}/modified-date`, audit.modifiedDate, \"valueDateTime\");\n set(`${OPENHI_EXT}/modified-by-id`, audit.modifiedById, \"valueString\");\n set(`${OPENHI_EXT}/modified-by-name`, audit.modifiedByName, \"valueString\");\n set(`${OPENHI_EXT}/deleted-date`, audit.deletedDate, \"valueDateTime\");\n set(`${OPENHI_EXT}/deleted-by-id`, audit.deletedById, \"valueString\");\n set(`${OPENHI_EXT}/deleted-by-name`, audit.deletedByName, \"valueString\");\n return { ...existing, extension: Array.from(byUrl.values()) };\n}\n","import { getDynamoControlService } from \"../../../dynamo/dynamo-control-service\";\nimport { SHARD_COUNT } from \"../../../dynamo/shard\";\nimport { OpenHiContext } from \"../../../openhi-context\";\nimport {\n batchGetWithRetry,\n dispatchListMode,\n type ListOperationMode,\n} from \"../../data-operations-common\";\n\nconst SK = \"CURRENT\";\n\nexport interface UserListParams {\n context: OpenHiContext;\n tableName?: string;\n /** #853: defaults to `\"full\"`. `\"summary\"` skips BatchGet, `\"count\"` returns total only. */\n mode?: ListOperationMode;\n}\n\nexport interface UserListResult {\n entries: Array<{\n id: string;\n resource: { resourceType: string; id: string; [key: string]: unknown };\n }>;\n total: number;\n}\n\n/**\n * Lists Users via GSI1 (sharded). `mode` (default `\"full\"`) selects between BatchGet hydration,\n * summary-only (parse `summary` JSON projected on GSI1), or count-only (skip both). 
See\n * `dispatchListMode` in data-operations-common for the canonical mode contract.\n */\nexport async function listUsersOperation(\n params: UserListParams,\n): Promise<UserListResult> {\n const { tableName, mode = \"full\" } = params;\n const service = getDynamoControlService(tableName);\n\n const shardResults = await Promise.all(\n Array.from({ length: SHARD_COUNT }, (_, shard) =>\n service.entities.user.query.gsi1({ gsi1Shard: String(shard) }).go(),\n ),\n );\n\n return dispatchListMode<\n { id: string; resource: string },\n UserListResult[\"entries\"][number]\n >(mode, shardResults, {\n hydrate: (orderedIds) =>\n batchGetWithRetry(\n service.entities.user,\n orderedIds.map((id) => ({ id, sk: SK })),\n ) as Promise<Array<{ id: string; resource: string }>>,\n getId: (item) => item.id,\n buildEntry: (id, item) => ({\n id,\n resource: {\n resourceType: \"User\",\n id,\n ...(JSON.parse(item.resource) as Record<string, unknown>),\n },\n }),\n buildSummaryEntry: (id, parsed) => ({\n id,\n resource: { resourceType: \"User\", id, ...parsed },\n }),\n });\n}\n","import { extractSummary, type FhirResourceLike } from \"@openhi/types\";\nimport { getDynamoControlService } from \"../../../dynamo/dynamo-control-service\";\nimport { NotFoundError } from \"../../../errors\";\nimport { OpenHiContext } from \"../../../openhi-context\";\n\nexport interface UserUpdateParams {\n context: OpenHiContext;\n id: string;\n body: { resource?: Record<string, unknown> | string };\n tableName?: string;\n}\n\nexport interface UserUpdateResult {\n id: string;\n resource: { resourceType: string; id: string; [key: string]: unknown };\n meta: { lastUpdated: string; versionId: string };\n}\n\nexport async function updateUserOperation(\n params: UserUpdateParams,\n): Promise<UserUpdateResult> {\n const { context, id, body, tableName } = params;\n const service = getDynamoControlService(tableName);\n\n const existing = await service.entities.user.get({ id, sk: \"CURRENT\" }).go();\n if 
(!existing.data) {\n throw new NotFoundError(`User not found: ${id}`);\n }\n\n const parsedResource =\n typeof body.resource === \"string\"\n ? (JSON.parse(body.resource) as Record<string, unknown>)\n : (body.resource ?? {});\n\n const lastUpdated = context.date ?? new Date().toISOString();\n const vid = `${Date.now()}`;\n\n const resource = { resourceType: \"User\", id, ...parsedResource };\n const summary = JSON.stringify(extractSummary(resource as FhirResourceLike));\n\n await service.entities.user\n .put({\n id,\n resource: JSON.stringify(resource),\n summary,\n vid,\n lastUpdated,\n })\n .go();\n\n return {\n id,\n resource,\n meta: { lastUpdated, versionId: vid },\n };\n}\n"],"mappings":";;;;;;;;;AAAA,SAAS,sBAA6C;AAgBtD,eAAsB,oBACpB,QAC2B;AAC3B,QAAM,EAAE,SAAS,MAAM,UAAU,IAAI;AACrC,QAAM,UAAU,wBAAwB,SAAS;AAEjD,QAAM,KAAK,KAAK,MAAM,QAAQ,KAAK,IAAI,CAAC;AACxC,QAAM,iBACJ,OAAO,KAAK,aAAa,WACpB,KAAK,MAAM,KAAK,QAAQ,IACxB,KAAK,YAAY,CAAC;AAEzB,QAAM,cAAc,QAAQ,SAAQ,oBAAI,KAAK,GAAE,YAAY;AAC3D,QAAM,MAAM;AAEZ,QAAM,WAAW,EAAE,cAAc,QAAQ,IAAI,GAAG,eAAe;AAC/D,QAAM,UAAU,KAAK,UAAU,eAAe,QAA4B,CAAC;AAE3E,QAAM,QAAQ,SAAS,KACpB,IAAI;AAAA,IACH;AAAA,IACA,UAAU,KAAK,UAAU,QAAQ;AAAA,IACjC;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,GAAG;AAEN,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,MAAM,EAAE,aAAa,WAAW,IAAI;AAAA,EACtC;AACF;;;ACxCA,eAAsB,oBACpB,QACe;AACf,QAAM,EAAE,IAAI,UAAU,IAAI;AAC1B,QAAM,UAAU,wBAAwB,SAAS;AAEjD,QAAM,QAAQ,SAAS,KAAK,OAAO,EAAE,IAAI,IAAI,UAAU,CAAC,EAAE,GAAG;AAC/D;;;ACDA,eAAsB,qBACpB,QAC4B;AAC5B,QAAM,EAAE,IAAI,UAAU,IAAI;AAC1B,QAAM,UAAU,wBAAwB,SAAS;AAEjD,QAAM,WAAW,MAAM,QAAQ,SAAS,KAAK,IAAI,EAAE,IAAI,IAAI,UAAU,CAAC,EAAE,GAAG;AAE3E,QAAM,OAAO,SAAS;AACtB,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,cAAc,mBAAmB,EAAE,EAAE;AAAA,EACjD;AAEA,QAAM,iBAAiB,KAAK,MAAM,KAAK,QAAQ;AAE/C,SAAO;AAAA,IACL;AAAA,IACA,UAAU,EAAE,cAAc,QAAQ,IAAI,GAAG,eAAe;AAAA,EAC1D;AACF;;;AClCA,SAAS,gBAAgB,kBAAAA,uBAAsB;;;ACA/C,SAAS,UAAU,kBAAkB;AAOrC,IAAM,mBAAmB;AAOlB,IAAM,oBAAoB;AAAA,EAC/B,MAAM;AAAA,EACN,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,SAAS;AACX;AAaA,SAAS,
WAAW,KAA0C;AAC5D,SACE,OAAO,QAAQ,YACf,QAAQ,QACR,OAAO,OACP,UAAU,OACV,aAAa,OACb,OAAQ,IAA4B,YAAY;AAEpD;AAQO,SAAS,iBACd,YACA,SACQ;AACR,QAAM,OAAO,SAAS,QAAQ,kBAAkB;AAChD,MAAI,SAAS,kBAAkB,MAAM;AACnC,UAAMC,YAAgC;AAAA,MACpC,GAAG;AAAA,MACH,MAAM,kBAAkB;AAAA,MACxB,SAAS;AAAA,IACX;AACA,WAAO,KAAK,UAAUA,SAAQ;AAAA,EAChC;AACA,QAAM,MAAM,OAAO,KAAK,YAAY,OAAO;AAC3C,QAAM,UAAU,SAAS,GAAG,EAAE,SAAS,QAAQ;AAC/C,QAAM,WAAgC;AAAA,IACpC,GAAG;AAAA,IACH,MAAM,kBAAkB;AAAA,IACxB;AAAA,EACF;AACA,SAAO,KAAK,UAAU,QAAQ;AAChC;AAMO,SAAS,mBAAmB,iBAAiC;AAClE,MAAI;AACF,UAAM,SAAS,KAAK,MAAM,eAAe;AACzC,QAAI,WAAW,MAAM,GAAG;AACtB,UAAI,OAAO,SAAS,kBAAkB,MAAM;AAC1C,cAAM,MAAM,OAAO,KAAK,OAAO,SAAS,QAAQ;AAChD,eAAO,WAAW,GAAG,EAAE,SAAS,OAAO;AAAA,MACzC;AACA,UAAI,OAAO,SAAS,kBAAkB,MAAM;AAC1C,eAAO,OAAO;AAAA,MAChB;AAEA,aAAO,OAAO;AAAA,IAChB;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,MAAI;AACF,UAAM,MAAM,OAAO,KAAK,iBAAiB,QAAQ;AACjD,QAAI,IAAI,UAAU,KAAK,IAAI,CAAC,MAAM,MAAQ,IAAI,CAAC,MAAM,KAAM;AACzD,aAAO,WAAW,GAAG,EAAE,SAAS,OAAO;AAAA,IACzC;AAAA,EACF,QAAQ;AAAA,EAER;AACA,SAAO;AACT;;;AC9FA,IAAM,aAAa;AAyBZ,SAAS,mBACd,MACA,OACoB;AACpB,QAAM,WAAY,QAAQ,CAAC;AAC3B,QAAM,MAAkC;AAAA,IACtC,GAAI,MAAM,QAAQ,SAAS,SAAS,IAC/B,SAAS,YACV,CAAC;AAAA,EACP;AACA,QAAM,QAAQ,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC;AAChD,WAAS,IACP,KACA,OACA,MACA;AACA,QAAI,SAAS,KAAM;AACnB,UAAM,IAAI,KAAK,EAAE,KAAK,CAAC,IAAI,GAAG,MAAM,CAAC;AAAA,EACvC;AACA,MAAI,GAAG,UAAU,iBAAiB,MAAM,aAAa,eAAe;AACpE,MAAI,GAAG,UAAU,kBAAkB,MAAM,aAAa,aAAa;AACnE,MAAI,GAAG,UAAU,oBAAoB,MAAM,eAAe,aAAa;AACvE,MAAI,GAAG,UAAU,kBAAkB,MAAM,cAAc,eAAe;AACtE,MAAI,GAAG,UAAU,mBAAmB,MAAM,cAAc,aAAa;AACrE,MAAI,GAAG,UAAU,qBAAqB,MAAM,gBAAgB,aAAa;AACzE,MAAI,GAAG,UAAU,iBAAiB,MAAM,aAAa,eAAe;AACpE,MAAI,GAAG,UAAU,kBAAkB,MAAM,aAAa,aAAa;AACnE,MAAI,GAAG,UAAU,oBAAoB,MAAM,eAAe,aAAa;AACvE,SAAO,EAAE,GAAG,UAAU,WAAW,MAAM,KAAK,MAAM,OAAO,CAAC,EAAE;AAC9D;;;AFpDO,IAAM,iBAAiB;AAiL9B,eAAsB,kBACpB,QACA,UACA,aACA,IACA,eACkC;AAClC,QAAM,SAAS,MAAM,OAClB,IAAI;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA,IAAI;AAAA,EACN,CAAC,EACA,GAAG;AAEN,MAAI,CAAC,OAAO
,MAAM;AAChB,UAAM,IAAI,cAAc,GAAG,aAAa,IAAI,EAAE,cAAc;AAAA,MAC1D,SAAS,EAAE,GAAG;AAAA,IAChB,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,KAAK,MAAM,mBAAmB,OAAO,KAAK,QAAQ,CAAC;AAGlE,SAAO;AAAA,IACL,IAAI,OAAO,KAAK;AAAA,IAChB,UAAU,EAAE,GAAG,QAAQ,IAAI,OAAO,KAAK,GAAG;AAAA,EAC5C;AACF;AAMA,eAAsB,qBACpB,QACA,UACA,aACA,IACe;AACf,QAAM,OACH,OAAO;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA,IAAI;AAAA,EACN,CAAC,EACA,GAAG;AACR;AAGA,IAAM,yBAAyB;AAE/B,IAAM,4BAA4B;AAgBlC,eAAsB,kBACpB,QACA,MACkB;AAClB,MAAI,KAAK,WAAW,EAAG,QAAO,CAAC;AAE/B,QAAM,YAAqB,CAAC;AAC5B,MAAI,UAAU;AACd,MAAI,UAAU;AAEd,SAAO,QAAQ,SAAS,GAAG;AACzB,QAAI,UAAU,GAAG;AACf,YAAM,IAAI;AAAA,QAAQ,CAAC,YACjB,WAAW,SAAS,4BAA4B,MAAM,UAAU,EAAE;AAAA,MACpE;AAAA,IACF;AACA;AACA,UAAM,SAAS,MAAM,OAAO,IAAI,OAAO,EAAE,GAAG;AAC5C,cAAU,KAAK,GAAG,OAAO,IAAI;AAC7B,UAAM,cAAc,OAAO,eAAe,CAAC;AAC3C,QAAI,YAAY,WAAW,EAAG;AAC9B,QAAI,WAAW,wBAAwB;AACrC,YAAM,IAAI;AAAA,QACR,+BAA+B,YAAY,MAAM,mCAAmC,sBAAsB;AAAA,MAC5G;AAAA,IACF;AACA,cAAU;AAAA,EACZ;AAEA,SAAO;AACT;AAqCA,eAAsB,iBACpB,MACA,cACA,OAC+C;AAC/C,MAAI,SAAS,SAAS;AACpB,QAAI,QAAQ;AACZ,eAAW,eAAe,cAAc;AACtC,gBAAU,YAAY,QAAQ,CAAC,GAAG;AAAA,IACpC;AACA,WAAO,EAAE,SAAS,CAAC,GAAG,MAAM;AAAA,EAC9B;AAEA,MAAI,SAAS,WAAW;AACtB,UAAMC,WAAoB,CAAC;AAC3B,eAAW,eAAe,cAAc;AACtC,iBAAW,QAAQ,YAAY,QAAQ,CAAC,GAAG;AACzC,YAAI,OAAO,KAAK,YAAY,SAAU;AACtC,YAAI;AACJ,YAAI;AACF,mBAAS,KAAK,MAAM,KAAK,OAAO;AAAA,QAClC,QAAQ;AACN;AAAA,QACF;AACA,QAAAA,SAAQ,KAAK,MAAM,kBAAkB,KAAK,IAAI,MAAM,CAAC;AAAA,MACvD;AAAA,IACF;AACA,WAAO,EAAE,SAAAA,UAAS,OAAOA,SAAQ,OAAO;AAAA,EAC1C;AAEA,QAAM,aAAuB,CAAC;AAC9B,aAAW,eAAe,cAAc;AACtC,eAAW,QAAQ,YAAY,QAAQ,CAAC,GAAG;AACzC,iBAAW,KAAK,KAAK,EAAE;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO,EAAE,SAAS,CAAC,GAAG,OAAO,EAAE;AAE5D,QAAM,QAAQ,MAAM,MAAM,QAAQ,UAAU;AAC5C,QAAM,OAAO,IAAI,IAAI,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,MAAM,IAAI,GAAG,IAAI,CAAC,CAAC;AAEnE,QAAM,UAAoB,CAAC;AAC3B,aAAW,MAAM,YAAY;AAC3B,UAAM,OAAO,KAAK,IAAI,EAAE;AACxB,QAAI,CAAC,KAAM;AACX,YAAQ,KAAK,MAAM,WAAW,IAAI,IAAI,CAAC;AAAA,EACzC;AAEA,SAAO,EAAE,SAAS,OAAO,QAAQ,OAAO;AAC1C;AAWA,eAAsB,4BACpB,QACA,UAC
A,aACA,OAA0B,QACF;AACxB,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,MAAM;AAAA,MAAK,EAAE,QAAQ,YAAY;AAAA,MAAG,CAAC,GAAG,UACtC,OAAO,MACJ,KAAK,EAAE,UAAU,aAAa,WAAW,OAAO,KAAK,EAAE,CAAC,EACxD,GAAG;AAAA,IACR;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,MACE,SAAS,CAAC,eACR;AAAA,QACE;AAAA,QACA,WAAW,IAAI,CAAC,QAAQ;AAAA,UACtB;AAAA,UACA;AAAA,UACA;AAAA,UACA,IAAI;AAAA,QACN,EAAE;AAAA,MACJ;AAAA,MACF,OAAO,CAAC,SAAS,KAAK;AAAA,MACtB,YAAY,CAAC,IAAI,SAAS;AACxB,cAAM,SAAS,KAAK,MAAM,mBAAmB,KAAK,QAAQ,CAAC;AAG3D,eAAO,EAAE,IAAI,UAAU,EAAE,GAAG,QAAQ,GAAG,EAAO;AAAA,MAChD;AAAA,MACA,mBAAmB,CAAC,IAAI,YAAY;AAAA,QAClC;AAAA,QACA,UAAU,EAAE,GAAG,QAAQ,GAAG;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACF;AAMA,eAAsB,uBACpB,QACA,UACA,aACA,IACA,mBACA,cACkC;AAClC,QAAM,cACJ,kBAAkB,MAAM,eACxB,iBACA,oBAAI,KAAK,GAAE,YAAY;AACzB,QAAM,MACJ,YAAY,QAAQ,YAAY,EAAE,EAAE,MAAM,GAAG,EAAE,KAAK,KAAK,IAAI,EAAE,SAAS,EAAE;AAE5E,QAAM,eAAe;AACrB,QAAM,UAAU,KAAK,UAAUC,gBAAe,YAAY,CAAC;AAC3D,QAAM,SAAS,eAAe,YAAY;AAE1C,QAAM,OACH,IAAI;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,iBAAiB,KAAK,UAAU,iBAAiB,CAAC;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,GAAG;AAEN,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,EACZ;AACF;AAQO,SAAS,8BACd,MACA,IACA,MACA,SACA,WACA,qBACA,cAIA;AACA,QAAM,eACJ,KAAK,MAAM,mBAAmB,EAC9B;AAEF,QAAM,eAAe;AACrB,QAAM,sBAAuD;AAAA,IAC3D,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,MAAM;AAAA,MACJ,GAAI,aAAa,QAAQ,CAAC;AAAA,MAC1B,aAAa;AAAA,MACb,WAAW;AAAA,IACb;AAAA,EACF;AAEA,QAAM,oBAAkE;AAAA,IACtE,GAAG;AAAA,IACH,MAAM,mBAAmB,oBAAoB,QAAQ,cAAc;AAAA,MACjE,cAAc;AAAA,MACd,cAAc;AAAA,MACd,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,UAAU;AAAA,IACV,aAAa;AAAA,EACf;AACF;AAOA,eAAsB,qBACpB,QACA,UACA,aACA,IACA,eACA,SACA,cAIkC;AAClC,QAAM,WAAW,MAAM,OACpB,IAAI;AAAA,IACH;AAAA,IACA;AAAA,IACA;AAAA,IACA,IAAI;AAAA,EACN,CAAC,EACA,GAAG;AAEN,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,cAAc,GAAG,aAAa,IAAI,EAAE,cAAc;AAAA,MAC1D,SAAS,EAAE,GAAG;AAAA,IAChB,CAAC;AAAA,EACH;AAEA,QAAM,cAAc,mBAAmB,SAAS,KAAK,QAAQ;AAC7D,QAAM,EAAE,UAAU,YAAY,IAAI,aAAa,WAAW;AAE1D,QAAM,eAAe;AACrB
,QAAM,UAAU,KAAK,UAAUA,gBAAe,YAAY,CAAC;AAC3D,QAAM,SAAS,eAAe,YAAY;AAE1C,QAAM,OACH,MAAM;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,IAAI;AAAA,EACN,CAAC,EACA,IAAI;AAAA,IACH,UAAU,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAAA,IACnD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,GAAG;AAEN,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;AG3jBA,IAAM,KAAK;AAsBX,eAAsB,mBACpB,QACyB;AACzB,QAAM,EAAE,WAAW,OAAO,OAAO,IAAI;AACrC,QAAM,UAAU,wBAAwB,SAAS;AAEjD,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,MAAM;AAAA,MAAK,EAAE,QAAQ,YAAY;AAAA,MAAG,CAAC,GAAG,UACtC,QAAQ,SAAS,KAAK,MAAM,KAAK,EAAE,WAAW,OAAO,KAAK,EAAE,CAAC,EAAE,GAAG;AAAA,IACpE;AAAA,EACF;AAEA,SAAO,iBAGL,MAAM,cAAc;AAAA,IACpB,SAAS,CAAC,eACR;AAAA,MACE,QAAQ,SAAS;AAAA,MACjB,WAAW,IAAI,CAAC,QAAQ,EAAE,IAAI,IAAI,GAAG,EAAE;AAAA,IACzC;AAAA,IACF,OAAO,CAAC,SAAS,KAAK;AAAA,IACtB,YAAY,CAAC,IAAI,UAAU;AAAA,MACzB;AAAA,MACA,UAAU;AAAA,QACR,cAAc;AAAA,QACd;AAAA,QACA,GAAI,KAAK,MAAM,KAAK,QAAQ;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,IAAI,YAAY;AAAA,MAClC;AAAA,MACA,UAAU,EAAE,cAAc,QAAQ,IAAI,GAAG,OAAO;AAAA,IAClD;AAAA,EACF,CAAC;AACH;;;AClEA,SAAS,kBAAAC,uBAA6C;AAkBtD,eAAsB,oBACpB,QAC2B;AAC3B,QAAM,EAAE,SAAS,IAAI,MAAM,UAAU,IAAI;AACzC,QAAM,UAAU,wBAAwB,SAAS;AAEjD,QAAM,WAAW,MAAM,QAAQ,SAAS,KAAK,IAAI,EAAE,IAAI,IAAI,UAAU,CAAC,EAAE,GAAG;AAC3E,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,cAAc,mBAAmB,EAAE,EAAE;AAAA,EACjD;AAEA,QAAM,iBACJ,OAAO,KAAK,aAAa,WACpB,KAAK,MAAM,KAAK,QAAQ,IACxB,KAAK,YAAY,CAAC;AAEzB,QAAM,cAAc,QAAQ,SAAQ,oBAAI,KAAK,GAAE,YAAY;AAC3D,QAAM,MAAM,GAAG,KAAK,IAAI,CAAC;AAEzB,QAAM,WAAW,EAAE,cAAc,QAAQ,IAAI,GAAG,eAAe;AAC/D,QAAM,UAAU,KAAK,UAAUC,gBAAe,QAA4B,CAAC;AAE3E,QAAM,QAAQ,SAAS,KACpB,IAAI;AAAA,IACH;AAAA,IACA,UAAU,KAAK,UAAU,QAAQ;AAAA,IACjC;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,GAAG;AAEN,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,MAAM,EAAE,aAAa,WAAW,IAAI;AAAA,EACtC;AACF;","names":["extractSummary","envelope","entries","extractSummary","extractSummary","extractSummary"]}
@@ -0,0 +1,27 @@
1
+ import {
2
+ NotFoundError
3
+ } from "./chunk-YZZDUJHI.mjs";
4
+ import {
5
+ getDynamoControlService
6
+ } from "./chunk-VYDIGFIX.mjs";
7
+
8
+ // src/data/operations/control/role/role-get-by-id-operation.ts
9
+ async function getRoleByIdOperation(params) {
10
+ const { id, tableName } = params;
11
+ const service = getDynamoControlService(tableName);
12
+ const response = await service.entities.role.get({ id, sk: "CURRENT" }).go();
13
+ const item = response.data;
14
+ if (!item) {
15
+ throw new NotFoundError(`Role not found: ${id}`);
16
+ }
17
+ const parsedResource = JSON.parse(item.resource);
18
+ return {
19
+ id,
20
+ resource: { resourceType: "Role", id, ...parsedResource }
21
+ };
22
+ }
23
+
24
+ export {
25
+ getRoleByIdOperation
26
+ };
27
+ //# sourceMappingURL=chunk-L6UAP4KP.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/data/operations/control/role/role-get-by-id-operation.ts"],"sourcesContent":["import { getDynamoControlService } from \"../../../dynamo/dynamo-control-service\";\nimport { NotFoundError } from \"../../../errors\";\nimport { OpenHiContext } from \"../../../openhi-context\";\n\nexport interface RoleGetByIdParams {\n context: OpenHiContext;\n id: string;\n tableName?: string;\n}\n\nexport interface RoleGetByIdResult {\n id: string;\n resource: { resourceType: string; id: string; [key: string]: unknown };\n}\n\nexport async function getRoleByIdOperation(\n params: RoleGetByIdParams,\n): Promise<RoleGetByIdResult> {\n const { id, tableName } = params;\n const service = getDynamoControlService(tableName);\n\n const response = await service.entities.role.get({ id, sk: \"CURRENT\" }).go();\n\n const item = response.data;\n if (!item) {\n throw new NotFoundError(`Role not found: ${id}`);\n }\n\n const parsedResource = JSON.parse(item.resource) as Record<string, unknown>;\n\n return {\n id,\n resource: { resourceType: \"Role\", id, ...parsedResource },\n };\n}\n"],"mappings":";;;;;;;;AAeA,eAAsB,qBACpB,QAC4B;AAC5B,QAAM,EAAE,IAAI,UAAU,IAAI;AAC1B,QAAM,UAAU,wBAAwB,SAAS;AAEjD,QAAM,WAAW,MAAM,QAAQ,SAAS,KAAK,IAAI,EAAE,IAAI,IAAI,UAAU,CAAC,EAAE,GAAG;AAE3E,QAAM,OAAO,SAAS;AACtB,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,cAAc,mBAAmB,EAAE,EAAE;AAAA,EACjD;AAEA,QAAM,iBAAiB,KAAK,MAAM,KAAK,QAAQ;AAE/C,SAAO;AAAA,IACL;AAAA,IACA,UAAU,EAAE,cAAc,QAAQ,IAAI,GAAG,eAAe;AAAA,EAC1D;AACF;","names":[]}
@@ -4,7 +4,13 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
4
  var __getOwnPropNames = Object.getOwnPropertyNames;
5
5
  var __getProtoOf = Object.getPrototypeOf;
6
6
  var __hasOwnProp = Object.prototype.hasOwnProperty;
7
- var __commonJS = (cb, mod) => function __require() {
7
+ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
8
+ get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
9
+ }) : x)(function(x) {
10
+ if (typeof require !== "undefined") return require.apply(this, arguments);
11
+ throw Error('Dynamic require of "' + x + '" is not supported');
12
+ });
13
+ var __commonJS = (cb, mod) => function __require2() {
8
14
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
9
15
  };
10
16
  var __copyProps = (to, from, except, desc) => {
@@ -25,7 +31,8 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
25
31
  ));
26
32
 
27
33
  export {
34
+ __require,
28
35
  __commonJS,
29
36
  __toESM
30
37
  };
31
- //# sourceMappingURL=chunk-3QS3WKRC.mjs.map
38
+ //# sourceMappingURL=chunk-LZOMFHX3.mjs.map