@twin.org/dataspace-data-plane-service 0.0.3-next.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +73 -0
- package/dist/es/dataspaceDataPlaneRoutes.js +312 -0
- package/dist/es/dataspaceDataPlaneRoutes.js.map +1 -0
- package/dist/es/dataspaceDataPlaneService.js +1077 -0
- package/dist/es/dataspaceDataPlaneService.js.map +1 -0
- package/dist/es/dataspaceDataPlaneSocketRoutes.js +82 -0
- package/dist/es/dataspaceDataPlaneSocketRoutes.js.map +1 -0
- package/dist/es/entities/activityLogDetails.js +69 -0
- package/dist/es/entities/activityLogDetails.js.map +1 -0
- package/dist/es/entities/activityTask.js +27 -0
- package/dist/es/entities/activityTask.js.map +1 -0
- package/dist/es/index.js +13 -0
- package/dist/es/index.js.map +1 -0
- package/dist/es/models/IDataspaceDataPlaneServiceConfig.js +4 -0
- package/dist/es/models/IDataspaceDataPlaneServiceConfig.js.map +1 -0
- package/dist/es/models/IDataspaceDataPlaneServiceConstructorOptions.js +2 -0
- package/dist/es/models/IDataspaceDataPlaneServiceConstructorOptions.js.map +1 -0
- package/dist/es/restEntryPoints.js +10 -0
- package/dist/es/restEntryPoints.js.map +1 -0
- package/dist/es/schema.js +13 -0
- package/dist/es/schema.js.map +1 -0
- package/dist/es/socketEntryPoints.js +11 -0
- package/dist/es/socketEntryPoints.js.map +1 -0
- package/dist/types/dataspaceDataPlaneRoutes.d.ts +50 -0
- package/dist/types/dataspaceDataPlaneService.d.ts +90 -0
- package/dist/types/dataspaceDataPlaneSocketRoutes.d.ts +29 -0
- package/dist/types/entities/activityLogDetails.d.ts +33 -0
- package/dist/types/entities/activityTask.d.ts +14 -0
- package/dist/types/index.d.ts +10 -0
- package/dist/types/models/IDataspaceDataPlaneServiceConfig.d.ts +15 -0
- package/dist/types/models/IDataspaceDataPlaneServiceConstructorOptions.d.ts +55 -0
- package/dist/types/restEntryPoints.d.ts +2 -0
- package/dist/types/schema.d.ts +4 -0
- package/dist/types/socketEntryPoints.d.ts +2 -0
- package/docs/changelog.md +415 -0
- package/docs/examples.md +1 -0
- package/docs/open-api/spec.json +1658 -0
- package/docs/reference/classes/ActivityLogDetails.md +69 -0
- package/docs/reference/classes/ActivityTask.md +29 -0
- package/docs/reference/classes/DataspaceDataPlaneService.md +336 -0
- package/docs/reference/functions/activityLogEntryGet.md +31 -0
- package/docs/reference/functions/activityLogStatusConnected.md +17 -0
- package/docs/reference/functions/activityLogStatusDisconnected.md +23 -0
- package/docs/reference/functions/activityLogStatusUpdate.md +37 -0
- package/docs/reference/functions/activityStreamNotify.md +37 -0
- package/docs/reference/functions/generateRestRoutesDataspaceDataPlane.md +25 -0
- package/docs/reference/functions/generateSocketRoutesDataspaceDataPlane.md +25 -0
- package/docs/reference/functions/getDataAssetEntities.md +31 -0
- package/docs/reference/functions/initSchema.md +9 -0
- package/docs/reference/functions/queryDataAsset.md +31 -0
- package/docs/reference/index.md +32 -0
- package/docs/reference/interfaces/IDataspaceDataPlaneServiceConfig.md +31 -0
- package/docs/reference/interfaces/IDataspaceDataPlaneServiceConstructorOptions.md +132 -0
- package/docs/reference/variables/ACTIVITY_LOG_ROUTE.md +5 -0
- package/docs/reference/variables/restEntryPoints.md +3 -0
- package/docs/reference/variables/socketEntryPoints.md +3 -0
- package/docs/reference/variables/tagsDataspaceDataPlane.md +5 -0
- package/locales/en.json +41 -0
- package/package.json +74 -0
|
@@ -0,0 +1,1077 @@
|
|
|
1
|
+
// Copyright 2025 IOTA Stiftung.
|
|
2
|
+
// SPDX-License-Identifier: Apache-2.0.
|
|
3
|
+
import { TaskStatus } from "@twin.org/background-task-models";
|
|
4
|
+
import { ContextIdKeys, ContextIdStore } from "@twin.org/context";
|
|
5
|
+
import { ArrayHelper, ComponentFactory, ConflictError, Converter, GeneralError, GuardError, Guards, Is, NotFoundError, RandomHelper, UnprocessableError, Validation } from "@twin.org/core";
|
|
6
|
+
import { Blake2b } from "@twin.org/crypto";
|
|
7
|
+
import { JsonLdDataTypes, JsonLdHelper, JsonLdProcessor } from "@twin.org/data-json-ld";
|
|
8
|
+
import { ActivityProcessingStatus, DataRequestType, DataspaceAppFactory } from "@twin.org/dataspace-models";
|
|
9
|
+
import { EngineCoreFactory } from "@twin.org/engine-models";
|
|
10
|
+
import { ComparisonOperator, LogicalOperator } from "@twin.org/entity";
|
|
11
|
+
import { EntityStorageConnectorFactory } from "@twin.org/entity-storage-models";
|
|
12
|
+
import { DataspaceProtocolDataTypes, DataspaceProtocolTransferProcessStateType } from "@twin.org/standards-dataspace-protocol";
|
|
13
|
+
import { SchemaOrgContexts, SchemaOrgDataTypes, SchemaOrgTypes } from "@twin.org/standards-schema-org";
|
|
14
|
+
import { ActivityStreamsDataTypes } from "@twin.org/standards-w3c-activity-streams";
|
|
15
|
+
import { TrustHelper } from "@twin.org/trust-models";
|
|
16
|
+
/**
|
|
17
|
+
* Dataspace Data Plane Service.
|
|
18
|
+
*/
|
|
19
|
+
export class DataspaceDataPlaneService {
|
|
20
|
+
/**
|
|
21
|
+
* Runtime name for the class.
|
|
22
|
+
*/
|
|
23
|
+
static CLASS_NAME = "DataspaceDataPlaneService";
|
|
24
|
+
/**
|
|
25
|
+
* Milliseconds per minute (60 * 1000).
|
|
26
|
+
* @internal
|
|
27
|
+
*/
|
|
28
|
+
static _MS_PER_MINUTE = 60 * 1000;
|
|
29
|
+
/**
|
|
30
|
+
* Minutes per day (24 * 60 = 1440).
|
|
31
|
+
* @internal
|
|
32
|
+
*/
|
|
33
|
+
static _MINUTES_PER_DAY = 24 * 60;
|
|
34
|
+
/**
|
|
35
|
+
* Milliseconds per day (24 hours).
|
|
36
|
+
* @internal
|
|
37
|
+
*/
|
|
38
|
+
static _MS_PER_DAY = 24 * 60 * 60 * 1000;
|
|
39
|
+
/**
|
|
40
|
+
* The default cleanup interval in minutes. (1 hour)
|
|
41
|
+
* @internal
|
|
42
|
+
*/
|
|
43
|
+
static _DEFAULT_CLEANUP_INTERVAL = 60;
|
|
44
|
+
/**
|
|
45
|
+
* The default retain interval in minutes. (10 minutes)
|
|
46
|
+
* @internal
|
|
47
|
+
*/
|
|
48
|
+
static _DEFAULT_RETAIN_INTERVAL = 10;
|
|
49
|
+
/**
|
|
50
|
+
* Logging service type.
|
|
51
|
+
* @internal
|
|
52
|
+
*/
|
|
53
|
+
_loggingComponentType;
|
|
54
|
+
/**
|
|
55
|
+
* Logging service.
|
|
56
|
+
* @internal
|
|
57
|
+
*/
|
|
58
|
+
_logging;
|
|
59
|
+
/**
|
|
60
|
+
* Storage service for activity logging.
|
|
61
|
+
* @internal
|
|
62
|
+
*/
|
|
63
|
+
_entityStorageActivityLogs;
|
|
64
|
+
/**
|
|
65
|
+
* Storage service for activity tasks.
|
|
66
|
+
* @internal
|
|
67
|
+
*/
|
|
68
|
+
_entityStorageActivityTasks;
|
|
69
|
+
/**
|
|
70
|
+
* Background Task Component.
|
|
71
|
+
* @internal
|
|
72
|
+
*/
|
|
73
|
+
_backgroundTaskComponent;
|
|
74
|
+
/**
|
|
75
|
+
* Activity Log Status callbacks.
|
|
76
|
+
* @internal
|
|
77
|
+
*/
|
|
78
|
+
_activityLogStatusCallbacks;
|
|
79
|
+
/**
|
|
80
|
+
* Task retention. -1 retain forever.
|
|
81
|
+
* @internal
|
|
82
|
+
*/
|
|
83
|
+
_retainTasksFor;
|
|
84
|
+
/**
|
|
85
|
+
* Activity Log Entry retention. -1 retain forever.
|
|
86
|
+
* @internal
|
|
87
|
+
*/
|
|
88
|
+
_retainActivityLogsFor;
|
|
89
|
+
/**
|
|
90
|
+
* Clean up interval for activity logs.
|
|
91
|
+
* @internal
|
|
92
|
+
*/
|
|
93
|
+
_activityLogCleanUpInterval;
|
|
94
|
+
/**
|
|
95
|
+
* Whether there is an ongoing clean up process.
|
|
96
|
+
* @internal
|
|
97
|
+
*/
|
|
98
|
+
_cleanUpProcessOngoing;
|
|
99
|
+
/**
|
|
100
|
+
* The task scheduler used to clean up activity logs.
|
|
101
|
+
* @internal
|
|
102
|
+
*/
|
|
103
|
+
_taskScheduler;
|
|
104
|
+
/**
|
|
105
|
+
* The keys to use from the context ids to create partitions.
|
|
106
|
+
* @internal
|
|
107
|
+
*/
|
|
108
|
+
_partitionContextIds;
|
|
109
|
+
/**
|
|
110
|
+
* The trust component.
|
|
111
|
+
* @internal
|
|
112
|
+
*/
|
|
113
|
+
_trustComponent;
|
|
114
|
+
/**
|
|
115
|
+
* The policy enforcement point for ODRL policy enforcement.
|
|
116
|
+
* @internal
|
|
117
|
+
*/
|
|
118
|
+
_policyEnforcementPoint;
|
|
119
|
+
/**
|
|
120
|
+
* Entity storage for Transfer Process entities.
|
|
121
|
+
* Used to read transfer state from shared storage (written by Control Plane).
|
|
122
|
+
* @internal
|
|
123
|
+
*/
|
|
124
|
+
_transferProcessStorage;
|
|
125
|
+
/**
|
|
126
|
+
* The list of active tenants required for task cleanup.
|
|
127
|
+
* @internal
|
|
128
|
+
*/
|
|
129
|
+
_activeTenants;
|
|
130
|
+
/**
|
|
131
|
+
* Create a new instance of DataspaceDataPlane.
|
|
132
|
+
* @param options The options for the data plane.
|
|
133
|
+
*/
|
|
134
|
+
constructor(options) {
    // Resolve the logging component lazily-optional: service still works without logging.
    this._loggingComponentType = options?.loggingComponentType ?? "logging";
    this._logging = ComponentFactory.getIfExists(this._loggingComponentType);
    // Required storage connectors and components — `get` (not `getIfExists`) throws if missing.
    this._entityStorageActivityLogs = EntityStorageConnectorFactory.get(options?.activityLogEntityStorageType ?? "activity-log-details");
    this._entityStorageActivityTasks = EntityStorageConnectorFactory.get(options?.activityTaskEntityStorageType ?? "activity-task");
    this._backgroundTaskComponent = ComponentFactory.get(options?.backgroundTaskComponentType ?? "background-task");
    this._taskScheduler = ComponentFactory.get(options?.taskSchedulerComponentType ?? "task-scheduler");
    this._trustComponent = ComponentFactory.get(options?.trustComponentType ?? "trust");
    // Policy enforcement is optional — absence disables ODRL policy checks.
    this._policyEnforcementPoint = ComponentFactory.getIfExists(options?.pepComponentType ?? "policy-enforcement-point-service");
    // Entity storage for Transfer Process state lookup.
    // Used to read transfer state from shared storage (written by the Control Plane).
    this._transferProcessStorage = EntityStorageConnectorFactory.get(options?.transferProcessEntityStorageType ?? "transfer-process");
    // Register JSON-LD / Activity Streams / schema.org / DSP data types so that
    // later validation and compaction recognise them.
    JsonLdDataTypes.registerTypes();
    ActivityStreamsDataTypes.registerTypes();
    SchemaOrgDataTypes.registerRedirects();
    DataspaceProtocolDataTypes.registerRedirects();
    DataspaceProtocolDataTypes.registerTypes();
    this._activityLogStatusCallbacks = {};
    this._activeTenants = [];
    this._partitionContextIds = options?.partitionContextIds;
    // Defaults: retain tasks and activity log entries for _DEFAULT_RETAIN_INTERVAL minutes.
    this._retainTasksFor =
        DataspaceDataPlaneService._DEFAULT_RETAIN_INTERVAL * DataspaceDataPlaneService._MS_PER_MINUTE;
    this._retainActivityLogsFor =
        DataspaceDataPlaneService._DEFAULT_RETAIN_INTERVAL * DataspaceDataPlaneService._MS_PER_MINUTE;
    this._activityLogCleanUpInterval = DataspaceDataPlaneService._DEFAULT_CLEANUP_INTERVAL;
    this._cleanUpProcessOngoing = false;
    const validationErrors = [];
    if (!Is.empty(options?.config?.retainActivityLogsFor)) {
        Guards.integer(DataspaceDataPlaneService.CLASS_NAME, "options.config.retainActivityLogsFor", options.config.retainActivityLogsFor);
        if (options.config.retainActivityLogsFor === -1) {
            // -1 means retain forever, for both tasks and activity log entries.
            this._retainTasksFor = -1;
            this._retainActivityLogsFor = -1;
        }
        else {
            Validation.integer("options.config.retainActivityLogsFor", options.config.retainActivityLogsFor, validationErrors, undefined, { minValue: 1 });
            // Retention of internal tasks launched:
            // 5 minutes of margin with respect to the Activity Log Entry to ensure proper removal.
            this._retainTasksFor =
                (options.config.retainActivityLogsFor + 5) * DataspaceDataPlaneService._MS_PER_MINUTE;
            this._retainActivityLogsFor =
                options.config.retainActivityLogsFor * DataspaceDataPlaneService._MS_PER_MINUTE;
        }
    }
    if (!Is.empty(options?.config?.activityLogsCleanUpInterval)) {
        Guards.integer(DataspaceDataPlaneService.CLASS_NAME, "options.config.activityLogsCleanUpInterval", options.config.activityLogsCleanUpInterval);
        Validation.integer("options.config.activityLogsCleanUpInterval", options.config.activityLogsCleanUpInterval, validationErrors, undefined, { minValue: 1 });
        this._activityLogCleanUpInterval = options.config.activityLogsCleanUpInterval;
    }
    // Raise a single aggregated validation error if any config value was invalid.
    Validation.asValidationError(DataspaceDataPlaneService.CLASS_NAME, "options.config", validationErrors);
}
|
|
184
|
+
/**
|
|
185
|
+
* Returns the class name of the component.
|
|
186
|
+
* @returns The class name of the component.
|
|
187
|
+
*/
|
|
188
|
+
className() {
|
|
189
|
+
return DataspaceDataPlaneService.CLASS_NAME;
|
|
190
|
+
}
|
|
191
|
+
/**
|
|
192
|
+
* The service needs to be started when the application is initialized.
|
|
193
|
+
* @param nodeLoggingComponentType The node logging component type.
|
|
194
|
+
*/
|
|
195
|
+
async start(nodeLoggingComponentType) {
    // NOTE(review): nodeLoggingComponentType is not used in this body — presumably part of
    // a shared component lifecycle interface; confirm against the component contract.
    const engine = EngineCoreFactory.getIfExists("engine");
    // Skip scheduler setup on engine clones so only one instance runs cleanup.
    if (Is.empty(engine) || engine.isClone()) {
        await this._logging?.log({
            level: "debug",
            source: DataspaceDataPlaneService.CLASS_NAME,
            message: "engineCloneStart"
        });
        return;
    }
    // A cleanup task scheduler is only needed when retention is not "forever" (-1).
    if (this._retainActivityLogsFor !== -1) {
        const taskTime = [
            {
                // First trigger 5 seconds from now; subsequent schedule comes from the
                // configured cleanup interval.
                nextTriggerTime: Date.now() + 5000,
                ...this.calculateCleaningTaskSchedule(this._activityLogCleanUpInterval)
            }
        ];
        await this._taskScheduler.addTask("dataspace-cleanup", taskTime, async () => {
            await this._logging?.log({
                level: "debug",
                source: DataspaceDataPlaneService.CLASS_NAME,
                message: "scheduledCleanUpTask"
            });
            await this.cleanupActivityLog();
        });
        await this._logging?.log({
            level: "debug",
            source: DataspaceDataPlaneService.CLASS_NAME,
            message: "taskSchedulerStarted",
            data: {
                taskTime
            }
        });
    }
}
|
|
231
|
+
/**
|
|
232
|
+
* Notify an Activity.
|
|
233
|
+
* @param activity The Activity notified.
|
|
234
|
+
* @returns The Activity's Log Entry identifier.
|
|
235
|
+
*/
|
|
236
|
+
async notifyActivity(activity) {
    Guards.object(DataspaceDataPlaneService.CLASS_NAME, "activity", activity);
    await this.updateActiveTenants();
    await this._logging?.log({
        level: "debug",
        source: DataspaceDataPlaneService.CLASS_NAME,
        message: "newActivity",
        data: {
            activityType: activity.type,
            generator: this.calculateActivityGeneratorIdentity(activity)
        }
    });
    // Validate that the Activity notified is encoded using the representation format expected.
    const validationFailures = [];
    await JsonLdHelper.validate(activity, validationFailures, { failOnMissingType: true });
    Validation.asValidationError(DataspaceDataPlaneService.CLASS_NAME, "activity", validationFailures);
    // Compact against the activity's own context to avoid using terms not defined
    // in any LD context.
    const compactedObj = await JsonLdProcessor.compact(activity, activity["@context"]);
    // The Activity Log Entry id is content-derived: Blake2b-256 of the canonical
    // (URDNA-style) form, so the same activity always maps to the same entry.
    const canonical = await JsonLdProcessor.canonize(compactedObj);
    const canonicalBytes = Converter.utf8ToBytes(canonical);
    const activityLogId = Converter.bytesToHex(Blake2b.sum256(canonicalBytes));
    const activityLogEntryId = `urn:x-activity-log:${activityLogId}`;
    // Check if an entry already exists (i.e. this activity was notified before).
    const existingLogEntry = await this._entityStorageActivityLogs.get(activityLogEntryId);
    let existingSuccessfulApps = [];
    let isRetry = false;
    if (!Is.undefined(existingLogEntry)) {
        // Check if there are failed tasks that can be retried.
        const existingEntry = await this.getActivityLogEntry(activityLogEntryId);
        // If all tasks completed successfully, this is a duplicate notification.
        if (existingEntry.status === ActivityProcessingStatus.Completed) {
            throw new ConflictError(DataspaceDataPlaneService.CLASS_NAME, "activityAlreadyNotified", activityLogEntryId);
        }
        // If still processing then reject to avoid race conditions.
        if (existingEntry.status === ActivityProcessingStatus.Pending ||
            existingEntry.status === ActivityProcessingStatus.Running ||
            existingEntry.status === ActivityProcessingStatus.Registering) {
            throw new ConflictError(DataspaceDataPlaneService.CLASS_NAME, "activityStillProcessing", activityLogEntryId);
        }
        // Status is Error - prepare for retry; apps that already succeeded are skipped below.
        existingSuccessfulApps = await this.prepareForRetry(activityLogEntryId, existingEntry);
        isRetry = true;
    }
    else {
        // First notification: persist a fresh log entry before scheduling any work.
        const logEntry = {
            id: activityLogEntryId,
            activityId: Is.string(activity.id) ? activity.id : undefined,
            generator: this.calculateActivityGeneratorIdentity(activity),
            dateCreated: new Date().toISOString(),
            dateModified: new Date().toISOString()
        };
        await this._entityStorageActivityLogs.set(logEntry);
    }
    // Fan out: one background task per (query, interested dataspace app) pair.
    const activityQuerySet = await this.calculateActivityQuerySet(compactedObj);
    const tasksScheduled = [];
    for (const query of activityQuerySet) {
        const dataspaceAppIds = this.getAppForActivityQuery(query);
        for (const dataspaceAppId of dataspaceAppIds) {
            // Only process apps that haven't already completed successfully.
            if (!existingSuccessfulApps.includes(dataspaceAppId)) {
                const payload = {
                    activityLogEntryId,
                    activity: compactedObj,
                    executorApp: dataspaceAppId
                };
                // A random task type is generated per task so each gets its own handler.
                const taskType = Converter.bytesToHex(RandomHelper.generate(16));
                const taskId = await this._backgroundTaskComponent.create(taskType, payload, {
                    retainFor: this._retainTasksFor
                });
                await this._backgroundTaskComponent.registerHandler(taskType, "@twin.org/dataspace-app-runner", "appRunner", async (task) => {
                    await this.finaliseTask(task);
                }, {
                    initialiseMethod: "appRunnerStart",
                    shutdownMethod: "appRunnerEnd"
                });
                tasksScheduled.push({
                    taskId,
                    dataspaceAppId
                });
                await this._logging?.log({
                    level: "info",
                    source: DataspaceDataPlaneService.CLASS_NAME,
                    message: "scheduledTask",
                    data: {
                        taskId,
                        dataspaceAppId,
                        isRetry
                    }
                });
            }
        }
    }
    // On retry, keep the task records of apps that already succeeded and append
    // the newly-scheduled ones; otherwise store only the new tasks.
    const existingActivityTasks = isRetry
        ? await this._entityStorageActivityTasks.get(activityLogEntryId)
        : undefined;
    const existingTasksToKeep = existingActivityTasks?.associatedTasks.filter(t => existingSuccessfulApps.includes(t.dataspaceAppId)) ?? [];
    await this._entityStorageActivityTasks.set({
        activityLogEntryId,
        associatedTasks: [...existingTasksToKeep, ...tasksScheduled]
    });
    return activityLogEntryId;
}
|
|
339
|
+
/**
|
|
340
|
+
* Subscribes to the activity log.
|
|
341
|
+
* @param callback The callback to be called when Activity Log is called.
|
|
342
|
+
* @param subscriptionId The Subscription Id.
|
|
343
|
+
* @returns The subscription Id.
|
|
344
|
+
*/
|
|
345
|
+
async subscribeToActivityLog(callback, subscriptionId) {
|
|
346
|
+
Guards.function(DataspaceDataPlaneService.CLASS_NAME, "callback", callback);
|
|
347
|
+
const theSubscriptionId = Is.stringValue(subscriptionId)
|
|
348
|
+
? subscriptionId
|
|
349
|
+
: Converter.bytesToHex(RandomHelper.generate(16));
|
|
350
|
+
this._activityLogStatusCallbacks[theSubscriptionId] = callback;
|
|
351
|
+
return theSubscriptionId;
|
|
352
|
+
}
|
|
353
|
+
/**
|
|
354
|
+
* Subscribes to the activity log.
|
|
355
|
+
* @param subscriptionId The Subscription Id.
|
|
356
|
+
*/
|
|
357
|
+
async unSubscribeToActivityLog(subscriptionId) {
|
|
358
|
+
Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "subscriptionId", subscriptionId);
|
|
359
|
+
delete this._activityLogStatusCallbacks[subscriptionId];
|
|
360
|
+
}
|
|
361
|
+
/**
|
|
362
|
+
* Returns the activity processing details of an activity.
|
|
363
|
+
* @param logEntryId The Id of the Activity Log Entry (a URI).
|
|
364
|
+
* @returns the Activity Log Entry with the processing details.
|
|
365
|
+
* @throws NotFoundError if activity log entry is not known.
|
|
366
|
+
*/
|
|
367
|
+
async getActivityLogEntry(logEntryId) {
    Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "logEntryId", logEntryId);
    const result = await this._entityStorageActivityLogs.get(logEntryId);
    if (Is.undefined(result)) {
        throw new NotFoundError(DataspaceDataPlaneService.CLASS_NAME, "activityLogEntryNotFound", logEntryId);
    }
    // Buckets stay undefined until the associated-tasks record is available,
    // so callers can distinguish "no data yet" from "empty".
    let pendingTasks;
    let runningTasks;
    let finalizedTasks;
    let inErrorTasks;
    // For calculating the processing status. `Registering` if we cannot determine the activity tasks yet.
    let status = ActivityProcessingStatus.Registering;
    // Now query the associated tasks.
    const activityTasks = await this._entityStorageActivityTasks.get(logEntryId);
    // If activity tasks is undefined it is because the corresponding store has not been persisted yet.
    if (!Is.undefined(activityTasks)) {
        pendingTasks = [];
        runningTasks = [];
        finalizedTasks = [];
        inErrorTasks = [];
        // Classify every associated background task into one of the buckets.
        for (const entity of activityTasks.associatedTasks) {
            const taskDetails = await this._backgroundTaskComponent.get(entity.taskId);
            if (Is.object(taskDetails)) {
                switch (taskDetails.status) {
                    case TaskStatus.Success:
                        finalizedTasks.push({
                            ...entity,
                            result: JSON.stringify(taskDetails.result),
                            startDate: taskDetails?.dateCreated,
                            endDate: taskDetails?.dateCompleted
                        });
                        break;
                    case TaskStatus.Pending:
                        pendingTasks.push(entity);
                        break;
                    case TaskStatus.Processing:
                        runningTasks.push({ ...entity, startDate: taskDetails.dateCreated });
                        break;
                    case TaskStatus.Failed:
                        inErrorTasks.push({
                            ...entity,
                            error: taskDetails.error
                        });
                        break;
                    case TaskStatus.Cancelled:
                        // Nothing to do for cancelled tasks.
                        break;
                }
            }
        }
        // Aggregate status priority: any failure wins, then running, then pending;
        // otherwise everything finished and the entry is Completed.
        if (Is.arrayValue(inErrorTasks)) {
            status = ActivityProcessingStatus.Error;
        }
        else if (Is.arrayValue(runningTasks)) {
            status = ActivityProcessingStatus.Running;
        }
        else if (Is.arrayValue(pendingTasks)) {
            status = ActivityProcessingStatus.Pending;
        }
        else {
            status = ActivityProcessingStatus.Completed;
        }
    }
    return { ...result, status, pendingTasks, runningTasks, finalizedTasks, inErrorTasks };
}
|
|
432
|
+
/**
|
|
433
|
+
* Get Data Asset entities. Allows to retrieve entities by their type or id.
|
|
434
|
+
* @param entitySet The set of entities to be retrieved.
|
|
435
|
+
* @param entitySet.jsonLdContext The JSON-LD Context to be used to expand the referred entityType.
|
|
436
|
+
* @param consumerPid The consumer Process ID from the DSP Transfer Process.
|
|
437
|
+
* Used to resolve datasetId from the Transfer Process.
|
|
438
|
+
* @param cursor Pagination details - cursor.
|
|
439
|
+
* @param limit Pagination details - max number of entities.
|
|
440
|
+
* @param trustPayload Trust payload to verify the requesters identity.
|
|
441
|
+
* @returns The entities requested as a JSON-LD Document.
|
|
442
|
+
*/
|
|
443
|
+
async getDataAssetEntities(entitySet, consumerPid, cursor, limit, trustPayload) {
    Guards.object(DataspaceDataPlaneService.CLASS_NAME, "entitySet", entitySet);
    Guards.string(DataspaceDataPlaneService.CLASS_NAME, "entitySet.entityType", entitySet.entityType);
    Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "consumerPid", consumerPid);
    // Verify the requester's identity from the trust payload before anything else.
    const trustInfo = await TrustHelper.verifyTrust(this._trustComponent, trustPayload, "getDataAssetEntities");
    const dataConsumerIdentity = trustInfo.identity;
    Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "dataConsumerIdentity", dataConsumerIdentity);
    // Use consumerPid to resolve datasetId via the Transfer Process.
    // This validates the transfer token, state, and extracts datasetId.
    const transferContext = await this.validateTransfer(consumerPid, trustPayload);
    const resolvedDatasetId = transferContext.datasetId;
    const serviceDataset = await this.getDatasetFromApps(resolvedDatasetId);
    // Expand the entity type if an LD context was provided, so apps receive a
    // fully-qualified type IRI.
    let finalType = entitySet.entityType;
    if (!Is.undefined(entitySet.jsonLdContext)) {
        const auxiliaryObj = {
            "@context": entitySet.jsonLdContext,
            "@type": entitySet.entityType
        };
        const expanded = (await JsonLdProcessor.expand(auxiliaryObj))[0];
        finalType = expanded["@type"]?.[0];
    }
    // Expansion can yield undefined if the type is unknown to the context.
    if (Is.undefined(finalType)) {
        throw new GuardError(DataspaceDataPlaneService.CLASS_NAME, "notExpandableType", "entitySet.entityType", entitySet.entityType);
    }
    const datasetId = serviceDataset["@id"];
    Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "datasetId", datasetId);
    const appId = await this.getAppForDataAssetQuery({ datasetId });
    // getAppForDataAssetQuery already validates the app exists.
    const app = DataspaceAppFactory.get(appId);
    // Bind so the app's handler keeps its own `this` when invoked below.
    const handleDataRequest = app.handleDataRequest?.bind(app);
    Guards.function(DataspaceDataPlaneService.CLASS_NAME, "handleDataRequest", handleDataRequest);
    const dataRequest = {
        type: DataRequestType.DataAssetEntities,
        dataAsset: serviceDataset,
        entitySet: {
            entityType: finalType,
            entityId: entitySet.entityId
        }
    };
    const { data, cursor: cursorResult } = await handleDataRequest(dataRequest, cursor, limit);
    // Normalise the app's response to an array for the ItemList wrapper.
    let finalData;
    if (Is.array(data)) {
        finalData = data;
    }
    else {
        finalData = [data];
    }
    // Wrap the entities as a schema.org ItemList JSON-LD document.
    const itemList = {
        "@context": SchemaOrgContexts.Context,
        type: SchemaOrgTypes.ItemList,
        itemListElement: finalData
    };
    let result = {
        itemList,
        cursor: cursorResult
    };
    // Apply policy filters from the Agreement; an empty filtered itemList means
    // the policy denied everything, so return an empty element list.
    if (transferContext?.agreement) {
        const filtered = await this.applyPolicyFilters(result, transferContext.agreement);
        if (filtered.itemList) {
            result = filtered;
        }
        else {
            result.itemList[SchemaOrgTypes.ItemListElement] = [];
        }
    }
    return result;
}
|
|
512
|
+
/**
|
|
513
|
+
* Queries a data asset controlled by this Dataspace App.
|
|
514
|
+
* @param consumerPid The consumer Process ID from the DSP Transfer Process.
|
|
515
|
+
* Used to resolve datasetId from the Transfer Process.
|
|
516
|
+
* @param query The filtering query.
|
|
517
|
+
* @param cursor Pagination details - cursor.
|
|
518
|
+
* @param limit Pagination details - max number of entities.
|
|
519
|
+
* @param trustPayload Trust payload to verify the requesters identity.
|
|
520
|
+
* @returns The item list and optional cursor for pagination via Link headers.
|
|
521
|
+
*/
|
|
522
|
+
async queryDataAsset(consumerPid, query, cursor, limit, trustPayload) {
    Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "consumerPid", consumerPid);
    Guards.object(DataspaceDataPlaneService.CLASS_NAME, "query", query);
    Guards.string(DataspaceDataPlaneService.CLASS_NAME, "query.type", query.type);
    // Verify the requester's identity from the trust payload before anything else.
    const trustInfo = await TrustHelper.verifyTrust(this._trustComponent, trustPayload, "queryDataAsset");
    const dataConsumerIdentity = trustInfo.identity;
    Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "dataConsumerIdentity", dataConsumerIdentity);
    // Use consumerPid to resolve datasetId via the Transfer Process.
    // This validates the transfer token, state, and extracts datasetId.
    const transferContext = await this.validateTransfer(consumerPid, trustPayload);
    const resolvedDatasetId = transferContext.datasetId;
    const serviceDataset = await this.getDatasetFromApps(resolvedDatasetId);
    const datasetId = serviceDataset["@id"];
    Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "datasetId", datasetId);
    const appId = await this.getAppForDataAssetQuery({ datasetId });
    const app = DataspaceAppFactory.get(appId);
    // Reject query types the owning app does not declare support for.
    if (!app.supportedQueryTypes().includes(query.type)) {
        throw new UnprocessableError(DataspaceDataPlaneService.CLASS_NAME, "queryTypeNotSupported", {
            queryType: query.type
        });
    }
    const dataRequest = {
        type: DataRequestType.QueryDataAsset,
        dataAsset: serviceDataset,
        query
    };
    // Bind so the app's handler keeps its own `this` when invoked below.
    const handleDataRequest = app.handleDataRequest?.bind(app);
    Guards.function(DataspaceDataPlaneService.CLASS_NAME, "handleDataRequest", handleDataRequest);
    const { data, cursor: cursorResult } = await handleDataRequest(dataRequest, cursor, limit);
    // Normalise the app's response to an array for the ItemList wrapper.
    let finalData;
    if (Is.array(data)) {
        finalData = data;
    }
    else {
        finalData = [data];
    }
    // Wrap the entities as a schema.org ItemList JSON-LD document.
    const itemList = {
        "@context": SchemaOrgContexts.Context,
        type: SchemaOrgTypes.ItemList,
        itemListElement: finalData
    };
    let result = {
        itemList,
        cursor: cursorResult
    };
    // Apply policy filters from the Agreement if using the consumerPid flow;
    // an empty filtered itemList means the policy denied everything.
    if (transferContext?.agreement) {
        const filtered = await this.applyPolicyFilters(result, transferContext.agreement);
        if (filtered.itemList) {
            result = filtered;
        }
        else {
            result.itemList[SchemaOrgTypes.ItemListElement] = [];
        }
    }
    return result;
}
|
|
579
|
+
/**
|
|
580
|
+
* Validate transfer authorization for data requests.
|
|
581
|
+
* Reads directly from shared TransferProcess entity storage.
|
|
582
|
+
* @param consumerPid The consumer process ID from the transfer request.
|
|
583
|
+
* @param trustPayload The trust payload for verification (validates signature and expiry).
|
|
584
|
+
* @returns The transfer context containing datasetId, agreement, and other transfer details.
|
|
585
|
+
* @throws GeneralError if transfer process storage is not configured.
|
|
586
|
+
* @throws NotFoundError if transfer process is not found.
|
|
587
|
+
* @throws UnauthorizedError if trust verification fails.
|
|
588
|
+
* @throws GeneralError if transfer is not in STARTED state.
|
|
589
|
+
*/
|
|
590
|
+
async validateTransfer(consumerPid, trustPayload) {
|
|
591
|
+
Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "consumerPid", consumerPid);
|
|
592
|
+
// Verify trust payload (validates JWT signature, expiry, and returns verification info)
|
|
593
|
+
// The trust verifier handles all token validation including expiry
|
|
594
|
+
await TrustHelper.verifyTrust(this._trustComponent, trustPayload, "validateTransfer");
|
|
595
|
+
// Direct lookup from shared entity storage by consumerPid (which is the primary key)
|
|
596
|
+
const transferProcess = await this._transferProcessStorage.get(consumerPid);
|
|
597
|
+
if (!transferProcess) {
|
|
598
|
+
throw new NotFoundError(DataspaceDataPlaneService.CLASS_NAME, "transferProcessNotFound", consumerPid);
|
|
599
|
+
}
|
|
600
|
+
// Validate state: must be STARTED
|
|
601
|
+
if (transferProcess.state !== DataspaceProtocolTransferProcessStateType.STARTED) {
|
|
602
|
+
throw new GeneralError(DataspaceDataPlaneService.CLASS_NAME, "transferNotInStartedState", {
|
|
603
|
+
currentState: transferProcess.state
|
|
604
|
+
});
|
|
605
|
+
}
|
|
606
|
+
// Validate datasetId is present
|
|
607
|
+
if (!Is.stringValue(transferProcess.datasetId)) {
|
|
608
|
+
throw new GeneralError(DataspaceDataPlaneService.CLASS_NAME, "transferMissingDatasetId");
|
|
609
|
+
}
|
|
610
|
+
return this.buildTransferContext(transferProcess);
|
|
611
|
+
}
|
|
612
|
+
// ============================================================================
|
|
613
|
+
// PRIVATE HELPER METHODS
|
|
614
|
+
// ============================================================================
|
|
615
|
+
/**
|
|
616
|
+
* Calculates the activity generator from the generator or actor fields.
|
|
617
|
+
* @param activity The activity.
|
|
618
|
+
* @returns The generator's identity.
|
|
619
|
+
* @throws General Error if no identity is found.
|
|
620
|
+
* @internal
|
|
621
|
+
*/
|
|
622
|
+
calculateActivityGeneratorIdentity(activity) {
|
|
623
|
+
if (Is.stringValue(activity.generator)) {
|
|
624
|
+
return activity.generator;
|
|
625
|
+
}
|
|
626
|
+
if (Is.object(activity.generator) && Is.stringValue(activity.generator.id)) {
|
|
627
|
+
return activity.generator.id;
|
|
628
|
+
}
|
|
629
|
+
if (Is.stringValue(activity.actor)) {
|
|
630
|
+
return activity.actor;
|
|
631
|
+
}
|
|
632
|
+
if (Is.object(activity.actor) && Is.stringValue(activity.actor.id)) {
|
|
633
|
+
return activity.actor.id;
|
|
634
|
+
}
|
|
635
|
+
throw new GuardError(DataspaceDataPlaneService.CLASS_NAME, "invalidActivityGeneratorIdentity", "activity.generator", {
|
|
636
|
+
generator: activity.generator,
|
|
637
|
+
actor: activity.actor
|
|
638
|
+
});
|
|
639
|
+
}
|
|
640
|
+
/**
 * Process activity task finalization: notify status callbacks and, when the
 * whole activity has completed (or errored), stamp the log entry for retention.
 * @param task The finalized activity task to process (carries status and payload).
 * @internal
 */
async finaliseTask(task) {
    const payload = task.payload;
    if (Is.empty(payload)) {
        // Nothing to finalize without a payload.
        return;
    }
    const activityLogEntry = await this._entityStorageActivityLogs.get(payload.activityLogEntryId);
    if (Is.undefined(activityLogEntry)) {
        // NOTE(review): the unknown-entry case is only logged; execution then
        // continues into the callback/retention logic below, where
        // getActivityLogEntry for the same id will presumably fail — confirm
        // whether an early return is intended here.
        await this._logging?.log({
            level: "error",
            source: DataspaceDataPlaneService.CLASS_NAME,
            message: "unknownActivityLogEntryId",
            data: {
                activityLogEntryId: payload.activityLogEntryId
            }
        });
    }
    // Only terminal task states trigger notifications and retention handling.
    if (task.status === TaskStatus.Success || task.status === TaskStatus.Failed) {
        // Notify every registered status callback, sequentially.
        for (const callback of Object.values(this._activityLogStatusCallbacks)) {
            await callback({
                activityLogEntryId: payload.activityLogEntryId,
                // NOTE(review): assumes payload.activity is always an object
                // here — a missing activity would throw. TODO confirm.
                activityId: Is.string(payload.activity.id) ? payload.activity.id : undefined,
                taskProcessingStatus: {
                    dataspaceAppId: payload.executorApp,
                    taskId: task.id,
                    taskStatus: task.status
                }
            });
        }
        // Now let's see if the full activity processing has completed; if so
        // the entry must be marked for retention (-1 means retain forever).
        if (this._retainActivityLogsFor !== -1) {
            const entry = await this.getActivityLogEntry(payload.activityLogEntryId);
            if (entry.status === ActivityProcessingStatus.Completed ||
                entry.status === ActivityProcessingStatus.Error) {
                // Stamp the retention deadline relative to now.
                const retainUntil = Date.now() + this._retainActivityLogsFor;
                await this._entityStorageActivityLogs.set({
                    id: entry.id,
                    activityId: entry.activityId,
                    generator: entry.generator,
                    dateCreated: entry.dateCreated,
                    dateModified: entry.dateModified,
                    retainUntil,
                    retryCount: entry.retryCount
                });
            }
        }
    }
}
|
|
692
|
+
/**
|
|
693
|
+
* Updates the list of active tenants for cleanup tasks.
|
|
694
|
+
* @internal
|
|
695
|
+
*/
|
|
696
|
+
async updateActiveTenants() {
|
|
697
|
+
const contextIds = await ContextIdStore.getContextIds();
|
|
698
|
+
const tenantId = contextIds?.[ContextIdKeys.Tenant];
|
|
699
|
+
if (Is.stringValue(tenantId) && !this._activeTenants.includes(tenantId)) {
|
|
700
|
+
this._activeTenants.push(tenantId);
|
|
701
|
+
}
|
|
702
|
+
}
|
|
703
|
+
/**
|
|
704
|
+
* Cleans up the activity log by deleting those entries that no longer shall be retained.
|
|
705
|
+
* @internal
|
|
706
|
+
*/
|
|
707
|
+
async cleanupActivityLog() {
|
|
708
|
+
if (this._cleanUpProcessOngoing) {
|
|
709
|
+
await this._logging?.log({
|
|
710
|
+
level: "debug",
|
|
711
|
+
message: "cleanUpOngoing",
|
|
712
|
+
source: DataspaceDataPlaneService.CLASS_NAME
|
|
713
|
+
});
|
|
714
|
+
return;
|
|
715
|
+
}
|
|
716
|
+
this._cleanUpProcessOngoing = true;
|
|
717
|
+
let numRecordsDeleted = 0;
|
|
718
|
+
if (this._partitionContextIds?.includes(ContextIdKeys.Tenant)) {
|
|
719
|
+
// The cleanup must be done tenant by tenant
|
|
720
|
+
// as the data behind the scenes might be partitioned
|
|
721
|
+
for (const tenantId of this._activeTenants) {
|
|
722
|
+
const localContextIds = (await ContextIdStore.getContextIds()) ?? {};
|
|
723
|
+
localContextIds[ContextIdKeys.Tenant] = tenantId;
|
|
724
|
+
await ContextIdStore.run(localContextIds, async () => {
|
|
725
|
+
numRecordsDeleted += await this.cleanupActivityLogPartition();
|
|
726
|
+
});
|
|
727
|
+
}
|
|
728
|
+
}
|
|
729
|
+
else {
|
|
730
|
+
numRecordsDeleted += await this.cleanupActivityLogPartition();
|
|
731
|
+
}
|
|
732
|
+
await this._logging?.log({
|
|
733
|
+
level: "debug",
|
|
734
|
+
message: "activityLogCleanedUp",
|
|
735
|
+
source: DataspaceDataPlaneService.CLASS_NAME,
|
|
736
|
+
data: {
|
|
737
|
+
numRecordsDeleted
|
|
738
|
+
}
|
|
739
|
+
});
|
|
740
|
+
this._cleanUpProcessOngoing = false;
|
|
741
|
+
}
|
|
742
|
+
/**
 * Cleans up the activity log partition for the current context ids.
 * Deletes log and task entries whose retention window (0 < retainUntil < now)
 * has elapsed and whose activity processing has finished.
 * @returns The number of records deleted in this partition.
 * @internal
 */
async cleanupActivityLogPartition() {
    let numRecordsDeleted = 0;
    try {
        let cursor;
        const now = Date.now();
        do {
            // NOTE(review): the cursor returned by the previous iteration is
            // never passed back into query(), so every iteration fetches the
            // first page again. This terminates only because expired entries
            // are removed below (or the cursor comes back empty) — confirm
            // against the entity-storage query API whether the cursor should
            // be supplied here.
            const result = await this._entityStorageActivityLogs.query({
                conditions: [
                    {
                        property: "retainUntil",
                        value: 0,
                        comparison: ComparisonOperator.GreaterThan
                    },
                    {
                        property: "retainUntil",
                        value: now,
                        comparison: ComparisonOperator.LessThan
                    }
                ],
                logicalOperator: LogicalOperator.And
            });
            cursor = result.cursor;
            for (const entity of result.entities) {
                const logEntryDetails = await this.getActivityLogEntry(entity.id);
                // Only remove entries whose processing reached a terminal state.
                if (logEntryDetails.status === ActivityProcessingStatus.Completed ||
                    logEntryDetails.status === ActivityProcessingStatus.Error) {
                    await this._entityStorageActivityLogs.remove(entity.id);
                    await this._entityStorageActivityTasks.remove(entity.id);
                    numRecordsDeleted++;
                }
            }
        } while (Is.stringValue(cursor));
    }
    catch {
        // If cleaning up the retained items fails we don't really care; they
        // will get cleaned up on the next sweep.
    }
    return numRecordsDeleted;
}
|
|
785
|
+
/**
 * Calculates the (Activity, Object, Target) query set by JSON-LD-expanding
 * the activity, its object and its optional target, then producing the
 * cartesian product of their expanded type FQNs.
 * @param compactedObj The compacted JSON-LD object representing the Activity.
 * @returns The (Activity, Object, Target) query set.
 * @throws GuardError if the activity, object or target types cannot be expanded.
 * @internal
 */
async calculateActivityQuerySet(compactedObj) {
    // Expand only the activity's own type against its context.
    const expanded = await JsonLdProcessor.expand({
        "@context": compactedObj["@context"],
        "@type": compactedObj.type
    });
    const expandedDoc = expanded[0];
    const activityTypes = expandedDoc["@type"];
    if (!Is.arrayValue(activityTypes)) {
        throw new GuardError(DataspaceDataPlaneService.CLASS_NAME, "invalidActivity", "compactedObj.type", compactedObj.type);
    }
    // NOTE: intentionally mutates the caller's objects — each embedded object
    // inherits the activity's @context when it has none, so expansion works.
    const objects = ArrayHelper.fromObjectOrArray(compactedObj?.object);
    if (Is.arrayValue(objects)) {
        for (const obj of objects) {
            if (Is.object(obj) && Is.empty(obj["@context"])) {
                obj["@context"] = compactedObj["@context"];
            }
        }
    }
    const objectExpanded = await JsonLdProcessor.expand(compactedObj.object);
    const objectTypes = objectExpanded[0]["@type"];
    if (!Is.arrayValue(objectTypes)) {
        throw new GuardError(DataspaceDataPlaneService.CLASS_NAME, "invalidActivity", "objectTypes", compactedObj);
    }
    // [""] is a sentinel meaning "no target"; it is mapped to undefined below
    // so the cartesian product still yields one query per (activity, object).
    let targetTypes = [""];
    if (Is.object(compactedObj.target)) {
        // Same @context inheritance as for the object (mutates the target).
        if (Is.undefined(compactedObj.target["@context"])) {
            compactedObj.target["@context"] = compactedObj["@context"];
        }
        const targetExpanded = await JsonLdProcessor.expand(compactedObj.target);
        targetTypes = targetExpanded[0]["@type"];
        if (!Is.arrayValue(targetTypes)) {
            throw new GuardError(DataspaceDataPlaneService.CLASS_NAME, "invalidActivity", "compactedObj.target.type", compactedObj.target?.type);
        }
    }
    // Cartesian product: one query per (activityType, objectType, targetType).
    const result = [];
    for (const activityType of activityTypes) {
        for (const objectType of objectTypes) {
            for (const targetType of targetTypes) {
                const query = {
                    activityType,
                    objectType,
                    targetType: !Is.stringValue(targetType) ? undefined : targetType
                };
                result.push(query);
            }
        }
    }
    return result;
}
|
|
840
|
+
/**
|
|
841
|
+
* Calculates the cleaning task schedule.
|
|
842
|
+
* @param minutes The period in minutes.
|
|
843
|
+
* @returns The cleaning task schedule.
|
|
844
|
+
* @internal
|
|
845
|
+
*/
|
|
846
|
+
calculateCleaningTaskSchedule(minutes) {
|
|
847
|
+
let minutesRemain = minutes;
|
|
848
|
+
const days = Math.floor(minutesRemain / DataspaceDataPlaneService._MINUTES_PER_DAY);
|
|
849
|
+
minutesRemain %= DataspaceDataPlaneService._MINUTES_PER_DAY;
|
|
850
|
+
const hours = Math.floor(minutesRemain / 60);
|
|
851
|
+
minutesRemain %= 60;
|
|
852
|
+
return { intervalDays: days, intervalHours: hours, intervalMinutes: minutesRemain };
|
|
853
|
+
}
|
|
854
|
+
/**
|
|
855
|
+
* Returns an App for a (Activity, Object, Target).
|
|
856
|
+
* @param activityQuery The (Activity, Object, Target) query specified using a FQN.
|
|
857
|
+
* @returns The Dataspace Data Plane Apps or empty list if nothing is registered.
|
|
858
|
+
* @internal
|
|
859
|
+
*/
|
|
860
|
+
getAppForActivityQuery(activityQuery) {
|
|
861
|
+
const matchingElements = [];
|
|
862
|
+
const appNames = DataspaceAppFactory.names();
|
|
863
|
+
for (const appId of appNames) {
|
|
864
|
+
const app = DataspaceAppFactory.get(appId);
|
|
865
|
+
const appQueries = app.activitiesHandled();
|
|
866
|
+
for (const appQuery of appQueries) {
|
|
867
|
+
if (appQuery.objectType === activityQuery.objectType &&
|
|
868
|
+
(Is.undefined(appQuery.activityType) ||
|
|
869
|
+
appQuery.activityType === activityQuery.activityType) &&
|
|
870
|
+
(Is.undefined(appQuery.targetType) || appQuery.targetType === activityQuery.targetType)) {
|
|
871
|
+
matchingElements.push(appId);
|
|
872
|
+
}
|
|
873
|
+
}
|
|
874
|
+
}
|
|
875
|
+
return matchingElements;
|
|
876
|
+
}
|
|
877
|
+
/**
|
|
878
|
+
* Get a dataset from registered apps by its ID.
|
|
879
|
+
* @param datasetId The dataset identifier (@id)
|
|
880
|
+
* @returns The dataset
|
|
881
|
+
* @throws NotFoundError if no app handles this dataset
|
|
882
|
+
* @internal
|
|
883
|
+
*/
|
|
884
|
+
async getDatasetFromApps(datasetId) {
|
|
885
|
+
Guards.stringValue(DataspaceDataPlaneService.CLASS_NAME, "datasetId", datasetId);
|
|
886
|
+
const appNames = DataspaceAppFactory.names();
|
|
887
|
+
for (const appId of appNames) {
|
|
888
|
+
const app = DataspaceAppFactory.get(appId);
|
|
889
|
+
const datasets = await app.datasetsHandled();
|
|
890
|
+
const dataset = datasets.find(d => d["@id"] === datasetId);
|
|
891
|
+
if (dataset) {
|
|
892
|
+
return dataset;
|
|
893
|
+
}
|
|
894
|
+
}
|
|
895
|
+
throw new NotFoundError(DataspaceDataPlaneService.CLASS_NAME, "noAppRegistered", datasetId, {
|
|
896
|
+
datasetId
|
|
897
|
+
});
|
|
898
|
+
}
|
|
899
|
+
/**
|
|
900
|
+
* Returns an App for a Data Asset query (datasetId, ...).
|
|
901
|
+
* @param dataAssetQuery The data asset query.
|
|
902
|
+
* @returns The Dataspace Data Plane App ID.
|
|
903
|
+
* @internal
|
|
904
|
+
*/
|
|
905
|
+
async getAppForDataAssetQuery(dataAssetQuery) {
|
|
906
|
+
const matchingElements = [];
|
|
907
|
+
const appNames = DataspaceAppFactory.names();
|
|
908
|
+
for (const appId of appNames) {
|
|
909
|
+
const app = DataspaceAppFactory.get(appId);
|
|
910
|
+
const datasets = await app.datasetsHandled();
|
|
911
|
+
for (const dataset of datasets) {
|
|
912
|
+
if (dataset["@id"] === dataAssetQuery.datasetId) {
|
|
913
|
+
matchingElements.push(appId);
|
|
914
|
+
}
|
|
915
|
+
}
|
|
916
|
+
}
|
|
917
|
+
if (matchingElements.length > 1) {
|
|
918
|
+
const error = new ConflictError(DataspaceDataPlaneService.CLASS_NAME, "tooManyAppsRegistered", dataAssetQuery.datasetId, matchingElements, {
|
|
919
|
+
datasetId: dataAssetQuery.datasetId
|
|
920
|
+
});
|
|
921
|
+
await this._logging?.log({
|
|
922
|
+
source: DataspaceDataPlaneService.CLASS_NAME,
|
|
923
|
+
level: "error",
|
|
924
|
+
message: "tooManyAppsRegistered",
|
|
925
|
+
error,
|
|
926
|
+
data: {
|
|
927
|
+
datasetId: dataAssetQuery.datasetId
|
|
928
|
+
}
|
|
929
|
+
});
|
|
930
|
+
throw error;
|
|
931
|
+
}
|
|
932
|
+
if (!Is.arrayValue(matchingElements)) {
|
|
933
|
+
const error = new NotFoundError(DataspaceDataPlaneService.CLASS_NAME, "noAppRegistered", dataAssetQuery.datasetId, {
|
|
934
|
+
datasetId: dataAssetQuery.datasetId
|
|
935
|
+
});
|
|
936
|
+
await this._logging?.log({
|
|
937
|
+
source: DataspaceDataPlaneService.CLASS_NAME,
|
|
938
|
+
level: "error",
|
|
939
|
+
message: "noAppRegistered",
|
|
940
|
+
error,
|
|
941
|
+
data: {
|
|
942
|
+
datasetId: dataAssetQuery.datasetId
|
|
943
|
+
}
|
|
944
|
+
});
|
|
945
|
+
throw error;
|
|
946
|
+
}
|
|
947
|
+
return matchingElements[0];
|
|
948
|
+
}
|
|
949
|
+
/**
|
|
950
|
+
* Prepare an activity for retry by updating metadata and returning apps to skip.
|
|
951
|
+
* @param activityLogEntryId The activity log entry ID.
|
|
952
|
+
* @param existingEntry The existing activity log entry with error status.
|
|
953
|
+
* @returns Array of app IDs that already succeeded and should be skipped.
|
|
954
|
+
* @internal
|
|
955
|
+
*/
|
|
956
|
+
async prepareForRetry(activityLogEntryId, existingEntry) {
|
|
957
|
+
const appsToRetry = existingEntry.inErrorTasks?.map(t => t.dataspaceAppId) ?? [];
|
|
958
|
+
if (!Is.arrayValue(appsToRetry)) {
|
|
959
|
+
throw new NotFoundError(DataspaceDataPlaneService.CLASS_NAME, "noFailedTasksToRetry", activityLogEntryId);
|
|
960
|
+
}
|
|
961
|
+
const successfulApps = existingEntry.finalizedTasks?.map(t => t.dataspaceAppId) ?? [];
|
|
962
|
+
await this._logging?.log({
|
|
963
|
+
level: "debug",
|
|
964
|
+
source: DataspaceDataPlaneService.CLASS_NAME,
|
|
965
|
+
message: "replacingFailedTasks",
|
|
966
|
+
data: {
|
|
967
|
+
activityLogEntryId,
|
|
968
|
+
appsToRetry,
|
|
969
|
+
successfulApps
|
|
970
|
+
}
|
|
971
|
+
});
|
|
972
|
+
const logEntry = await this._entityStorageActivityLogs.get(activityLogEntryId);
|
|
973
|
+
if (logEntry) {
|
|
974
|
+
logEntry.dateModified = new Date().toISOString();
|
|
975
|
+
// Extend retention to allow retry to complete
|
|
976
|
+
if (this._retainActivityLogsFor !== -1) {
|
|
977
|
+
logEntry.retainUntil = Date.now() + this._retainActivityLogsFor;
|
|
978
|
+
}
|
|
979
|
+
// Monitoring purposes
|
|
980
|
+
logEntry.retryCount = (logEntry.retryCount ?? 0) + 1;
|
|
981
|
+
await this._entityStorageActivityLogs.set(logEntry);
|
|
982
|
+
}
|
|
983
|
+
return successfulApps;
|
|
984
|
+
}
|
|
985
|
+
/**
|
|
986
|
+
* Build transfer context from a TransferProcessEntity.
|
|
987
|
+
* @param transferProcess The transfer process entity.
|
|
988
|
+
* @returns The transfer context for use by data access methods.
|
|
989
|
+
* @internal
|
|
990
|
+
*/
|
|
991
|
+
buildTransferContext(transferProcess) {
|
|
992
|
+
// Build the IOdrlAgreement from stored data
|
|
993
|
+
// The entity stores agreementId and policies separately
|
|
994
|
+
//
|
|
995
|
+
// NOTE: Currently policies are cached in the TransferProcessEntity at transfer start time.
|
|
996
|
+
// Eventually, this should fetch fresh policies from Rights Management (PAP) using:
|
|
997
|
+
// const freshAgreement = await this._policyAdministrationPoint.get(transferProcess.agreementId);
|
|
998
|
+
// This would ensure policies are always up-to-date and support dynamic policy updates.
|
|
999
|
+
const agreement = {
|
|
1000
|
+
"@context": "http://www.w3.org/ns/odrl.jsonld",
|
|
1001
|
+
"@type": "Agreement",
|
|
1002
|
+
uid: transferProcess.agreementId,
|
|
1003
|
+
target: transferProcess.datasetId,
|
|
1004
|
+
// Provider is the assigner, consumer is the assignee
|
|
1005
|
+
assigner: transferProcess.providerIdentity ?? "",
|
|
1006
|
+
assignee: transferProcess.consumerIdentity ?? ""
|
|
1007
|
+
};
|
|
1008
|
+
// Extract policies from the stored Agreement
|
|
1009
|
+
// Extract permission, prohibition, and obligation
|
|
1010
|
+
if (Is.arrayValue(transferProcess.policies)) {
|
|
1011
|
+
const storedAgreement = transferProcess.policies[0];
|
|
1012
|
+
if (storedAgreement) {
|
|
1013
|
+
agreement.permission = storedAgreement.permission;
|
|
1014
|
+
agreement.prohibition = storedAgreement.prohibition;
|
|
1015
|
+
agreement.obligation = storedAgreement.obligation;
|
|
1016
|
+
}
|
|
1017
|
+
}
|
|
1018
|
+
const state = transferProcess.state;
|
|
1019
|
+
const dataAddress = transferProcess.dataAddress;
|
|
1020
|
+
return {
|
|
1021
|
+
consumerPid: transferProcess.consumerPid,
|
|
1022
|
+
providerPid: transferProcess.providerPid,
|
|
1023
|
+
agreement,
|
|
1024
|
+
datasetId: transferProcess.datasetId,
|
|
1025
|
+
offerId: transferProcess.offerId,
|
|
1026
|
+
state,
|
|
1027
|
+
consumerIdentity: transferProcess.consumerIdentity,
|
|
1028
|
+
providerIdentity: transferProcess.providerIdentity,
|
|
1029
|
+
dataAddress
|
|
1030
|
+
};
|
|
1031
|
+
}
|
|
1032
|
+
/**
|
|
1033
|
+
* Apply ODRL policy enforcement to query results.
|
|
1034
|
+
* Delegates to the Policy Enforcement Point (PEP) which coordinates
|
|
1035
|
+
* PDP, arbiters, and enforcement processors.
|
|
1036
|
+
* @param result The data asset item list result to filter.
|
|
1037
|
+
* @param agreement The ODRL Agreement containing policies.
|
|
1038
|
+
* @returns The result with policies applied.
|
|
1039
|
+
* @internal
|
|
1040
|
+
*/
|
|
1041
|
+
async applyPolicyFilters(result, agreement) {
|
|
1042
|
+
if (!agreement || !this._policyEnforcementPoint) {
|
|
1043
|
+
return result;
|
|
1044
|
+
}
|
|
1045
|
+
const processed = await this._policyEnforcementPoint.interceptWithPolicy(agreement, result);
|
|
1046
|
+
if (Is.arrayValue(agreement.obligation)) {
|
|
1047
|
+
await this.logObligations(agreement.obligation, agreement.uid);
|
|
1048
|
+
}
|
|
1049
|
+
return processed;
|
|
1050
|
+
}
|
|
1051
|
+
/**
|
|
1052
|
+
* Log ODRL obligations for auditing purposes.
|
|
1053
|
+
* Obligations are duties that must be fulfilled as part of the agreement.
|
|
1054
|
+
* @param obligations The obligation rules from the Agreement.
|
|
1055
|
+
* @param agreementId The agreement ID for reference.
|
|
1056
|
+
* @internal
|
|
1057
|
+
*/
|
|
1058
|
+
async logObligations(obligations, agreementId) {
|
|
1059
|
+
if (!Is.arrayValue(obligations)) {
|
|
1060
|
+
return;
|
|
1061
|
+
}
|
|
1062
|
+
for (const obligation of obligations) {
|
|
1063
|
+
await this._logging?.log({
|
|
1064
|
+
level: "info",
|
|
1065
|
+
source: DataspaceDataPlaneService.CLASS_NAME,
|
|
1066
|
+
message: "policyObligationTriggered",
|
|
1067
|
+
data: {
|
|
1068
|
+
agreementId,
|
|
1069
|
+
action: obligation.action,
|
|
1070
|
+
target: obligation.target,
|
|
1071
|
+
assignee: obligation.assignee
|
|
1072
|
+
}
|
|
1073
|
+
});
|
|
1074
|
+
}
|
|
1075
|
+
}
|
|
1076
|
+
}
|
|
1077
|
+
//# sourceMappingURL=dataspaceDataPlaneService.js.map
|