@dxos/echo-pipeline 0.6.12 → 0.6.13-main.548ca8d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/lib/browser/chunk-PESZVYAN.mjs +2050 -0
- package/dist/lib/browser/chunk-PESZVYAN.mjs.map +7 -0
- package/dist/lib/browser/index.mjs +3463 -17
- package/dist/lib/browser/index.mjs.map +4 -4
- package/dist/lib/browser/meta.json +1 -1
- package/dist/lib/browser/testing/index.mjs +3 -4
- package/dist/lib/browser/testing/index.mjs.map +3 -3
- package/dist/lib/node/{chunk-7HHYCGUR.cjs → chunk-6EZVIJNE.cjs} +89 -47
- package/dist/lib/node/chunk-6EZVIJNE.cjs.map +7 -0
- package/dist/lib/node/index.cjs +3440 -35
- package/dist/lib/node/index.cjs.map +4 -4
- package/dist/lib/node/meta.json +1 -1
- package/dist/lib/node/testing/index.cjs +11 -12
- package/dist/lib/node/testing/index.cjs.map +3 -3
- package/dist/lib/{browser/chunk-UKXIJW43.mjs → node-esm/chunk-4LW7MDPZ.mjs} +76 -36
- package/dist/lib/node-esm/chunk-4LW7MDPZ.mjs.map +7 -0
- package/dist/lib/{browser/chunk-MPWFDDQK.mjs → node-esm/index.mjs} +1702 -335
- package/dist/lib/node-esm/index.mjs.map +7 -0
- package/dist/lib/node-esm/meta.json +1 -0
- package/dist/lib/node-esm/testing/index.mjs +551 -0
- package/dist/lib/node-esm/testing/index.mjs.map +7 -0
- package/dist/types/src/automerge/automerge-host.d.ts +24 -1
- package/dist/types/src/automerge/automerge-host.d.ts.map +1 -1
- package/dist/types/src/automerge/collection-synchronizer.d.ts +2 -0
- package/dist/types/src/automerge/collection-synchronizer.d.ts.map +1 -1
- package/dist/types/src/automerge/echo-network-adapter.d.ts.map +1 -1
- package/dist/types/src/automerge/echo-replicator.d.ts +3 -3
- package/dist/types/src/automerge/echo-replicator.d.ts.map +1 -1
- package/dist/types/src/automerge/mesh-echo-replicator-connection.d.ts +3 -3
- package/dist/types/src/automerge/mesh-echo-replicator-connection.d.ts.map +1 -1
- package/dist/types/src/automerge/mesh-echo-replicator.d.ts.map +1 -1
- package/dist/types/src/automerge/space-collection.d.ts +3 -2
- package/dist/types/src/automerge/space-collection.d.ts.map +1 -1
- package/dist/types/src/db-host/automerge-metrics.d.ts +11 -0
- package/dist/types/src/db-host/automerge-metrics.d.ts.map +1 -0
- package/dist/types/src/db-host/data-service.d.ts +3 -2
- package/dist/types/src/db-host/data-service.d.ts.map +1 -1
- package/dist/types/src/db-host/database-root.d.ts +20 -0
- package/dist/types/src/db-host/database-root.d.ts.map +1 -0
- package/dist/types/src/db-host/documents-iterator.d.ts +7 -0
- package/dist/types/src/db-host/documents-iterator.d.ts.map +1 -0
- package/dist/types/src/db-host/echo-host.d.ts +73 -0
- package/dist/types/src/db-host/echo-host.d.ts.map +1 -0
- package/dist/types/src/db-host/index.d.ts +5 -0
- package/dist/types/src/db-host/index.d.ts.map +1 -1
- package/dist/types/src/db-host/migration.d.ts +8 -0
- package/dist/types/src/db-host/migration.d.ts.map +1 -0
- package/dist/types/src/db-host/query-service.d.ts +25 -0
- package/dist/types/src/db-host/query-service.d.ts.map +1 -0
- package/dist/types/src/db-host/query-state.d.ts +41 -0
- package/dist/types/src/db-host/query-state.d.ts.map +1 -0
- package/dist/types/src/db-host/space-state-manager.d.ts +23 -0
- package/dist/types/src/db-host/space-state-manager.d.ts.map +1 -0
- package/dist/types/src/edge/echo-edge-replicator.d.ts +23 -0
- package/dist/types/src/edge/echo-edge-replicator.d.ts.map +1 -0
- package/dist/types/src/edge/echo-edge-replicator.test.d.ts +2 -0
- package/dist/types/src/edge/echo-edge-replicator.test.d.ts.map +1 -0
- package/dist/types/src/edge/index.d.ts +2 -0
- package/dist/types/src/edge/index.d.ts.map +1 -0
- package/dist/types/src/index.d.ts +1 -0
- package/dist/types/src/index.d.ts.map +1 -1
- package/dist/types/src/metadata/metadata-store.d.ts +4 -1
- package/dist/types/src/metadata/metadata-store.d.ts.map +1 -1
- package/dist/types/src/testing/test-agent-builder.d.ts.map +1 -1
- package/dist/types/src/testing/test-replicator.d.ts +4 -4
- package/dist/types/src/testing/test-replicator.d.ts.map +1 -1
- package/package.json +40 -50
- package/src/automerge/automerge-host.test.ts +8 -9
- package/src/automerge/automerge-host.ts +46 -7
- package/src/automerge/automerge-repo.test.ts +18 -16
- package/src/automerge/collection-synchronizer.test.ts +10 -5
- package/src/automerge/collection-synchronizer.ts +17 -6
- package/src/automerge/echo-data-monitor.test.ts +1 -3
- package/src/automerge/echo-network-adapter.test.ts +4 -3
- package/src/automerge/echo-network-adapter.ts +5 -4
- package/src/automerge/echo-replicator.ts +3 -3
- package/src/automerge/mesh-echo-replicator-connection.ts +10 -9
- package/src/automerge/mesh-echo-replicator.ts +2 -1
- package/src/automerge/space-collection.ts +3 -2
- package/src/automerge/storage-adapter.test.ts +2 -3
- package/src/db-host/automerge-metrics.ts +38 -0
- package/src/db-host/data-service.ts +29 -14
- package/src/db-host/database-root.ts +86 -0
- package/src/db-host/documents-iterator.ts +73 -0
- package/src/db-host/documents-synchronizer.test.ts +2 -2
- package/src/db-host/echo-host.ts +257 -0
- package/src/db-host/index.ts +6 -1
- package/src/db-host/migration.ts +57 -0
- package/src/db-host/query-service.ts +208 -0
- package/src/db-host/query-state.ts +200 -0
- package/src/db-host/space-state-manager.ts +90 -0
- package/src/edge/echo-edge-replicator.test.ts +96 -0
- package/src/edge/echo-edge-replicator.ts +337 -0
- package/src/edge/index.ts +5 -0
- package/src/index.ts +1 -0
- package/src/metadata/metadata-store.ts +20 -0
- package/src/pipeline/pipeline-stress.test.ts +44 -47
- package/src/pipeline/pipeline.test.ts +3 -4
- package/src/space/control-pipeline.test.ts +2 -3
- package/src/space/control-pipeline.ts +10 -1
- package/src/space/replication.browser.test.ts +2 -8
- package/src/space/space-manager.browser.test.ts +6 -5
- package/src/space/space-protocol.browser.test.ts +29 -34
- package/src/space/space-protocol.test.ts +29 -27
- package/src/space/space.test.ts +28 -11
- package/src/testing/test-agent-builder.ts +2 -2
- package/src/testing/test-replicator.ts +3 -3
- package/dist/lib/browser/chunk-MPWFDDQK.mjs.map +0 -7
- package/dist/lib/browser/chunk-UKXIJW43.mjs.map +0 -7
- package/dist/lib/browser/chunk-XPCF2V5U.mjs +0 -31
- package/dist/lib/browser/chunk-XPCF2V5U.mjs.map +0 -7
- package/dist/lib/browser/light.mjs +0 -32
- package/dist/lib/browser/light.mjs.map +0 -7
- package/dist/lib/node/chunk-5DH4KR2S.cjs +0 -2148
- package/dist/lib/node/chunk-5DH4KR2S.cjs.map +0 -7
- package/dist/lib/node/chunk-7HHYCGUR.cjs.map +0 -7
- package/dist/lib/node/chunk-DZVH7HDD.cjs +0 -43
- package/dist/lib/node/chunk-DZVH7HDD.cjs.map +0 -7
- package/dist/lib/node/light.cjs +0 -52
- package/dist/lib/node/light.cjs.map +0 -7
- package/dist/types/src/light.d.ts +0 -4
- package/dist/types/src/light.d.ts.map +0 -1
- package/src/light.ts +0 -7
|
@@ -0,0 +1,2050 @@
|
|
|
1
|
+
import "@dxos/node-std/globals";
|
|
2
|
+
|
|
3
|
+
// inject-globals:@inject-globals
|
|
4
|
+
import {
|
|
5
|
+
global,
|
|
6
|
+
Buffer,
|
|
7
|
+
process
|
|
8
|
+
} from "@dxos/node-std/inject-globals";
|
|
9
|
+
|
|
10
|
+
// packages/core/echo/echo-pipeline/src/common/codec.ts
|
|
11
|
+
import { createCodecEncoding } from "@dxos/hypercore";
|
|
12
|
+
import { schema } from "@dxos/protocols/proto";
|
|
13
|
+
var codec = schema.getCodecForType("dxos.echo.feed.FeedMessage");
|
|
14
|
+
var valueEncoding = createCodecEncoding(codec);
|
|
15
|
+
|
|
16
|
+
// packages/core/echo/echo-pipeline/src/common/feeds.ts
|
|
17
|
+
import { invariant } from "@dxos/invariant";
|
|
18
|
+
var __dxlog_file = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/common/feeds.ts";
|
|
19
|
+
var createMappedFeedWriter = (mapper, writer) => {
|
|
20
|
+
invariant(mapper, void 0, {
|
|
21
|
+
F: __dxlog_file,
|
|
22
|
+
L: 16,
|
|
23
|
+
S: void 0,
|
|
24
|
+
A: [
|
|
25
|
+
"mapper",
|
|
26
|
+
""
|
|
27
|
+
]
|
|
28
|
+
});
|
|
29
|
+
invariant(writer, void 0, {
|
|
30
|
+
F: __dxlog_file,
|
|
31
|
+
L: 17,
|
|
32
|
+
S: void 0,
|
|
33
|
+
A: [
|
|
34
|
+
"writer",
|
|
35
|
+
""
|
|
36
|
+
]
|
|
37
|
+
});
|
|
38
|
+
return {
|
|
39
|
+
write: async (data, options) => await writer.write(await mapper(data), options)
|
|
40
|
+
};
|
|
41
|
+
};
|
|
42
|
+
|
|
43
|
+
// packages/core/echo/echo-pipeline/src/common/space-id.ts
|
|
44
|
+
import { subtleCrypto } from "@dxos/crypto";
|
|
45
|
+
import { PublicKey, SpaceId } from "@dxos/keys";
|
|
46
|
+
import { ComplexMap } from "@dxos/util";
|
|
47
|
+
var SPACE_IDS_CACHE = new ComplexMap(PublicKey.hash);
|
|
48
|
+
var createIdFromSpaceKey = async (spaceKey) => {
|
|
49
|
+
const cachedValue = SPACE_IDS_CACHE.get(spaceKey);
|
|
50
|
+
if (cachedValue !== void 0) {
|
|
51
|
+
return cachedValue;
|
|
52
|
+
}
|
|
53
|
+
const digest = await subtleCrypto.digest("SHA-256", spaceKey.asUint8Array());
|
|
54
|
+
const bytes = new Uint8Array(digest).slice(0, SpaceId.byteLength);
|
|
55
|
+
const spaceId = SpaceId.encode(bytes);
|
|
56
|
+
SPACE_IDS_CACHE.set(spaceKey, spaceId);
|
|
57
|
+
return spaceId;
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
// packages/core/echo/echo-pipeline/src/metadata/metadata-store.ts
|
|
61
|
+
import CRC32 from "crc-32";
|
|
62
|
+
import { Event, scheduleTaskInterval, synchronized } from "@dxos/async";
|
|
63
|
+
import { Context } from "@dxos/context";
|
|
64
|
+
import { invariant as invariant2 } from "@dxos/invariant";
|
|
65
|
+
import { PublicKey as PublicKey2 } from "@dxos/keys";
|
|
66
|
+
import { log } from "@dxos/log";
|
|
67
|
+
import { DataCorruptionError, STORAGE_VERSION } from "@dxos/protocols";
|
|
68
|
+
import { schema as schema2 } from "@dxos/protocols/proto";
|
|
69
|
+
import { Invitation, SpaceState } from "@dxos/protocols/proto/dxos/client/services";
|
|
70
|
+
import { ComplexMap as ComplexMap2, arrayToBuffer, forEachAsync, isNotNullOrUndefined } from "@dxos/util";
|
|
71
|
+
function _ts_decorate(decorators, target, key, desc) {
|
|
72
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
73
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
74
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
75
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
76
|
+
}
|
|
77
|
+
var __dxlog_file2 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/metadata/metadata-store.ts";
|
|
78
|
+
var EXPIRED_INVITATION_CLEANUP_INTERVAL = 60 * 60 * 1e3;
|
|
79
|
+
var emptyEchoMetadata = () => ({
|
|
80
|
+
version: STORAGE_VERSION,
|
|
81
|
+
spaces: [],
|
|
82
|
+
created: /* @__PURE__ */ new Date(),
|
|
83
|
+
updated: /* @__PURE__ */ new Date()
|
|
84
|
+
});
|
|
85
|
+
var emptyLargeSpaceMetadata = () => ({});
|
|
86
|
+
var EchoMetadata = schema2.getCodecForType("dxos.echo.metadata.EchoMetadata");
|
|
87
|
+
var LargeSpaceMetadata = schema2.getCodecForType("dxos.echo.metadata.LargeSpaceMetadata");
|
|
88
|
+
var MetadataStore = class {
|
|
89
|
+
constructor(directory) {
|
|
90
|
+
this._metadata = emptyEchoMetadata();
|
|
91
|
+
this._spaceLargeMetadata = new ComplexMap2(PublicKey2.hash);
|
|
92
|
+
this._metadataFile = void 0;
|
|
93
|
+
this.update = new Event();
|
|
94
|
+
this._invitationCleanupCtx = new Context(void 0, {
|
|
95
|
+
F: __dxlog_file2,
|
|
96
|
+
L: 55
|
|
97
|
+
});
|
|
98
|
+
this._directory = directory;
|
|
99
|
+
}
|
|
100
|
+
get metadata() {
|
|
101
|
+
return this._metadata;
|
|
102
|
+
}
|
|
103
|
+
get version() {
|
|
104
|
+
return this._metadata.version ?? 0;
|
|
105
|
+
}
|
|
106
|
+
/**
|
|
107
|
+
* Returns a list of currently saved spaces. The list and objects in it can be modified addSpace and
|
|
108
|
+
* addSpaceFeed functions.
|
|
109
|
+
*/
|
|
110
|
+
get spaces() {
|
|
111
|
+
return this._metadata.spaces ?? [];
|
|
112
|
+
}
|
|
113
|
+
async _readFile(file, codec2) {
|
|
114
|
+
try {
|
|
115
|
+
const { size: fileLength } = await file.stat();
|
|
116
|
+
if (fileLength < 8) {
|
|
117
|
+
return;
|
|
118
|
+
}
|
|
119
|
+
const dataSize = fromBytesInt32(await file.read(0, 4));
|
|
120
|
+
const checksum = fromBytesInt32(await file.read(4, 4));
|
|
121
|
+
log("loaded", {
|
|
122
|
+
size: dataSize,
|
|
123
|
+
checksum,
|
|
124
|
+
name: file.filename
|
|
125
|
+
}, {
|
|
126
|
+
F: __dxlog_file2,
|
|
127
|
+
L: 91,
|
|
128
|
+
S: this,
|
|
129
|
+
C: (f, a) => f(...a)
|
|
130
|
+
});
|
|
131
|
+
if (fileLength < dataSize + 8) {
|
|
132
|
+
throw new DataCorruptionError("Metadata size is smaller than expected.", {
|
|
133
|
+
fileLength,
|
|
134
|
+
dataSize
|
|
135
|
+
});
|
|
136
|
+
}
|
|
137
|
+
const data = await file.read(8, dataSize);
|
|
138
|
+
const calculatedChecksum = CRC32.buf(data);
|
|
139
|
+
if (calculatedChecksum !== checksum) {
|
|
140
|
+
throw new DataCorruptionError("Metadata checksum is invalid.");
|
|
141
|
+
}
|
|
142
|
+
return codec2.decode(data);
|
|
143
|
+
} finally {
|
|
144
|
+
await file.close();
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
/**
|
|
148
|
+
* @internal
|
|
149
|
+
*/
|
|
150
|
+
async _writeFile(file, codec2, data) {
|
|
151
|
+
const encoded = arrayToBuffer(codec2.encode(data));
|
|
152
|
+
const checksum = CRC32.buf(encoded);
|
|
153
|
+
const result = Buffer.alloc(8 + encoded.length);
|
|
154
|
+
result.writeInt32LE(encoded.length, 0);
|
|
155
|
+
result.writeInt32LE(checksum, 4);
|
|
156
|
+
encoded.copy(result, 8);
|
|
157
|
+
await file.write(0, result);
|
|
158
|
+
log("saved", {
|
|
159
|
+
size: encoded.length,
|
|
160
|
+
checksum
|
|
161
|
+
}, {
|
|
162
|
+
F: __dxlog_file2,
|
|
163
|
+
L: 126,
|
|
164
|
+
S: this,
|
|
165
|
+
C: (f, a) => f(...a)
|
|
166
|
+
});
|
|
167
|
+
}
|
|
168
|
+
async close() {
|
|
169
|
+
await this._invitationCleanupCtx.dispose();
|
|
170
|
+
await this.flush();
|
|
171
|
+
await this._metadataFile?.close();
|
|
172
|
+
this._metadataFile = void 0;
|
|
173
|
+
this._metadata = emptyEchoMetadata();
|
|
174
|
+
this._spaceLargeMetadata.clear();
|
|
175
|
+
}
|
|
176
|
+
/**
|
|
177
|
+
* Loads metadata from persistent storage.
|
|
178
|
+
*/
|
|
179
|
+
async load() {
|
|
180
|
+
if (!this._metadataFile || this._metadataFile.closed) {
|
|
181
|
+
this._metadataFile = this._directory.getOrCreateFile("EchoMetadata");
|
|
182
|
+
}
|
|
183
|
+
try {
|
|
184
|
+
const metadata = await this._readFile(this._metadataFile, EchoMetadata);
|
|
185
|
+
if (metadata) {
|
|
186
|
+
this._metadata = metadata;
|
|
187
|
+
}
|
|
188
|
+
this._metadata.spaces?.forEach((space) => {
|
|
189
|
+
space.state ??= SpaceState.SPACE_ACTIVE;
|
|
190
|
+
});
|
|
191
|
+
} catch (err) {
|
|
192
|
+
log.error("failed to load metadata", {
|
|
193
|
+
err
|
|
194
|
+
}, {
|
|
195
|
+
F: __dxlog_file2,
|
|
196
|
+
L: 158,
|
|
197
|
+
S: this,
|
|
198
|
+
C: (f, a) => f(...a)
|
|
199
|
+
});
|
|
200
|
+
this._metadata = emptyEchoMetadata();
|
|
201
|
+
}
|
|
202
|
+
await forEachAsync([
|
|
203
|
+
this._metadata.identity?.haloSpace.key,
|
|
204
|
+
...this._metadata.spaces?.map((space) => space.key) ?? []
|
|
205
|
+
].filter(isNotNullOrUndefined), async (key) => {
|
|
206
|
+
try {
|
|
207
|
+
await this._loadSpaceLargeMetadata(key);
|
|
208
|
+
} catch (err) {
|
|
209
|
+
log.error("failed to load space large metadata", {
|
|
210
|
+
err
|
|
211
|
+
}, {
|
|
212
|
+
F: __dxlog_file2,
|
|
213
|
+
L: 170,
|
|
214
|
+
S: this,
|
|
215
|
+
C: (f, a) => f(...a)
|
|
216
|
+
});
|
|
217
|
+
}
|
|
218
|
+
});
|
|
219
|
+
scheduleTaskInterval(this._invitationCleanupCtx, async () => {
|
|
220
|
+
for (const invitation of this._metadata.invitations ?? []) {
|
|
221
|
+
if (hasInvitationExpired(invitation) || isLegacyInvitationFormat(invitation)) {
|
|
222
|
+
await this.removeInvitation(invitation.invitationId);
|
|
223
|
+
}
|
|
224
|
+
}
|
|
225
|
+
}, EXPIRED_INVITATION_CLEANUP_INTERVAL);
|
|
226
|
+
}
|
|
227
|
+
async _save() {
|
|
228
|
+
const data = {
|
|
229
|
+
...this._metadata,
|
|
230
|
+
version: STORAGE_VERSION,
|
|
231
|
+
created: this._metadata.created ?? /* @__PURE__ */ new Date(),
|
|
232
|
+
updated: /* @__PURE__ */ new Date()
|
|
233
|
+
};
|
|
234
|
+
this.update.emit(data);
|
|
235
|
+
const file = this._directory.getOrCreateFile("EchoMetadata");
|
|
236
|
+
await this._writeFile(file, EchoMetadata, data);
|
|
237
|
+
}
|
|
238
|
+
async _loadSpaceLargeMetadata(key) {
|
|
239
|
+
const file = this._directory.getOrCreateFile(`space_${key.toHex()}_large`);
|
|
240
|
+
try {
|
|
241
|
+
const metadata = await this._readFile(file, LargeSpaceMetadata);
|
|
242
|
+
if (metadata) {
|
|
243
|
+
this._spaceLargeMetadata.set(key, metadata);
|
|
244
|
+
}
|
|
245
|
+
} catch (err) {
|
|
246
|
+
log.error("failed to load space large metadata", {
|
|
247
|
+
err
|
|
248
|
+
}, {
|
|
249
|
+
F: __dxlog_file2,
|
|
250
|
+
L: 212,
|
|
251
|
+
S: this,
|
|
252
|
+
C: (f, a) => f(...a)
|
|
253
|
+
});
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
async _saveSpaceLargeMetadata(key) {
|
|
257
|
+
const data = this._getLargeSpaceMetadata(key);
|
|
258
|
+
const file = this._directory.getOrCreateFile(`space_${key.toHex()}_large`);
|
|
259
|
+
await this._writeFile(file, LargeSpaceMetadata, data);
|
|
260
|
+
}
|
|
261
|
+
async flush() {
|
|
262
|
+
await this._directory.flush();
|
|
263
|
+
}
|
|
264
|
+
_getSpace(spaceKey) {
|
|
265
|
+
if (this._metadata.identity?.haloSpace.key.equals(spaceKey)) {
|
|
266
|
+
return this._metadata.identity.haloSpace;
|
|
267
|
+
}
|
|
268
|
+
const space = this.spaces.find((space2) => space2.key === spaceKey);
|
|
269
|
+
invariant2(space, "Space not found", {
|
|
270
|
+
F: __dxlog_file2,
|
|
271
|
+
L: 234,
|
|
272
|
+
S: this,
|
|
273
|
+
A: [
|
|
274
|
+
"space",
|
|
275
|
+
"'Space not found'"
|
|
276
|
+
]
|
|
277
|
+
});
|
|
278
|
+
return space;
|
|
279
|
+
}
|
|
280
|
+
hasSpace(spaceKey) {
|
|
281
|
+
if (this._metadata.identity?.haloSpace.key.equals(spaceKey)) {
|
|
282
|
+
return true;
|
|
283
|
+
}
|
|
284
|
+
return !!this.spaces.find((space) => space.key === spaceKey);
|
|
285
|
+
}
|
|
286
|
+
_getLargeSpaceMetadata(key) {
|
|
287
|
+
let entry = this._spaceLargeMetadata.get(key);
|
|
288
|
+
if (entry) {
|
|
289
|
+
return entry;
|
|
290
|
+
}
|
|
291
|
+
entry = emptyLargeSpaceMetadata();
|
|
292
|
+
this._spaceLargeMetadata.set(key, entry);
|
|
293
|
+
return entry;
|
|
294
|
+
}
|
|
295
|
+
/**
|
|
296
|
+
* Clears storage - doesn't work for now.
|
|
297
|
+
*/
|
|
298
|
+
async clear() {
|
|
299
|
+
log("clearing all metadata", void 0, {
|
|
300
|
+
F: __dxlog_file2,
|
|
301
|
+
L: 262,
|
|
302
|
+
S: this,
|
|
303
|
+
C: (f, a) => f(...a)
|
|
304
|
+
});
|
|
305
|
+
await this._directory.delete();
|
|
306
|
+
this._metadata = emptyEchoMetadata();
|
|
307
|
+
}
|
|
308
|
+
getIdentityRecord() {
|
|
309
|
+
return this._metadata.identity;
|
|
310
|
+
}
|
|
311
|
+
async setIdentityRecord(record) {
|
|
312
|
+
invariant2(!this._metadata.identity, "Cannot overwrite existing identity in metadata", {
|
|
313
|
+
F: __dxlog_file2,
|
|
314
|
+
L: 272,
|
|
315
|
+
S: this,
|
|
316
|
+
A: [
|
|
317
|
+
"!this._metadata.identity",
|
|
318
|
+
"'Cannot overwrite existing identity in metadata'"
|
|
319
|
+
]
|
|
320
|
+
});
|
|
321
|
+
this._metadata.identity = record;
|
|
322
|
+
await this._save();
|
|
323
|
+
await this.flush();
|
|
324
|
+
}
|
|
325
|
+
getInvitations() {
|
|
326
|
+
return this._metadata.invitations ?? [];
|
|
327
|
+
}
|
|
328
|
+
async addInvitation(invitation) {
|
|
329
|
+
if (this._metadata.invitations?.find((i) => i.invitationId === invitation.invitationId)) {
|
|
330
|
+
return;
|
|
331
|
+
}
|
|
332
|
+
(this._metadata.invitations ??= []).push(invitation);
|
|
333
|
+
await this._save();
|
|
334
|
+
await this.flush();
|
|
335
|
+
}
|
|
336
|
+
async removeInvitation(invitationId) {
|
|
337
|
+
this._metadata.invitations = (this._metadata.invitations ?? []).filter((i) => i.invitationId !== invitationId);
|
|
338
|
+
await this._save();
|
|
339
|
+
await this.flush();
|
|
340
|
+
}
|
|
341
|
+
async addSpace(record) {
|
|
342
|
+
invariant2(!(this._metadata.spaces ?? []).find((space) => space.key === record.key), "Cannot overwrite existing space in metadata", {
|
|
343
|
+
F: __dxlog_file2,
|
|
344
|
+
L: 300,
|
|
345
|
+
S: this,
|
|
346
|
+
A: [
|
|
347
|
+
"!(this._metadata.spaces ?? []).find((space) => space.key === record.key)",
|
|
348
|
+
"'Cannot overwrite existing space in metadata'"
|
|
349
|
+
]
|
|
350
|
+
});
|
|
351
|
+
(this._metadata.spaces ??= []).push(record);
|
|
352
|
+
await this._save();
|
|
353
|
+
await this.flush();
|
|
354
|
+
}
|
|
355
|
+
async setSpaceDataLatestTimeframe(spaceKey, timeframe) {
|
|
356
|
+
this._getSpace(spaceKey).dataTimeframe = timeframe;
|
|
357
|
+
await this._save();
|
|
358
|
+
}
|
|
359
|
+
async setSpaceControlLatestTimeframe(spaceKey, timeframe) {
|
|
360
|
+
this._getSpace(spaceKey).controlTimeframe = timeframe;
|
|
361
|
+
await this._save();
|
|
362
|
+
await this.flush();
|
|
363
|
+
}
|
|
364
|
+
async setCache(spaceKey, cache) {
|
|
365
|
+
this._getSpace(spaceKey).cache = cache;
|
|
366
|
+
await this._save();
|
|
367
|
+
}
|
|
368
|
+
async setWritableFeedKeys(spaceKey, controlFeedKey, dataFeedKey) {
|
|
369
|
+
const space = this._getSpace(spaceKey);
|
|
370
|
+
space.controlFeedKey = controlFeedKey;
|
|
371
|
+
space.dataFeedKey = dataFeedKey;
|
|
372
|
+
await this._save();
|
|
373
|
+
await this.flush();
|
|
374
|
+
}
|
|
375
|
+
async setSpaceState(spaceKey, state) {
|
|
376
|
+
this._getSpace(spaceKey).state = state;
|
|
377
|
+
await this._save();
|
|
378
|
+
await this.flush();
|
|
379
|
+
}
|
|
380
|
+
getSpaceControlPipelineSnapshot(spaceKey) {
|
|
381
|
+
return this._getLargeSpaceMetadata(spaceKey).controlPipelineSnapshot;
|
|
382
|
+
}
|
|
383
|
+
async setSpaceControlPipelineSnapshot(spaceKey, snapshot) {
|
|
384
|
+
this._getLargeSpaceMetadata(spaceKey).controlPipelineSnapshot = snapshot;
|
|
385
|
+
await this._saveSpaceLargeMetadata(spaceKey);
|
|
386
|
+
await this.flush();
|
|
387
|
+
}
|
|
388
|
+
getSpaceEdgeReplicationSetting(spaceKey) {
|
|
389
|
+
return this.hasSpace(spaceKey) ? this._getSpace(spaceKey).edgeReplication : void 0;
|
|
390
|
+
}
|
|
391
|
+
async setSpaceEdgeReplicationSetting(spaceKey, setting) {
|
|
392
|
+
this._getSpace(spaceKey).edgeReplication = setting;
|
|
393
|
+
await this._save();
|
|
394
|
+
await this.flush();
|
|
395
|
+
}
|
|
396
|
+
};
|
|
397
|
+
_ts_decorate([
|
|
398
|
+
synchronized
|
|
399
|
+
], MetadataStore.prototype, "load", null);
|
|
400
|
+
_ts_decorate([
|
|
401
|
+
synchronized
|
|
402
|
+
], MetadataStore.prototype, "_save", null);
|
|
403
|
+
_ts_decorate([
|
|
404
|
+
synchronized
|
|
405
|
+
], MetadataStore.prototype, "_saveSpaceLargeMetadata", null);
|
|
406
|
+
var fromBytesInt32 = (buf) => buf.readInt32LE(0);
|
|
407
|
+
var hasInvitationExpired = (invitation) => {
|
|
408
|
+
return Boolean(invitation.created && invitation.lifetime && invitation.lifetime !== 0 && invitation.created.getTime() + invitation.lifetime * 1e3 < Date.now());
|
|
409
|
+
};
|
|
410
|
+
var isLegacyInvitationFormat = (invitation) => {
|
|
411
|
+
return invitation.type === Invitation.Type.MULTIUSE;
|
|
412
|
+
};
|
|
413
|
+
|
|
414
|
+
// packages/core/echo/echo-pipeline/src/pipeline/timeframe-clock.ts
|
|
415
|
+
import { Event as Event2 } from "@dxos/async";
|
|
416
|
+
import { timed } from "@dxos/debug";
|
|
417
|
+
import { log as log2 } from "@dxos/log";
|
|
418
|
+
import { Timeframe } from "@dxos/timeframe";
|
|
419
|
+
function _ts_decorate2(decorators, target, key, desc) {
|
|
420
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
421
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
422
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
423
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
424
|
+
}
|
|
425
|
+
var __dxlog_file3 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/pipeline/timeframe-clock.ts";
|
|
426
|
+
var mapTimeframeToFeedIndexes = (timeframe) => timeframe.frames().map(([feedKey, index]) => ({
|
|
427
|
+
feedKey,
|
|
428
|
+
index
|
|
429
|
+
}));
|
|
430
|
+
var mapFeedIndexesToTimeframe = (indexes) => new Timeframe(indexes.map(({ feedKey, index }) => [
|
|
431
|
+
feedKey,
|
|
432
|
+
index
|
|
433
|
+
]));
|
|
434
|
+
var startAfter = (timeframe) => timeframe.frames().map(([feedKey, index]) => ({
|
|
435
|
+
feedKey,
|
|
436
|
+
index: index + 1
|
|
437
|
+
}));
|
|
438
|
+
var TimeframeClock = class {
|
|
439
|
+
constructor(_timeframe = new Timeframe()) {
|
|
440
|
+
this._timeframe = _timeframe;
|
|
441
|
+
this.update = new Event2();
|
|
442
|
+
this._pendingTimeframe = _timeframe;
|
|
443
|
+
}
|
|
444
|
+
/**
|
|
445
|
+
* Timeframe that was processed by ECHO.
|
|
446
|
+
*/
|
|
447
|
+
get timeframe() {
|
|
448
|
+
return this._timeframe;
|
|
449
|
+
}
|
|
450
|
+
/**
|
|
451
|
+
* Timeframe that is currently being processed by ECHO.
|
|
452
|
+
* Will be equal to `timeframe` after the processing is complete.
|
|
453
|
+
*/
|
|
454
|
+
get pendingTimeframe() {
|
|
455
|
+
return this._pendingTimeframe;
|
|
456
|
+
}
|
|
457
|
+
setTimeframe(timeframe) {
|
|
458
|
+
this._timeframe = timeframe;
|
|
459
|
+
this._pendingTimeframe = timeframe;
|
|
460
|
+
this.update.emit(this._timeframe);
|
|
461
|
+
}
|
|
462
|
+
updatePendingTimeframe(key, seq) {
|
|
463
|
+
this._pendingTimeframe = Timeframe.merge(this._pendingTimeframe, new Timeframe([
|
|
464
|
+
[
|
|
465
|
+
key,
|
|
466
|
+
seq
|
|
467
|
+
]
|
|
468
|
+
]));
|
|
469
|
+
}
|
|
470
|
+
updateTimeframe() {
|
|
471
|
+
this._timeframe = this._pendingTimeframe;
|
|
472
|
+
this.update.emit(this._timeframe);
|
|
473
|
+
}
|
|
474
|
+
hasGaps(timeframe) {
|
|
475
|
+
const gaps = Timeframe.dependencies(timeframe, this._timeframe);
|
|
476
|
+
return !gaps.isEmpty();
|
|
477
|
+
}
|
|
478
|
+
async waitUntilReached(target) {
|
|
479
|
+
log2("waitUntilReached", {
|
|
480
|
+
target,
|
|
481
|
+
current: this._timeframe
|
|
482
|
+
}, {
|
|
483
|
+
F: __dxlog_file3,
|
|
484
|
+
L: 70,
|
|
485
|
+
S: this,
|
|
486
|
+
C: (f, a) => f(...a)
|
|
487
|
+
});
|
|
488
|
+
await this.update.waitForCondition(() => {
|
|
489
|
+
log2("check if reached", {
|
|
490
|
+
target,
|
|
491
|
+
current: this._timeframe,
|
|
492
|
+
deps: Timeframe.dependencies(target, this._timeframe)
|
|
493
|
+
}, {
|
|
494
|
+
F: __dxlog_file3,
|
|
495
|
+
L: 72,
|
|
496
|
+
S: this,
|
|
497
|
+
C: (f, a) => f(...a)
|
|
498
|
+
});
|
|
499
|
+
return Timeframe.dependencies(target, this._timeframe).isEmpty();
|
|
500
|
+
});
|
|
501
|
+
}
|
|
502
|
+
};
|
|
503
|
+
_ts_decorate2([
|
|
504
|
+
timed(5e3)
|
|
505
|
+
], TimeframeClock.prototype, "waitUntilReached", null);
|
|
506
|
+
|
|
507
|
+
// packages/core/echo/echo-pipeline/src/pipeline/pipeline.ts
|
|
508
|
+
import { Event as Event3, sleepWithContext, synchronized as synchronized2, Trigger } from "@dxos/async";
|
|
509
|
+
import { Context as Context2, rejectOnDispose } from "@dxos/context";
|
|
510
|
+
import { failUndefined } from "@dxos/debug";
|
|
511
|
+
import { FeedSetIterator } from "@dxos/feed-store";
|
|
512
|
+
import { invariant as invariant4 } from "@dxos/invariant";
|
|
513
|
+
import { PublicKey as PublicKey3 } from "@dxos/keys";
|
|
514
|
+
import { log as log4 } from "@dxos/log";
|
|
515
|
+
import { Timeframe as Timeframe2 } from "@dxos/timeframe";
|
|
516
|
+
import { ComplexMap as ComplexMap3 } from "@dxos/util";
|
|
517
|
+
|
|
518
|
+
// packages/core/echo/echo-pipeline/src/pipeline/message-selector.ts
|
|
519
|
+
import { invariant as invariant3 } from "@dxos/invariant";
|
|
520
|
+
import { log as log3 } from "@dxos/log";
|
|
521
|
+
var __dxlog_file4 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/pipeline/message-selector.ts";
|
|
522
|
+
var createMessageSelector = (timeframeClock) => {
|
|
523
|
+
return (messages) => {
|
|
524
|
+
for (let i = 0; i < messages.length; i++) {
|
|
525
|
+
const { data: { timeframe } } = messages[i];
|
|
526
|
+
invariant3(timeframe, void 0, {
|
|
527
|
+
F: __dxlog_file4,
|
|
528
|
+
L: 25,
|
|
529
|
+
S: void 0,
|
|
530
|
+
A: [
|
|
531
|
+
"timeframe",
|
|
532
|
+
""
|
|
533
|
+
]
|
|
534
|
+
});
|
|
535
|
+
if (!timeframeClock.hasGaps(timeframe)) {
|
|
536
|
+
return i;
|
|
537
|
+
}
|
|
538
|
+
}
|
|
539
|
+
log3("Skipping...", void 0, {
|
|
540
|
+
F: __dxlog_file4,
|
|
541
|
+
L: 33,
|
|
542
|
+
S: void 0,
|
|
543
|
+
C: (f, a) => f(...a)
|
|
544
|
+
});
|
|
545
|
+
};
|
|
546
|
+
};
|
|
547
|
+
|
|
548
|
+
// packages/core/echo/echo-pipeline/src/pipeline/pipeline.ts
|
|
549
|
+
function _ts_decorate3(decorators, target, key, desc) {
|
|
550
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
551
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
552
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
553
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
554
|
+
}
|
|
555
|
+
var __dxlog_file5 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/pipeline/pipeline.ts";
|
|
556
|
+
var PipelineState = class {
|
|
557
|
+
constructor(_feeds, _timeframeClock) {
|
|
558
|
+
this._feeds = _feeds;
|
|
559
|
+
this._timeframeClock = _timeframeClock;
|
|
560
|
+
this._ctx = new Context2(void 0, {
|
|
561
|
+
F: __dxlog_file5,
|
|
562
|
+
L: 41
|
|
563
|
+
});
|
|
564
|
+
this.timeframeUpdate = this._timeframeClock.update;
|
|
565
|
+
this.stalled = new Event3();
|
|
566
|
+
this._startTimeframe = new Timeframe2();
|
|
567
|
+
this._reachedTarget = false;
|
|
568
|
+
}
|
|
569
|
+
/**
|
|
570
|
+
* Latest theoretical timeframe based on the last mutation in each feed.
|
|
571
|
+
* NOTE: This might never be reached if the mutation dependencies
|
|
572
|
+
*/
|
|
573
|
+
// TODO(dmaretskyi): Rename `totalTimeframe`? or `lastTimeframe`.
|
|
574
|
+
get endTimeframe() {
|
|
575
|
+
return mapFeedIndexesToTimeframe(Array.from(this._feeds.values()).filter((feed) => feed.length > 0).map((feed) => ({
|
|
576
|
+
feedKey: feed.key,
|
|
577
|
+
index: feed.length - 1
|
|
578
|
+
})));
|
|
579
|
+
}
|
|
580
|
+
get startTimeframe() {
|
|
581
|
+
return this._startTimeframe;
|
|
582
|
+
}
|
|
583
|
+
get timeframe() {
|
|
584
|
+
return this._timeframeClock.timeframe;
|
|
585
|
+
}
|
|
586
|
+
get pendingTimeframe() {
|
|
587
|
+
return this._timeframeClock.pendingTimeframe;
|
|
588
|
+
}
|
|
589
|
+
get targetTimeframe() {
|
|
590
|
+
return this._targetTimeframe ? this._targetTimeframe : new Timeframe2();
|
|
591
|
+
}
|
|
592
|
+
get reachedTarget() {
|
|
593
|
+
return this._reachedTarget;
|
|
594
|
+
}
|
|
595
|
+
get feeds() {
|
|
596
|
+
return Array.from(this._feeds.values());
|
|
597
|
+
}
|
|
598
|
+
async waitUntilTimeframe(target) {
|
|
599
|
+
await this._timeframeClock.waitUntilReached(target);
|
|
600
|
+
}
|
|
601
|
+
setTargetTimeframe(target) {
|
|
602
|
+
this._targetTimeframe = target;
|
|
603
|
+
}
|
|
604
|
+
/**
|
|
605
|
+
* Wait until the pipeline processes all messages in the feed and reaches the target timeframe if that is set.
|
|
606
|
+
*
|
|
607
|
+
* This function will resolve immediately if the pipeline is stalled.
|
|
608
|
+
*
|
|
609
|
+
* @param timeout Timeout in milliseconds to specify the maximum wait time.
|
|
610
|
+
*/
|
|
611
|
+
async waitUntilReachedTargetTimeframe({ ctx = new Context2(void 0, {
|
|
612
|
+
F: __dxlog_file5,
|
|
613
|
+
L: 129
|
|
614
|
+
}), timeout, breakOnStall = true } = {}) {
|
|
615
|
+
log4("waitUntilReachedTargetTimeframe", {
|
|
616
|
+
timeout,
|
|
617
|
+
current: this.timeframe,
|
|
618
|
+
target: this.targetTimeframe
|
|
619
|
+
}, {
|
|
620
|
+
F: __dxlog_file5,
|
|
621
|
+
L: 133,
|
|
622
|
+
S: this,
|
|
623
|
+
C: (f, a) => f(...a)
|
|
624
|
+
});
|
|
625
|
+
this._reachedTargetPromise ??= Promise.race([
|
|
626
|
+
this._timeframeClock.update.waitForCondition(() => {
|
|
627
|
+
return Timeframe2.dependencies(this.targetTimeframe, this.timeframe).isEmpty();
|
|
628
|
+
}),
|
|
629
|
+
...breakOnStall ? [
|
|
630
|
+
this.stalled.discardParameter().waitForCount(1)
|
|
631
|
+
] : []
|
|
632
|
+
]);
|
|
633
|
+
let done = false;
|
|
634
|
+
if (timeout) {
|
|
635
|
+
return Promise.race([
|
|
636
|
+
rejectOnDispose(ctx),
|
|
637
|
+
rejectOnDispose(this._ctx),
|
|
638
|
+
this._reachedTargetPromise.then(() => {
|
|
639
|
+
done = true;
|
|
640
|
+
this._reachedTarget = true;
|
|
641
|
+
}),
|
|
642
|
+
sleepWithContext(this._ctx, timeout).then(() => {
|
|
643
|
+
if (done) {
|
|
644
|
+
return;
|
|
645
|
+
}
|
|
646
|
+
log4.warn("waitUntilReachedTargetTimeframe timed out", {
|
|
647
|
+
timeout,
|
|
648
|
+
current: this.timeframe,
|
|
649
|
+
target: this.targetTimeframe,
|
|
650
|
+
dependencies: Timeframe2.dependencies(this.targetTimeframe, this.timeframe)
|
|
651
|
+
}, {
|
|
652
|
+
F: __dxlog_file5,
|
|
653
|
+
L: 161,
|
|
654
|
+
S: this,
|
|
655
|
+
C: (f, a) => f(...a)
|
|
656
|
+
});
|
|
657
|
+
})
|
|
658
|
+
]);
|
|
659
|
+
} else {
|
|
660
|
+
return this._reachedTargetPromise;
|
|
661
|
+
}
|
|
662
|
+
}
|
|
663
|
+
};
|
|
664
|
+
// Orders and replays messages from a set of hypercore-style feeds as a single
// totally-ordered stream, tracking progress with a timeframe clock.
// NOTE: compiled bundler output. The inline object literals with F/L/S/A/C
// fields are call-site metadata injected by the @dxos/log build transform
// (source file path, source line, scope, stringified arguments, call wrapper)
// — they are runtime data and must not be edited.
var Pipeline = class {
  constructor() {
    // Clock over (feedKey, seq) pairs marking which messages were processed.
    this._timeframeClock = new TimeframeClock(new Timeframe2());
    // All feeds registered with the pipeline, keyed by feed public key.
    this._feeds = new ComplexMap3(PublicKey3.hash);
    // External state accessor.
    this._state = new PipelineState(this._feeds, this._timeframeClock);
    // Waits for the message consumer to process the message and yield control back to the pipeline.
    this._processingTrigger = new Trigger().wake();
    // When reset, consume() blocks before pulling the next message (pause support).
    this._pauseTrigger = new Trigger().wake();
    // Pending downloads.
    this._downloads = new ComplexMap3((value) => PublicKey3.hash(value.key));
    this._isStopping = false;
    this._isStarted = false;
    this._isBeingConsumed = false;
    this._isPaused = false;
  }
  // External read-only view of pipeline progress (timeframes, stall events).
  get state() {
    return this._state;
  }
  // Mapped feed writer; throws via invariant if setWriteFeed() was never called.
  get writer() {
    invariant4(this._writer, "Writer not set.", {
      F: __dxlog_file5,
      L: 243,
      S: this,
      A: [
        "this._writer",
        "'Writer not set.'"
      ]
    });
    return this._writer;
  }
  hasFeed(feedKey) {
    return this._feeds.has(feedKey);
  }
  // NOTE(review): reads this._feedSetIterator unguarded — presumably only
  // called after start(); confirm with callers.
  getFeeds() {
    return this._feedSetIterator.feeds;
  }
  // NOTE: This cannot be synchronized with `stop` because stop waits for the mutation processing to complete,
  // which might be opening feeds during the mutation processing, which w
  async addFeed(feed) {
    this._feeds.set(feed.key, feed);
    if (this._feedSetIterator) {
      await this._feedSetIterator.addFeed(feed);
    }
    // Only kick off downloads for a running, un-paused pipeline.
    if (this._isStarted && !this._isPaused) {
      this._setFeedDownloadState(feed);
    }
  }
  // Designates the single writable feed; each written message is stamped with
  // the current timeframe so readers can order it.
  setWriteFeed(feed) {
    invariant4(!this._writer, "Writer already set.", {
      F: __dxlog_file5,
      L: 270,
      S: this,
      A: [
        "!this._writer",
        "'Writer already set.'"
      ]
    });
    invariant4(feed.properties.writable, "Feed must be writable.", {
      F: __dxlog_file5,
      L: 271,
      S: this,
      A: [
        "feed.properties.writable",
        "'Feed must be writable.'"
      ]
    });
    this._writer = createMappedFeedWriter((payload) => ({
      timeframe: this._timeframeClock.timeframe,
      payload
    }), feed.createFeedWriter());
  }
  // Opens the feed iterator and begins downloading all known feeds.
  // Decorated with @synchronized below (mutually exclusive with stop/setCursor/pause/unpause).
  async start() {
    invariant4(!this._isStarted, "Pipeline is already started.", {
      F: __dxlog_file5,
      L: 284,
      S: this,
      A: [
        "!this._isStarted",
        "'Pipeline is already started.'"
      ]
    });
    log4("starting...", void 0, {
      F: __dxlog_file5,
      L: 285,
      S: this,
      C: (f, a) => f(...a)
    });
    await this._initIterator();
    await this._feedSetIterator.open();
    this._isStarted = true;
    log4("started", void 0, {
      F: __dxlog_file5,
      L: 289,
      S: this,
      C: (f, a) => f(...a)
    });
    if (!this._isPaused) {
      for (const feed of this._feeds.values()) {
        this._setFeedDownloadState(feed);
      }
    }
  }
  // Cancels downloads, drains in-flight message processing, and resets the
  // external state's context so the pipeline can be restarted.
  async stop() {
    log4("stopping...", void 0, {
      F: __dxlog_file5,
      L: 300,
      S: this,
      C: (f, a) => f(...a)
    });
    this._isStopping = true;
    for (const [feed, handle] of this._downloads.entries()) {
      feed.undownload(handle);
    }
    this._downloads.clear();
    await this._feedSetIterator?.close();
    // Wait for the consumer to finish the message it is currently processing.
    await this._processingTrigger.wait();
    await this._state._ctx.dispose();
    this._state._ctx = new Context2(void 0, {
      F: __dxlog_file5,
      L: 309
    });
    this._state._reachedTargetPromise = void 0;
    this._state._reachedTarget = false;
    this._isStarted = false;
    log4("stopped", void 0, {
      F: __dxlog_file5,
      L: 313,
      S: this,
      C: (f, a) => f(...a)
    });
  }
  /**
   * @param timeframe Timeframe of already processed messages.
   * The pipeline will start processing messages AFTER this timeframe.
   */
  async setCursor(timeframe) {
    invariant4(!this._isStarted || this._isPaused, "Invalid state.", {
      F: __dxlog_file5,
      L: 322,
      S: this,
      A: [
        "!this._isStarted || this._isPaused",
        "'Invalid state.'"
      ]
    });
    this._state._startTimeframe = timeframe;
    this._timeframeClock.setTimeframe(timeframe);
    // If an iterator already exists it must be rebuilt at the new position.
    if (this._feedSetIterator) {
      await this._feedSetIterator.close();
      await this._initIterator();
      await this._feedSetIterator.open();
    }
  }
  /**
   * Calling pause while processing will cause a deadlock.
   */
  async pause() {
    if (this._isPaused) {
      return;
    }
    this._pauseTrigger.reset();
    // Let the in-flight message finish before reporting paused.
    await this._processingTrigger.wait();
    this._isPaused = true;
  }
  async unpause() {
    invariant4(this._isPaused, "Pipeline is not paused.", {
      F: __dxlog_file5,
      L: 351,
      S: this,
      A: [
        "this._isPaused",
        "'Pipeline is not paused.'"
      ]
    });
    this._pauseTrigger.wake();
    this._isPaused = false;
    // Resume downloads that were suppressed while paused.
    for (const feed of this._feeds.values()) {
      this._setFeedDownloadState(feed);
    }
  }
  /**
   * Starts to iterate over the ordered messages from the added feeds.
   * Updates the timeframe clock after the message has bee processed.
   */
  async *consume() {
    invariant4(!this._isBeingConsumed, "Pipeline is already being consumed.", {
      F: __dxlog_file5,
      L: 366,
      S: this,
      A: [
        "!this._isBeingConsumed",
        "'Pipeline is already being consumed.'"
      ]
    });
    this._isBeingConsumed = true;
    invariant4(this._feedSetIterator, "Iterator not initialized.", {
      F: __dxlog_file5,
      L: 369,
      S: this,
      A: [
        "this._feedSetIterator",
        "'Iterator not initialized.'"
      ]
    });
    let lastFeedSetIterator = this._feedSetIterator;
    let iterable = lastFeedSetIterator[Symbol.asyncIterator]();
    while (!this._isStopping) {
      await this._pauseTrigger.wait();
      // setCursor() may have swapped the iterator while we were paused.
      if (lastFeedSetIterator !== this._feedSetIterator) {
        invariant4(this._feedSetIterator, "Iterator not initialized.", {
          F: __dxlog_file5,
          L: 378,
          S: this,
          A: [
            "this._feedSetIterator",
            "'Iterator not initialized.'"
          ]
        });
        lastFeedSetIterator = this._feedSetIterator;
        iterable = lastFeedSetIterator[Symbol.asyncIterator]();
      }
      const { done, value } = await iterable.next();
      if (!done) {
        const block = value ?? failUndefined();
        // Block stop()/pause() until the consumer returns control (post-yield).
        this._processingTrigger.reset();
        this._timeframeClock.updatePendingTimeframe(PublicKey3.from(block.feedKey), block.seq);
        yield block;
        this._processingTrigger.wake();
        this._timeframeClock.updateTimeframe();
      }
    }
    this._isBeingConsumed = false;
  }
  // (Re)registers a linear download starting just after the last processed
  // sequence number for the given feed.
  _setFeedDownloadState(feed) {
    let handle = this._downloads.get(feed);
    if (handle) {
      feed.undownload(handle);
    }
    const timeframe = this._state._startTimeframe;
    // -1 means no messages processed yet, so download starts at index 0.
    const seq = timeframe.get(feed.key) ?? -1;
    log4("download", {
      feed: feed.key.truncate(),
      seq,
      length: feed.length
    }, {
      F: __dxlog_file5,
      L: 407,
      S: this,
      C: (f, a) => f(...a)
    });
    handle = feed.download({
      start: seq + 1,
      linear: true
    }, (err, data) => {
      // Download errors are intentionally ignored (best-effort prefetch).
      if (err) {
      } else {
        log4.info("downloaded", {
          data
        }, {
          F: __dxlog_file5,
          L: 412,
          S: this,
          C: (f, a) => f(...a)
        });
      }
    });
    this._downloads.set(feed, handle);
  }
  // Builds a fresh FeedSetIterator positioned after the current timeframe and
  // wires its stall event through to the external state.
  async _initIterator() {
    this._feedSetIterator = new FeedSetIterator(createMessageSelector(this._timeframeClock), {
      start: startAfter(this._timeframeClock.timeframe),
      stallTimeout: 1e3
    });
    this._feedSetIterator.stalled.on((iterator) => {
      log4.warn(`Stalled after ${iterator.options.stallTimeout}ms with ${iterator.size} feeds.`, void 0, {
        F: __dxlog_file5,
        L: 426,
        S: this,
        C: (f, a) => f(...a)
      });
      this._state.stalled.emit();
    });
    for (const feed of this._feeds.values()) {
      await this._feedSetIterator.addFeed(feed);
    }
  }
};
|
|
952
|
+
// Apply the @synchronized decorator to every Pipeline lifecycle method so
// that no two of them run concurrently. Equivalent to the unrolled
// per-method _ts_decorate3 calls emitted by the TypeScript compiler.
for (const lifecycleMethod of ["start", "stop", "setCursor", "pause", "unpause"]) {
  _ts_decorate3([
    synchronized2
  ], Pipeline.prototype, lifecycleMethod, null);
}
|
|
967
|
+
|
|
968
|
+
// packages/core/echo/echo-pipeline/src/space/auth.ts
|
|
969
|
+
import { runInContext, scheduleTask } from "@dxos/async";
|
|
970
|
+
import { Context as Context3 } from "@dxos/context";
|
|
971
|
+
import { randomBytes } from "@dxos/crypto";
|
|
972
|
+
import { invariant as invariant5 } from "@dxos/invariant";
|
|
973
|
+
import { log as log5 } from "@dxos/log";
|
|
974
|
+
import { schema as schema3 } from "@dxos/protocols/proto";
|
|
975
|
+
import { RpcExtension } from "@dxos/teleport";
|
|
976
|
+
// Source-path constant embedded by the @dxos/log build transform; referenced
// as the F (file) field of the log/invariant call-site metadata in this section.
var __dxlog_file6 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/space/auth.ts";
|
|
977
|
+
// Teleport RPC extension performing mutual challenge/response authentication:
// each side issues a random challenge, the peer answers with a credential from
// the provider, and the verifier checks it. On success onAuthSuccess fires;
// any failure closes the connection and fires onAuthFailure.
// NOTE: compiled output — inline F/L/S/A/C object literals are @dxos/log
// call-site metadata and must be preserved verbatim.
var AuthExtension = class extends RpcExtension {
  constructor(_authParams) {
    super({
      // Both sides expose and request the same AuthService (symmetric handshake).
      requested: {
        AuthService: schema3.getService("dxos.mesh.teleport.auth.AuthService")
      },
      exposed: {
        AuthService: schema3.getService("dxos.mesh.teleport.auth.AuthService")
      },
      // RPC timeout: 60 seconds.
      timeout: 60 * 1e3
    });
    this._authParams = _authParams;
    this._ctx = new Context3({
      onError: (err) => {
        log5.catch(err, void 0, {
          F: __dxlog_file6,
          L: 28,
          S: this,
          C: (f, a) => f(...a)
        });
      }
    }, {
      F: __dxlog_file6,
      L: 26
    });
  }
  // Serves the peer's challenge: asks the local provider for a credential.
  // Provider errors are logged but surfaced to the peer only as the generic
  // "auth rejected" error (no detail leakage).
  async getHandlers() {
    return {
      AuthService: {
        authenticate: async ({ challenge }) => {
          try {
            const credential = await this._authParams.provider(challenge);
            if (!credential) {
              throw new Error("auth rejected");
            }
            return {
              credential
            };
          } catch (err) {
            log5.error("failed to generate auth credentials", err, {
              F: __dxlog_file6,
              L: 55,
              S: this,
              C: (f, a) => f(...a)
            });
            throw new Error("auth rejected");
          }
        }
      }
    };
  }
  // Kicks off the outgoing challenge once the RPC channel is open. Runs as a
  // scheduled task on this._ctx so it is cancelled on close/abort.
  async onOpen(context) {
    await super.onOpen(context);
    scheduleTask(this._ctx, async () => {
      try {
        const challenge = randomBytes(32);
        const { credential } = await this.rpc.AuthService.authenticate({
          challenge
        });
        invariant5(credential?.length > 0, "invalid credential", {
          F: __dxlog_file6,
          L: 69,
          S: this,
          A: [
            "credential?.length > 0",
            "'invalid credential'"
          ]
        });
        const success = await this._authParams.verifier(challenge, credential);
        invariant5(success, "credential not verified", {
          F: __dxlog_file6,
          L: 71,
          S: this,
          A: [
            "success",
            "'credential not verified'"
          ]
        });
        runInContext(this._ctx, () => this._authParams.onAuthSuccess());
      } catch (err) {
        log5("auth failed", err, {
          F: __dxlog_file6,
          L: 74,
          S: this,
          C: (f, a) => f(...a)
        });
        // NOTE(review): close() is not awaited here — presumably fire-and-forget;
        // confirm against RpcExtension.close semantics.
        this.close();
        this._authParams.onAuthFailure();
      }
    });
  }
  async onClose() {
    await this._ctx.dispose();
    await super.onClose();
  }
  async onAbort() {
    await this._ctx.dispose();
    await super.onAbort();
  }
};
|
|
1077
|
+
|
|
1078
|
+
// packages/core/echo/echo-pipeline/src/space/space.ts
|
|
1079
|
+
import { Event as Event4, scheduleMicroTask, synchronized as synchronized3, trackLeaks as trackLeaks2 } from "@dxos/async";
|
|
1080
|
+
import { Resource } from "@dxos/context";
|
|
1081
|
+
import { invariant as invariant6 } from "@dxos/invariant";
|
|
1082
|
+
import { log as log7, logInfo } from "@dxos/log";
|
|
1083
|
+
import { AdmittedFeed as AdmittedFeed2 } from "@dxos/protocols/proto/dxos/halo/credentials";
|
|
1084
|
+
import { trace as trace2 } from "@dxos/tracing";
|
|
1085
|
+
import { Callback as Callback2 } from "@dxos/util";
|
|
1086
|
+
|
|
1087
|
+
// packages/core/echo/echo-pipeline/src/space/control-pipeline.ts
|
|
1088
|
+
import { DeferredTask, sleepWithContext as sleepWithContext2, trackLeaks } from "@dxos/async";
|
|
1089
|
+
import { Context as Context4 } from "@dxos/context";
|
|
1090
|
+
import { SpaceStateMachine } from "@dxos/credentials";
|
|
1091
|
+
import { PublicKey as PublicKey4 } from "@dxos/keys";
|
|
1092
|
+
import { log as log6 } from "@dxos/log";
|
|
1093
|
+
import { AdmittedFeed } from "@dxos/protocols/proto/dxos/halo/credentials";
|
|
1094
|
+
import { Timeframe as Timeframe3 } from "@dxos/timeframe";
|
|
1095
|
+
import { TimeSeriesCounter, TimeUsageCounter, trace } from "@dxos/tracing";
|
|
1096
|
+
import { Callback, tracer } from "@dxos/util";
|
|
1097
|
+
// Standard TypeScript-emitted decorator helper (equivalent to tslib __decorate).
// Applies `decorators` right-to-left to a class (2–3 args) or to a member
// descriptor (4 args), preferring Reflect.decorate when available, and
// redefines the property when a decorator returns a replacement descriptor.
function _ts_decorate4(decorators, target, key, desc) {
  // c: argument count distinguishing class decoration (<3) from member decoration.
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
  else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
  return c > 3 && r && Object.defineProperty(target, key, r), r;
}
|
|
1103
|
+
// Source-path constant embedded by the @dxos/log transform; used as the F
// (file) field of log/invariant call-site metadata in this section.
var __dxlog_file7 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/space/control-pipeline.ts";
// Minimum interval (ms) between persisted target-timeframe writes (see
// ControlPipeline._noteTargetStateIfNeeded).
var TIMEFRAME_SAVE_DEBOUNCE_INTERVAL = 500;
// Delay (ms, 10s) before a scheduled control-pipeline snapshot is taken.
var CONTROL_PIPELINE_SNAPSHOT_DELAY = 1e4;
// Feature flag: when true, start() replays a stored snapshot before consuming feeds.
var USE_SNAPSHOTS = true;
|
|
1107
|
+
// Wraps a Pipeline over a space's control feeds and feeds every credential
// message into the SpaceStateMachine; periodically snapshots the processed
// state and persists the latest timeframe to the metadata store.
// NOTE: compiled output — inline F/L/S/A/C object literals are @dxos/log
// call-site metadata and must be preserved verbatim.
var ControlPipeline = class {
  constructor({ spaceKey, genesisFeed, feedProvider, metadataStore }) {
    this._ctx = new Context4(void 0, {
      F: __dxlog_file7,
      L: 47
    });
    this._lastTimeframeSaveTime = Date.now();
    this.onFeedAdmitted = new Callback();
    // Tracing counters (registered as metrics via the decorators below).
    this._usage = new TimeUsageCounter();
    this._mutations = new TimeSeriesCounter();
    // Debounced snapshot: sleeps CONTROL_PIPELINE_SNAPSHOT_DELAY, then persists.
    this._snapshotTask = new DeferredTask(this._ctx, async () => {
      await sleepWithContext2(this._ctx, CONTROL_PIPELINE_SNAPSHOT_DELAY);
      await this._saveSnapshot();
    });
    this._spaceKey = spaceKey;
    this._metadata = metadataStore;
    this._pipeline = new Pipeline();
    // Fire-and-forget: the genesis feed is always part of the control pipeline.
    void this._pipeline.addFeed(genesisFeed);
    this._spaceStateMachine = new SpaceStateMachine(spaceKey);
    this._spaceStateMachine.onFeedAdmitted.set(async (info) => {
      log6("feed admitted", {
        key: info.key
      }, {
        F: __dxlog_file7,
        L: 82,
        S: this,
        C: (f, a) => f(...a)
      });
      // Newly admitted CONTROL feeds (other than genesis, already added) are
      // opened asynchronously and attached to the pipeline.
      if (info.assertion.designation === AdmittedFeed.Designation.CONTROL && !info.key.equals(genesisFeed.key)) {
        queueMicrotask(async () => {
          try {
            const feed = await feedProvider(info.key);
            if (!this._pipeline.hasFeed(feed.key)) {
              await this._pipeline.addFeed(feed);
            }
          } catch (err) {
            log6.catch(err, void 0, {
              F: __dxlog_file7,
              L: 93,
              S: this,
              C: (f, a) => f(...a)
            });
          }
        });
      }
      await this.onFeedAdmitted.callIfSet(info);
    });
    // Re-export state-machine callbacks as part of this class's surface.
    this.onMemberRoleChanged = this._spaceStateMachine.onMemberRoleChanged;
    this.onCredentialProcessed = this._spaceStateMachine.onCredentialProcessed;
    this.onDelegatedInvitation = this._spaceStateMachine.onDelegatedInvitation;
    this.onDelegatedInvitationRemoved = this._spaceStateMachine.onDelegatedInvitationRemoved;
  }
  get spaceState() {
    return this._spaceStateMachine;
  }
  get pipeline() {
    return this._pipeline;
  }
  async setWriteFeed(feed) {
    await this._pipeline.addFeed(feed);
    this._pipeline.setWriteFeed(feed);
  }
  // Loads an optional snapshot, starts consuming on the next tick, then starts
  // the underlying pipeline.
  async start() {
    const snapshot = this._metadata.getSpaceControlPipelineSnapshot(this._spaceKey);
    log6("load snapshot", {
      key: this._spaceKey,
      present: !!snapshot,
      tf: snapshot?.timeframe
    }, {
      F: __dxlog_file7,
      L: 123,
      S: this,
      C: (f, a) => f(...a)
    });
    if (USE_SNAPSHOTS && snapshot) {
      await this._processSnapshot(snapshot);
    }
    log6("starting...", void 0, {
      F: __dxlog_file7,
      L: 128,
      S: this,
      C: (f, a) => f(...a)
    });
    // Consumption loop runs detached; its errors are handled per-message below.
    setTimeout(async () => {
      void this._consumePipeline(new Context4(void 0, {
        F: __dxlog_file7,
        L: 130
      }));
    });
    await this._pipeline.start();
    log6("started", void 0, {
      F: __dxlog_file7,
      L: 134,
      S: this,
      C: (f, a) => f(...a)
    });
  }
  // Advances the pipeline cursor past the snapshotted timeframe and replays the
  // stored credentials into the state machine without re-verification.
  async _processSnapshot(snapshot) {
    await this._pipeline.setCursor(snapshot.timeframe);
    for (const message of snapshot.messages ?? []) {
      const result = await this._spaceStateMachine.process(message.credential, {
        sourceFeed: message.feedKey,
        skipVerification: true
      });
      if (!result) {
        log6.warn("credential processing failed from snapshot", {
          message
        }, {
          F: __dxlog_file7,
          L: 147,
          S: this,
          C: (f, a) => f(...a)
        });
      }
    }
  }
  // Pauses the pipeline to capture a consistent (timeframe, credentials) pair,
  // then persists it to the metadata store.
  async _saveSnapshot() {
    await this._pipeline.pause();
    const snapshot = {
      timeframe: this._pipeline.state.timeframe,
      messages: this._spaceStateMachine.credentialEntries.map((entry) => ({
        feedKey: entry.sourceFeed,
        credential: entry.credential
      }))
    };
    await this._pipeline.unpause();
    log6("save snapshot", {
      key: this._spaceKey,
      snapshot: getSnapshotLoggerContext(snapshot)
    }, {
      F: __dxlog_file7,
      L: 163,
      S: this,
      C: (f, a) => f(...a)
    });
    await this._metadata.setSpaceControlPipelineSnapshot(this._spaceKey, snapshot);
  }
  // Drains the pipeline's ordered message stream, timing and counting each
  // message; per-message errors are logged and do not stop the loop.
  async _consumePipeline(ctx) {
    for await (const msg of this._pipeline.consume()) {
      const span = this._usage.beginRecording();
      this._mutations.inc();
      try {
        await this._processMessage(ctx, msg);
      } catch (err) {
        log6.catch(err, void 0, {
          F: __dxlog_file7,
          L: 176,
          S: this,
          C: (f, a) => f(...a)
        });
      }
      span.end();
    }
  }
  // Feeds a single credential message into the state machine; on success notes
  // the pending timeframe and schedules a (debounced) snapshot.
  async _processMessage(ctx, msg) {
    log6("processing", {
      key: msg.feedKey,
      seq: msg.seq
    }, {
      F: __dxlog_file7,
      L: 186,
      S: this,
      C: (f, a) => f(...a)
    });
    if (msg.data.payload.credential) {
      const timer = tracer.mark("dxos.echo.pipeline.control");
      const result = await this._spaceStateMachine.process(msg.data.payload.credential.credential, {
        sourceFeed: PublicKey4.from(msg.feedKey)
      });
      timer.end();
      if (!result) {
        log6.warn("processing failed", {
          msg
        }, {
          F: __dxlog_file7,
          L: 195,
          S: this,
          C: (f, a) => f(...a)
        });
      } else {
        await this._noteTargetStateIfNeeded(this._pipeline.state.pendingTimeframe);
      }
      this._snapshotTask.schedule();
    }
  }
  // Persists the target timeframe at most once per TIMEFRAME_SAVE_DEBOUNCE_INTERVAL.
  async _noteTargetStateIfNeeded(timeframe) {
    if (Date.now() - this._lastTimeframeSaveTime > TIMEFRAME_SAVE_DEBOUNCE_INTERVAL) {
      this._lastTimeframeSaveTime = Date.now();
      await this._saveTargetTimeframe(timeframe);
    }
  }
  async stop() {
    log6("stopping...", void 0, {
      F: __dxlog_file7,
      L: 215,
      S: this,
      C: (f, a) => f(...a)
    });
    await this._ctx.dispose();
    await this._pipeline.stop();
    // Persist final progress so restart resumes from the right place.
    await this._saveTargetTimeframe(this._pipeline.state.timeframe);
    log6("stopped", void 0, {
      F: __dxlog_file7,
      L: 219,
      S: this,
      C: (f, a) => f(...a)
    });
  }
  // Best-effort: merges with the previously saved timeframe (never goes
  // backwards) and writes it; persistence failures are logged and swallowed.
  async _saveTargetTimeframe(timeframe) {
    try {
      const newTimeframe = Timeframe3.merge(this._targetTimeframe ?? new Timeframe3(), timeframe);
      await this._metadata.setSpaceControlLatestTimeframe(this._spaceKey, newTimeframe);
      this._targetTimeframe = newTimeframe;
    } catch (err) {
      log6(err, void 0, {
        F: __dxlog_file7,
        L: 228,
        S: this,
        C: (f, a) => f(...a)
      });
    }
  }
};
|
|
1330
|
+
// Compiler-emitted decorator applications for ControlPipeline:
// register the two counters as tracing metrics.
_ts_decorate4([
  trace.metricsCounter()
], ControlPipeline.prototype, "_usage", void 0);
_ts_decorate4([
  trace.metricsCounter()
], ControlPipeline.prototype, "_mutations", void 0);
// Wrap lifecycle/processing methods in tracing spans.
_ts_decorate4([
  trace.span({
    showInBrowserTimeline: true
  })
], ControlPipeline.prototype, "start", null);
_ts_decorate4([
  trace.span()
], ControlPipeline.prototype, "_consumePipeline", null);
_ts_decorate4([
  trace.span()
], ControlPipeline.prototype, "_processMessage", null);
// Class decorators: register as a traced resource and track start/stop leaks.
// NOTE: this rebinds the ControlPipeline variable to the decorated class.
ControlPipeline = _ts_decorate4([
  trace.resource(),
  trackLeaks("start", "stop")
], ControlPipeline);
|
|
1351
|
+
// Condenses a control-pipeline snapshot into a compact, log-friendly summary:
// one { issuer, subject, type } record per stored credential message.
// Returns undefined when the snapshot has no `messages` array.
var getSnapshotLoggerContext = (snapshot) => snapshot.messages?.map(({ credential }) => ({
  issuer: credential.issuer,
  subject: credential.subject.id,
  type: credential.subject.assertion["@type"]
}));
|
|
1363
|
+
|
|
1364
|
+
// packages/core/echo/echo-pipeline/src/space/space.ts
|
|
1365
|
+
// Standard TypeScript-emitted decorator helper (equivalent to tslib __decorate);
// duplicated per-module by the bundler. Applies `decorators` right-to-left to a
// class (2–3 args) or member descriptor (4 args), using Reflect.decorate when
// available, and redefines the property if a decorator returned a descriptor.
function _ts_decorate5(decorators, target, key, desc) {
  // c: argument count distinguishing class decoration (<3) from member decoration.
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
  else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
  return c > 3 && r && Object.defineProperty(target, key, r), r;
}
|
|
1371
|
+
// Source-path constant embedded by the @dxos/log transform; referenced as the
// F (file) field of log/invariant call-site metadata in the Space section.
var __dxlog_file8 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/space/space.ts";
|
|
1372
|
+
var Space = class extends Resource {
|
|
1373
|
+
constructor(params) {
|
|
1374
|
+
super();
|
|
1375
|
+
this.onCredentialProcessed = new Callback2();
|
|
1376
|
+
this.stateUpdate = new Event4();
|
|
1377
|
+
invariant6(params.spaceKey && params.feedProvider, void 0, {
|
|
1378
|
+
F: __dxlog_file8,
|
|
1379
|
+
L: 71,
|
|
1380
|
+
S: this,
|
|
1381
|
+
A: [
|
|
1382
|
+
"params.spaceKey && params.feedProvider",
|
|
1383
|
+
""
|
|
1384
|
+
]
|
|
1385
|
+
});
|
|
1386
|
+
this._id = params.id;
|
|
1387
|
+
this._key = params.spaceKey;
|
|
1388
|
+
this._genesisFeedKey = params.genesisFeed.key;
|
|
1389
|
+
this._feedProvider = params.feedProvider;
|
|
1390
|
+
this._controlPipeline = new ControlPipeline({
|
|
1391
|
+
spaceKey: params.spaceKey,
|
|
1392
|
+
genesisFeed: params.genesisFeed,
|
|
1393
|
+
feedProvider: params.feedProvider,
|
|
1394
|
+
metadataStore: params.metadataStore
|
|
1395
|
+
});
|
|
1396
|
+
this._controlPipeline.onFeedAdmitted.set(async (info) => {
|
|
1397
|
+
const sparse = info.assertion.designation === AdmittedFeed2.Designation.DATA;
|
|
1398
|
+
if (!info.key.equals(params.genesisFeed.key)) {
|
|
1399
|
+
scheduleMicroTask(this._ctx, async () => {
|
|
1400
|
+
await this.protocol.addFeed(await params.feedProvider(info.key, {
|
|
1401
|
+
sparse
|
|
1402
|
+
}));
|
|
1403
|
+
});
|
|
1404
|
+
}
|
|
1405
|
+
});
|
|
1406
|
+
this._controlPipeline.onCredentialProcessed.set(async (credential) => {
|
|
1407
|
+
await this.onCredentialProcessed.callIfSet(credential);
|
|
1408
|
+
log7("onCredentialProcessed", {
|
|
1409
|
+
credential
|
|
1410
|
+
}, {
|
|
1411
|
+
F: __dxlog_file8,
|
|
1412
|
+
L: 98,
|
|
1413
|
+
S: this,
|
|
1414
|
+
C: (f, a) => f(...a)
|
|
1415
|
+
});
|
|
1416
|
+
this.stateUpdate.emit();
|
|
1417
|
+
});
|
|
1418
|
+
this._controlPipeline.onDelegatedInvitation.set(async (invitation) => {
|
|
1419
|
+
log7("onDelegatedInvitation", {
|
|
1420
|
+
invitation
|
|
1421
|
+
}, {
|
|
1422
|
+
F: __dxlog_file8,
|
|
1423
|
+
L: 102,
|
|
1424
|
+
S: this,
|
|
1425
|
+
C: (f, a) => f(...a)
|
|
1426
|
+
});
|
|
1427
|
+
await params.onDelegatedInvitationStatusChange(invitation, true);
|
|
1428
|
+
});
|
|
1429
|
+
this._controlPipeline.onDelegatedInvitationRemoved.set(async (invitation) => {
|
|
1430
|
+
log7("onDelegatedInvitationRemoved", {
|
|
1431
|
+
invitation
|
|
1432
|
+
}, {
|
|
1433
|
+
F: __dxlog_file8,
|
|
1434
|
+
L: 106,
|
|
1435
|
+
S: this,
|
|
1436
|
+
C: (f, a) => f(...a)
|
|
1437
|
+
});
|
|
1438
|
+
await params.onDelegatedInvitationStatusChange(invitation, false);
|
|
1439
|
+
});
|
|
1440
|
+
this._controlPipeline.onMemberRoleChanged.set(async (changedMembers) => {
|
|
1441
|
+
log7("onMemberRoleChanged", () => ({
|
|
1442
|
+
changedMembers: changedMembers.map((m) => [
|
|
1443
|
+
m.key,
|
|
1444
|
+
m.role
|
|
1445
|
+
])
|
|
1446
|
+
}), {
|
|
1447
|
+
F: __dxlog_file8,
|
|
1448
|
+
L: 110,
|
|
1449
|
+
S: this,
|
|
1450
|
+
C: (f, a) => f(...a)
|
|
1451
|
+
});
|
|
1452
|
+
await params.onMemberRolesChanged(changedMembers);
|
|
1453
|
+
});
|
|
1454
|
+
this.protocol = params.protocol;
|
|
1455
|
+
}
|
|
1456
|
+
get id() {
|
|
1457
|
+
return this._id;
|
|
1458
|
+
}
|
|
1459
|
+
get key() {
|
|
1460
|
+
return this._key;
|
|
1461
|
+
}
|
|
1462
|
+
get genesisFeedKey() {
|
|
1463
|
+
return this._genesisFeedKey;
|
|
1464
|
+
}
|
|
1465
|
+
get controlFeedKey() {
|
|
1466
|
+
return this._controlFeed?.key;
|
|
1467
|
+
}
|
|
1468
|
+
get dataFeedKey() {
|
|
1469
|
+
return this._dataFeed?.key;
|
|
1470
|
+
}
|
|
1471
|
+
get spaceState() {
|
|
1472
|
+
return this._controlPipeline.spaceState;
|
|
1473
|
+
}
|
|
1474
|
+
/**
|
|
1475
|
+
* @test-only
|
|
1476
|
+
*/
|
|
1477
|
+
get controlPipeline() {
|
|
1478
|
+
return this._controlPipeline.pipeline;
|
|
1479
|
+
}
|
|
1480
|
+
/**
 * Assigns the control feed exactly once and registers it as the control pipeline's
 * write feed. Throws (via invariant) if a control feed was already set.
 * Returns `this` for chaining.
 */
async setControlFeed(feed) {
  // Generated invariant call: the trailing object is @dxos/log call-site metadata
  // (source file, line, scope, original asserted expressions) — keep byte-exact.
  invariant6(!this._controlFeed, "Control feed already set.", {
    F: __dxlog_file8,
    L: 154,
    S: this,
    A: [
      "!this._controlFeed",
      "'Control feed already set.'"
    ]
  });
  this._controlFeed = feed;
  await this._controlPipeline.setWriteFeed(feed);
  return this;
}
|
|
1494
|
+
/**
 * Assigns the data feed exactly once. Throws (via invariant) if already set.
 * Unlike setControlFeed(), the feed is not registered with the control pipeline here.
 * Returns `this` for chaining.
 */
async setDataFeed(feed) {
  // Generated invariant call with @dxos/log call-site metadata — keep byte-exact.
  invariant6(!this._dataFeed, "Data feed already set.", {
    F: __dxlog_file8,
    L: 161,
    S: this,
    A: [
      "!this._dataFeed",
      "'Data feed already set.'"
    ]
  });
  this._dataFeed = feed;
  return this;
}
|
|
1507
|
+
/**
|
|
1508
|
+
* Use for diagnostics.
|
|
1509
|
+
*/
|
|
1510
|
+
getControlFeeds() {
|
|
1511
|
+
return Array.from(this._controlPipeline.spaceState.feeds.values());
|
|
1512
|
+
}
|
|
1513
|
+
/**
 * Opens the space: starts the control pipeline, then the swarm protocol, then
 * registers the genesis feed with the protocol for replication.
 * `ctx` is accepted but unused in this body (lifecycle context from the resource wrapper).
 * Decorated with trace2.span() below.
 */
async _open(ctx) {
  log7("opening...", void 0, {
    F: __dxlog_file8,
    L: 175,
    S: this,
    C: (f, a) => f(...a)
  });
  await this._controlPipeline.start();
  await this.protocol.start();
  // Open the genesis feed via the injected provider and hand it to the protocol.
  await this.protocol.addFeed(await this._feedProvider(this._genesisFeedKey));
  log7("opened", void 0, {
    F: __dxlog_file8,
    L: 182,
    S: this,
    C: (f, a) => f(...a)
  });
}
|
|
1530
|
+
/**
 * Closes the space in reverse order of _open(): stops the swarm protocol first,
 * then the control pipeline. Decorated with `synchronized` below, so concurrent
 * close calls are serialized.
 */
async _close() {
  log7("closing...", {
    key: this._key
  }, {
    F: __dxlog_file8,
    L: 187,
    S: this,
    C: (f, a) => f(...a)
  });
  await this.protocol.stop();
  await this._controlPipeline.stop();
  log7("closed", void 0, {
    F: __dxlog_file8,
    L: 193,
    S: this,
    C: (f, a) => f(...a)
  });
}
|
|
1548
|
+
};
|
|
1549
|
+
// Compiler-emitted decorator applications for Space (TS experimental decorators).
// logInfo/trace2.info expose members in log/trace output; trace2.span wraps _open in a
// tracing span; synchronized serializes _close; trackLeaks/trace2.resource instrument
// the class lifecycle. Presumed semantics from decorator names — behavior lives in
// @dxos/log, @dxos/tracing, @dxos/async.
_ts_decorate5([
  trace2.info()
], Space.prototype, "protocol", void 0);
_ts_decorate5([
  trace2.info()
], Space.prototype, "_controlPipeline", void 0);
_ts_decorate5([
  logInfo,
  trace2.info()
], Space.prototype, "id", null);
_ts_decorate5([
  logInfo,
  trace2.info()
], Space.prototype, "key", null);
_ts_decorate5([
  trace2.span()
], Space.prototype, "_open", null);
_ts_decorate5([
  synchronized3
], Space.prototype, "_close", null);
// Class decorators may replace the constructor, hence the reassignment.
Space = _ts_decorate5([
  trackLeaks2("open", "close"),
  trace2.resource()
], Space);
|
|
1573
|
+
|
|
1574
|
+
// packages/core/echo/echo-pipeline/src/space/admission-discovery-extension.ts
|
|
1575
|
+
import { scheduleTask as scheduleTask2 } from "@dxos/async";
|
|
1576
|
+
import { Context as Context5 } from "@dxos/context";
|
|
1577
|
+
import { ProtocolError } from "@dxos/protocols";
|
|
1578
|
+
import { schema as schema4 } from "@dxos/protocols/proto";
|
|
1579
|
+
import { RpcExtension as RpcExtension2 } from "@dxos/teleport";
|
|
1580
|
+
// Original source path recorded by the @dxos/log transform; referenced by log metadata below.
var __dxlog_file9 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/space/admission-discovery-extension.ts";
|
|
1581
|
+
/**
 * Teleport RPC extension (client side) that requests an admission credential from a
 * remote peer exposing AdmissionDiscoveryService, and delivers the result through the
 * `_onResult` trigger (woken with the credential on success).
 */
var CredentialRetrieverExtension = class extends RpcExtension2 {
  constructor(_request, _onResult) {
    // Declare the remote service this extension consumes.
    super({
      requested: {
        AdmissionDiscoveryService: schema4.getService("dxos.mesh.teleport.AdmissionDiscoveryService")
      }
    });
    this._request = _request;
    this._onResult = _onResult;
    // Context scoping the background request task; disposed on close/abort.
    this._ctx = new Context5(void 0, {
      F: __dxlog_file9,
      L: 26
    });
  }
  // Client-only extension: exposes no handlers of its own.
  async getHandlers() {
    return {};
  }
  async onOpen(context) {
    await super.onOpen(context);
    // Fire the credential request in the background as soon as the channel opens.
    scheduleTask2(this._ctx, async () => {
      try {
        const result = await this.rpc.AdmissionDiscoveryService.getAdmissionCredential(this._request);
        this._onResult.wake(result.admissionCredential);
      } catch (err) {
        // Propagate the failure by closing the extension context.
        // NOTE(review): close() is not awaited here (fire-and-forget) — confirm intended.
        context.close(err);
      }
    });
  }
  async onClose() {
    await this._ctx.dispose();
  }
  async onAbort() {
    await this._ctx.dispose();
  }
};
|
|
1616
|
+
/**
 * Teleport RPC extension (server side) that exposes AdmissionDiscoveryService and
 * answers getAdmissionCredential requests out of the space's member state.
 */
var CredentialServerExtension = class extends RpcExtension2 {
  constructor(_space) {
    // Declare the service this extension serves to remote peers.
    super({
      exposed: {
        AdmissionDiscoveryService: schema4.getService("dxos.mesh.teleport.AdmissionDiscoveryService")
      }
    });
    this._space = _space;
  }
  async getHandlers() {
    // Look up the requesting member and return its stored admission credential.
    const getAdmissionCredential = async (request) => {
      const member = this._space.spaceState.members.get(request.memberKey);
      const credential = member?.credential;
      if (!credential) {
        throw new ProtocolError("Space member not found.", request);
      }
      return {
        admissionCredential: credential
      };
    };
    return {
      AdmissionDiscoveryService: {
        getAdmissionCredential
      }
    };
  }
};
|
|
1641
|
+
|
|
1642
|
+
// packages/core/echo/echo-pipeline/src/space/space-protocol.ts
|
|
1643
|
+
import { discoveryKey, subtleCrypto as subtleCrypto2 } from "@dxos/crypto";
|
|
1644
|
+
import { PublicKey as PublicKey5 } from "@dxos/keys";
|
|
1645
|
+
import { log as log8, logInfo as logInfo2 } from "@dxos/log";
|
|
1646
|
+
import { MMSTTopology } from "@dxos/network-manager";
|
|
1647
|
+
import { Teleport } from "@dxos/teleport";
|
|
1648
|
+
import { BlobSync } from "@dxos/teleport-extension-object-sync";
|
|
1649
|
+
import { ReplicatorExtension } from "@dxos/teleport-extension-replicator";
|
|
1650
|
+
import { trace as trace3 } from "@dxos/tracing";
|
|
1651
|
+
import { CallbackCollection, ComplexMap as ComplexMap4 } from "@dxos/util";
|
|
1652
|
+
/**
 * TypeScript experimental-decorator runtime helper (compiler-emitted `__decorate`).
 * Applies `decorators` right-to-left to a class (2 args), a property descriptor
 * (4 args, `desc` possibly null → looked up), and writes the resulting descriptor
 * back via Object.defineProperty when one was produced.
 */
function _ts_decorate6(decorators, target, key, desc) {
  const argCount = arguments.length;
  let result;
  if (argCount < 3) {
    // Class decoration: start from the constructor itself.
    result = target;
  } else if (desc === null) {
    // Member decoration without an explicit descriptor: fetch it.
    desc = Object.getOwnPropertyDescriptor(target, key);
    result = desc;
  } else {
    result = desc;
  }
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function") {
    // Defer to a Reflect.decorate polyfill when present.
    result = Reflect.decorate(decorators, target, key, desc);
  } else {
    // Apply decorators in reverse declaration order; falsy entries are skipped,
    // and a decorator returning nothing keeps the previous result.
    for (let i = decorators.length - 1; i >= 0; i--) {
      const decorator = decorators[i];
      if (decorator) {
        result = (argCount < 3 ? decorator(result) : argCount > 3 ? decorator(target, key, result) : decorator(target, key)) || result;
      }
    }
  }
  if (argCount > 3 && result) {
    Object.defineProperty(target, key, result);
  }
  return result;
}
|
|
1658
|
+
// Original source path recorded by the @dxos/log transform; referenced by log metadata below.
var __dxlog_file10 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/space/space-protocol.ts";
// Stub swarm-auth pair for tests: the provider always yields the literal "mock"
// credential and the verifier accepts any credential unconditionally.
var MOCK_AUTH_PROVIDER = async (_nonce) => {
  return Buffer.from("mock");
};
var MOCK_AUTH_VERIFIER = async (_nonce, _credential) => {
  return true;
};
|
|
1661
|
+
/**
 * Swarm-level replication protocol for a single space: joins a swarm derived from
 * the space key, creates one SpaceProtocolSession per wire connection, and fans
 * feeds out to every session's replicator.
 */
var SpaceProtocol = class {
  constructor({ topic, swarmIdentity, networkManager, onSessionAuth, onAuthFailure, blobStore, disableP2pReplication }) {
    this._feeds = /* @__PURE__ */ new Set();
    // Sessions keyed by remote peer id.
    this._sessions = new ComplexMap4(PublicKey5.hash);
    // TODO(burdon): Move to config (with sensible defaults).
    this._topology = new MMSTTopology({
      originateConnections: 4,
      maxPeers: 10,
      sampleSize: 20
    });
    // Callbacks invoked (serially) whenever a feed is added via addFeed().
    this.feedAdded = new CallbackCollection();
    this._spaceKey = topic;
    this._networkManager = networkManager;
    this._swarmIdentity = swarmIdentity;
    this._onSessionAuth = onSessionAuth;
    this._onAuthFailure = onAuthFailure;
    this.blobSync = new BlobSync({
      blobStore
    });
    // Swarm topic = discovery key of SHA-256(space key); resolved asynchronously.
    this._topic = subtleCrypto2.digest("SHA-256", topic.asBuffer()).then(discoveryKey).then(PublicKey5.from);
    this._disableP2pReplication = disableP2pReplication ?? false;
  }
  get sessions() {
    return this._sessions;
  }
  get feeds() {
    return this._feeds;
  }
  get _ownPeerKey() {
    return this._swarmIdentity.peerKey;
  }
  // TODO(burdon): Create abstraction for Space (e.g., add keys and have provider).
  // Registers a feed for replication with every existing session and notifies listeners.
  async addFeed(feed) {
    log8("addFeed", {
      key: feed.key
    }, {
      F: __dxlog_file10,
      L: 128,
      S: this,
      C: (f, a) => f(...a)
    });
    this._feeds.add(feed);
    for (const session of this._sessions.values()) {
      session.replicator.addFeed(feed);
    }
    await this.feedAdded.callSerial(feed);
  }
  // TODO(burdon): Rename open? Common open/close interfaces for all services?
  // Joins the swarm. Idempotent: a second call while connected is a no-op.
  async start() {
    if (this._connection) {
      return;
    }
    // NOTE(review): `credentials` is passed to _createProtocolProvider but never used
    // inside it — per-session auth goes through swarmIdentity in SpaceProtocolSession.open().
    const credentials = await this._swarmIdentity.credentialProvider(Buffer.from(""));
    await this.blobSync.open();
    log8("starting...", void 0, {
      F: __dxlog_file10,
      L: 149,
      S: this,
      C: (f, a) => f(...a)
    });
    const topic = await this._topic;
    this._connection = await this._networkManager.joinSwarm({
      protocolProvider: this._createProtocolProvider(credentials),
      peerInfo: {
        peerKey: this._swarmIdentity.peerKey.toHex(),
        identityKey: this._swarmIdentity.identityKey.toHex()
      },
      topic,
      topology: this._topology,
      label: `swarm ${topic.truncate()} for space ${this._spaceKey.truncate()}`
    });
    log8("started", void 0, {
      F: __dxlog_file10,
      L: 162,
      S: this,
      C: (f, a) => f(...a)
    });
  }
  updateTopology() {
    this._topology.forceUpdate();
  }
  // Closes blob sync and leaves the swarm if connected.
  // NOTE(review): _connection is not cleared here, so a later start() early-returns —
  // confirm the single-use lifecycle is intended.
  async stop() {
    await this.blobSync.close();
    if (this._connection) {
      log8("stopping...", void 0, {
        F: __dxlog_file10,
        L: 173,
        S: this,
        C: (f, a) => f(...a)
      });
      await this._connection.close();
      log8("stopped", void 0, {
        F: __dxlog_file10,
        L: 175,
        S: this,
        C: (f, a) => f(...a)
      });
    }
  }
  // Factory handed to the network manager: builds a session per wire connection,
  // tracks it by remote peer id, and seeds it with all known feeds.
  _createProtocolProvider(credentials) {
    return (wireParams) => {
      const session = new SpaceProtocolSession({
        wireParams,
        swarmIdentity: this._swarmIdentity,
        onSessionAuth: this._onSessionAuth,
        onAuthFailure: this._onAuthFailure,
        blobSync: this.blobSync,
        disableP2pReplication: this._disableP2pReplication
      });
      this._sessions.set(wireParams.remotePeerId, session);
      for (const feed of this._feeds) {
        session.replicator.addFeed(feed);
      }
      return session;
    };
  }
};
|
|
1778
|
+
// Compiler-emitted decorator applications for SpaceProtocol: expose _topic/_spaceKey/
// _ownPeerKey in log/trace output and register the class as a traced resource.
_ts_decorate6([
  logInfo2,
  trace3.info()
], SpaceProtocol.prototype, "_topic", void 0);
_ts_decorate6([
  trace3.info()
], SpaceProtocol.prototype, "_spaceKey", void 0);
_ts_decorate6([
  logInfo2
], SpaceProtocol.prototype, "_ownPeerKey", null);
// Class decorator may replace the constructor, hence the reassignment.
SpaceProtocol = _ts_decorate6([
  trace3.resource()
], SpaceProtocol);
|
|
1791
|
+
// Session authentication states (TS string-enum emit): INITIAL → SUCCESS | FAILURE.
var AuthStatus;
(function (ns) {
  for (const status of ["INITIAL", "SUCCESS", "FAILURE"]) {
    ns[status] = status;
  }
})(AuthStatus || (AuthStatus = {}));
|
|
1797
|
+
/**
 * One Teleport session per swarm connection. On open it wires up the auth extension
 * (tracking authStatus), the feed replicator (unless P2P replication is disabled),
 * and blob sync.
 */
var SpaceProtocolSession = class {
  // TODO(dmaretskyi): Allow to pass in extra extensions.
  constructor({ wireParams, swarmIdentity, onSessionAuth, onAuthFailure, blobSync, disableP2pReplication }) {
    // TODO(dmaretskyi): Start with upload=false when switching it on the fly works.
    this.replicator = new ReplicatorExtension().setOptions({
      upload: true
    });
    // Starts INITIAL; moves to SUCCESS/FAILURE from the auth extension callbacks below.
    this._authStatus = "INITIAL";
    this._wireParams = wireParams;
    this._swarmIdentity = swarmIdentity;
    this._onSessionAuth = onSessionAuth;
    this._onAuthFailure = onAuthFailure;
    this._blobSync = blobSync;
    this._teleport = new Teleport(wireParams);
    this._disableP2pReplication = disableP2pReplication ?? false;
  }
  get authStatus() {
    return this._authStatus;
  }
  get stats() {
    return this._teleport.stats;
  }
  get stream() {
    return this._teleport.stream;
  }
  // Opens the teleport channel and registers the extensions. Auth extension is
  // registered first; replication is gated on authentication by the peer.
  async open(sessionId) {
    await this._teleport.open(sessionId);
    this._teleport.addExtension("dxos.mesh.teleport.auth", new AuthExtension({
      provider: this._swarmIdentity.credentialProvider,
      verifier: this._swarmIdentity.credentialAuthenticator,
      onAuthSuccess: () => {
        log8("Peer authenticated", void 0, {
          F: __dxlog_file10,
          L: 286,
          S: this,
          C: (f, a) => f(...a)
        });
        this._authStatus = "SUCCESS";
        // Hand the authenticated teleport session to the owner (e.g. to add more extensions).
        this._onSessionAuth?.(this._teleport);
      },
      onAuthFailure: () => {
        this._authStatus = "FAILURE";
        this._onAuthFailure?.(this._teleport);
      }
    }));
    if (!this._disableP2pReplication) {
      this._teleport.addExtension("dxos.mesh.teleport.replicator", this.replicator);
    }
    this._teleport.addExtension("dxos.mesh.teleport.blobsync", this._blobSync.createExtension());
  }
  async close() {
    log8("close", void 0, {
      F: __dxlog_file10,
      L: 305,
      S: this,
      C: (f, a) => f(...a)
    });
    await this._teleport.close();
  }
  async abort() {
    await this._teleport.abort();
  }
};
|
|
1860
|
+
// Compiler-emitted decorator applications: expose _wireParams and authStatus in log output.
_ts_decorate6([
  logInfo2
], SpaceProtocolSession.prototype, "_wireParams", void 0);
_ts_decorate6([
  logInfo2
], SpaceProtocolSession.prototype, "authStatus", null);
|
|
1866
|
+
|
|
1867
|
+
// packages/core/echo/echo-pipeline/src/space/space-manager.ts
|
|
1868
|
+
import { synchronized as synchronized4, trackLeaks as trackLeaks3, Trigger as Trigger2 } from "@dxos/async";
|
|
1869
|
+
import { failUndefined as failUndefined2 } from "@dxos/debug";
|
|
1870
|
+
import { PublicKey as PublicKey6 } from "@dxos/keys";
|
|
1871
|
+
import { log as log9 } from "@dxos/log";
|
|
1872
|
+
import { trace as trace4 } from "@dxos/protocols";
|
|
1873
|
+
import { ComplexMap as ComplexMap5 } from "@dxos/util";
|
|
1874
|
+
/**
 * TypeScript experimental-decorator runtime helper (compiler-emitted `__decorate`),
 * duplicated per chunk by the bundler. Applies `decorators` right-to-left to either
 * a class (2 args) or a member descriptor (4 args, null `desc` → looked up), and
 * writes the final descriptor back with Object.defineProperty when applicable.
 */
function _ts_decorate7(decorators, target, key, desc) {
  const argc = arguments.length;
  // Seed: constructor for class decoration, otherwise the (possibly fetched) descriptor.
  let result = argc < 3 ? target : desc === null ? (desc = Object.getOwnPropertyDescriptor(target, key)) : desc;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function") {
    // Defer to a Reflect.decorate polyfill when available.
    result = Reflect.decorate(decorators, target, key, desc);
  } else {
    // Reverse declaration order; falsy entries skipped; a decorator returning
    // nothing leaves the previous result in place.
    for (const decorator of [...decorators].reverse()) {
      if (!decorator) {
        continue;
      }
      const applied = argc < 3 ? decorator(result) : argc > 3 ? decorator(target, key, result) : decorator(target, key);
      result = applied || result;
    }
  }
  if (argc > 3 && result) {
    Object.defineProperty(target, key, result);
  }
  return result;
}
|
|
1880
|
+
// Original source path recorded by the @dxos/log transform; referenced by log metadata below.
var __dxlog_file11 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/space/space-manager.ts";
|
|
1881
|
+
/**
 * Constructs and tracks Space instances, wiring each one to the shared feed store,
 * network manager, metadata store, and blob store. Also performs one-shot admission
 * credential discovery over a temporary swarm protocol.
 */
var SpaceManager = class {
  constructor({ feedStore, networkManager, metadataStore, blobStore, disableP2pReplication }) {
    // Spaces keyed by space public key.
    this._spaces = new ComplexMap5(PublicKey6.hash);
    // Random id used to correlate begin/end trace events from this instance.
    this._instanceId = PublicKey6.random().toHex();
    this._feedStore = feedStore;
    this._networkManager = networkManager;
    this._metadataStore = metadataStore;
    this._blobStore = blobStore;
    this._disableP2pReplication = disableP2pReplication ?? false;
  }
  // TODO(burdon): Remove.
  get spaces() {
    return this._spaces;
  }
  // No-op; kept for the open/close lifecycle contract (decorated with synchronized below).
  async open() {
  }
  // Closes all constructed spaces in parallel.
  async close() {
    await Promise.all([
      ...this._spaces.values()
    ].map((space) => space.close()));
  }
  /**
   * Builds a Space from persisted metadata: opens the genesis feed, derives the
   * space id from the space key, creates the swarm protocol, and registers the
   * space in this._spaces. Does not open the space.
   */
  async constructSpace({ metadata, swarmIdentity, onAuthorizedConnection, onAuthFailure, onDelegatedInvitationStatusChange, onMemberRolesChanged, memberKey }) {
    log9.trace("dxos.echo.space-manager.construct-space", trace4.begin({
      id: this._instanceId
    }), {
      F: __dxlog_file11,
      L: 100,
      S: this,
      C: (f, a) => f(...a)
    });
    // NOTE(review): logs genesisFeedKey under the "spaceKey" label — confirm intended.
    log9("constructing space...", {
      spaceKey: metadata.genesisFeedKey
    }, {
      F: __dxlog_file11,
      L: 101,
      S: this,
      C: (f, a) => f(...a)
    });
    const genesisFeed = await this._feedStore.openFeed(metadata.genesisFeedKey ?? failUndefined2());
    const spaceKey = metadata.key;
    const spaceId = await createIdFromSpaceKey(spaceKey);
    const protocol = new SpaceProtocol({
      topic: spaceKey,
      swarmIdentity,
      networkManager: this._networkManager,
      onSessionAuth: onAuthorizedConnection,
      onAuthFailure,
      blobStore: this._blobStore,
      disableP2pReplication: this._disableP2pReplication
    });
    const space = new Space({
      id: spaceId,
      spaceKey,
      protocol,
      genesisFeed,
      feedProvider: (feedKey, opts) => this._feedStore.openFeed(feedKey, opts),
      metadataStore: this._metadataStore,
      memberKey,
      onDelegatedInvitationStatusChange,
      onMemberRolesChanged
    });
    this._spaces.set(space.key, space);
    log9.trace("dxos.echo.space-manager.construct-space", trace4.end({
      id: this._instanceId
    }), {
      F: __dxlog_file11,
      L: 131,
      S: this,
      C: (f, a) => f(...a)
    });
    return space;
  }
  /**
   * One-shot admission credential discovery: joins the space swarm with a temporary
   * protocol, attaches a CredentialRetrieverExtension on each authenticated session,
   * and waits (with params.timeout) for any peer to deliver the credential.
   * The temporary protocol is always stopped in `finally`.
   */
  async requestSpaceAdmissionCredential(params) {
    const traceKey = "dxos.echo.space-manager.request-space-admission";
    log9.trace(traceKey, trace4.begin({
      id: this._instanceId
    }), {
      F: __dxlog_file11,
      L: 137,
      S: this,
      C: (f, a) => f(...a)
    });
    log9("requesting space admission credential...", {
      spaceKey: params.spaceKey
    }, {
      F: __dxlog_file11,
      L: 138,
      S: this,
      C: (f, a) => f(...a)
    });
    const onCredentialResolved = new Trigger2();
    const protocol = new SpaceProtocol({
      topic: params.spaceKey,
      swarmIdentity: params.swarmIdentity,
      networkManager: this._networkManager,
      onSessionAuth: (session) => {
        session.addExtension("dxos.mesh.teleport.admission-discovery", new CredentialRetrieverExtension({
          spaceKey: params.spaceKey,
          memberKey: params.identityKey
        }, onCredentialResolved));
      },
      onAuthFailure: (session) => session.close(),
      blobStore: this._blobStore,
      disableP2pReplication: this._disableP2pReplication
    });
    try {
      await protocol.start();
      const credential = await onCredentialResolved.wait({
        timeout: params.timeout
      });
      log9.trace(traceKey, trace4.end({
        id: this._instanceId
      }), {
        F: __dxlog_file11,
        L: 162,
        S: this,
        C: (f, a) => f(...a)
      });
      return credential;
    } catch (err) {
      log9.trace(traceKey, trace4.error({
        id: this._instanceId,
        error: err
      }), {
        F: __dxlog_file11,
        L: 165,
        S: this,
        C: (f, a) => f(...a)
      });
      throw err;
    } finally {
      await protocol.stop();
    }
  }
};
|
|
2016
|
+
// Compiler-emitted decorator applications: serialize open/close and instrument the
// class for open/close leak tracking.
_ts_decorate7([
  synchronized4
], SpaceManager.prototype, "open", null);
_ts_decorate7([
  synchronized4
], SpaceManager.prototype, "close", null);
// Class decorator may replace the constructor, hence the reassignment.
SpaceManager = _ts_decorate7([
  trackLeaks3("open", "close")
], SpaceManager);
|
|
2025
|
+
|
|
2026
|
+
// Public surface of this bundler chunk (re-exported by the package entry points).
export {
  Buffer,
  codec,
  valueEncoding,
  createMappedFeedWriter,
  createIdFromSpaceKey,
  MetadataStore,
  hasInvitationExpired,
  mapTimeframeToFeedIndexes,
  mapFeedIndexesToTimeframe,
  startAfter,
  TimeframeClock,
  Pipeline,
  AuthExtension,
  Space,
  CredentialRetrieverExtension,
  CredentialServerExtension,
  MOCK_AUTH_PROVIDER,
  MOCK_AUTH_VERIFIER,
  SpaceProtocol,
  AuthStatus,
  SpaceProtocolSession,
  SpaceManager
};
|
|
2050
|
+
//# sourceMappingURL=chunk-PESZVYAN.mjs.map
|