@relayfile/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/acl.d.ts +34 -0
- package/dist/acl.js +163 -0
- package/dist/events.d.ts +23 -0
- package/dist/events.js +39 -0
- package/dist/export.d.ts +16 -0
- package/dist/export.js +137 -0
- package/dist/files.d.ts +66 -0
- package/dist/files.js +240 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +17 -0
- package/dist/operations.d.ts +20 -0
- package/dist/operations.js +94 -0
- package/dist/query.d.ts +30 -0
- package/dist/query.js +138 -0
- package/dist/semantics.d.ts +15 -0
- package/dist/semantics.js +90 -0
- package/dist/storage.d.ts +113 -0
- package/dist/storage.js +9 -0
- package/dist/tree.d.ts +35 -0
- package/dist/tree.js +106 -0
- package/dist/utils.d.ts +11 -0
- package/dist/utils.js +32 -0
- package/dist/webhooks.d.ts +82 -0
- package/dist/webhooks.js +493 -0
- package/dist/writeback.d.ts +24 -0
- package/dist/writeback.js +148 -0
- package/package.json +39 -0
package/dist/files.js
ADDED
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File CRUD operations.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - handleWriteFile → writeFile()
|
|
6
|
+
* - handleReadFile → readFile()
|
|
7
|
+
* - handleDeleteFile → deleteFile()
|
|
8
|
+
* - normalizeIfMatchHeader → normalizeIfMatch()
|
|
9
|
+
* - revision conflict detection
|
|
10
|
+
* - content encoding normalization/validation
|
|
11
|
+
* - provider inference from path
|
|
12
|
+
*/
|
|
13
|
+
import { normalizePath } from "./utils.js";
|
|
14
|
+
import { normalizeSemantics } from "./semantics.js";
|
|
15
|
+
// ---------------------------------------------------------------------------
|
|
16
|
+
// Functions — agent-3: extract from workspace.ts
|
|
17
|
+
// ---------------------------------------------------------------------------
|
|
18
|
+
/**
 * Normalize an HTTP `If-Match`-style precondition value.
 *
 * Accepted forms:
 *   - missing/blank  → ""  (callers map "" to a missing_precondition error)
 *   - "*" or "0"     → returned verbatim (wildcard / create-only sentinels)
 *   - W/"<rev>"      → weak prefix and surrounding quotes stripped
 *   - "<rev>"        → surrounding quotes stripped
 *
 * @param {string | null | undefined} value raw precondition value
 * @returns {string} bare revision, a sentinel, or "" when absent/blank
 */
export function normalizeIfMatch(value) {
    // Tolerate an absent header: writeFile/deleteFile pass req.ifMatch
    // straight through, and a missing field should yield a clean
    // missing_precondition result instead of a TypeError on .trim().
    const trimmed = value?.trim() ?? "";
    if (!trimmed) {
        return "";
    }
    if (trimmed === "*" || trimmed === "0") {
        return trimmed;
    }
    // Strip an RFC 9110 weak-validator prefix before unquoting.
    const weak = trimmed.startsWith("W/") ? trimmed.slice(2).trim() : trimmed;
    if (weak.startsWith('"') && weak.endsWith('"') && weak.length >= 2) {
        return weak.slice(1, -1);
    }
    return weak;
}
|
|
32
|
+
// Re-export normalizePath so existing importers (e.g. webhooks.ts) are not broken.
|
|
33
|
+
export { normalizePath };
|
|
34
|
+
export function inferProvider(path) {
|
|
35
|
+
const normalized = normalizePath(path).slice(1);
|
|
36
|
+
const [provider = ""] = normalized.split("/", 1);
|
|
37
|
+
return provider.trim().toLowerCase();
|
|
38
|
+
}
|
|
39
|
+
/**
 * Validate and apply a conditional file upsert, then queue the provider
 * write-back.
 *
 * Storage side effects happen in a fixed order that downstream consumers
 * observe: putFile → putOperation → appendEvent → enqueueWriteback.
 *
 * @param storage StorageAdapter supplying ids, persistence and the queue
 * @param req     { path, ifMatch, content, encoding?, contentType?,
 *                  correlationId?, semantics? }
 * @returns { ok: true, result } with queued-operation info, or
 *          { ok: false, error } where error is a string code or a
 *          structured conflict object.
 */
export function writeFile(storage, req) {
    // Reject blank paths before doing any work.
    if (!req.path.trim()) {
        return { ok: false, error: "invalid_input" };
    }
    const path = normalizePath(req.path);
    const ifMatch = normalizeIfMatch(req.ifMatch);
    // Writes are always conditional: a missing If-Match is an error,
    // never a blind overwrite.
    if (!ifMatch) {
        return { ok: false, error: "missing_precondition" };
    }
    if (typeof req.content !== "string") {
        return { ok: false, error: "invalid_input" };
    }
    const encoding = normalizeEncoding(req.encoding);
    // Unknown encodings and undecodable base64 payloads are both rejected.
    if (!encoding || !validateEncodedContent(req.content, encoding)) {
        return { ok: false, error: "invalid_input" };
    }
    // Size cap is measured on the decoded bytes (see MAX_FILE_BYTES).
    if (encodedSize(req.content, encoding) > MAX_FILE_BYTES) {
        return { ok: false, error: "invalid_input" };
    }
    const existing = storage.getFile(path);
    // "0" means create-only and "*" means unconditional; any concrete
    // revision requires that the file already exists.
    if (!existing && ifMatch !== "0" && ifMatch !== "*") {
        return { ok: false, error: "not_found" };
    }
    // Revision mismatch on an existing file is reported as a structured
    // conflict so callers can retry with the current revision.
    if (existing && ifMatch !== "*" && ifMatch !== existing.revision) {
        return {
            ok: false,
            error: {
                type: "conflict",
                expectedRevision: ifMatch,
                currentRevision: existing.revision,
            },
        };
    }
    const revision = storage.nextRevision();
    const now = new Date().toISOString();
    // Keep the provider sticky across updates; infer from the path only
    // on first create.
    const provider = existing?.provider || inferProvider(path);
    const correlationId = req.correlationId ?? "";
    const opId = storage.nextOperationId();
    storage.putFile({
        path,
        revision,
        contentType: req.contentType?.trim() || DEFAULT_CONTENT_TYPE,
        content: req.content,
        encoding,
        provider,
        lastEditedAt: now,
        semantics: normalizeSemantics(req.semantics),
    });
    // Track the pending provider write as an operation row.
    storage.putOperation({
        opId,
        path,
        revision,
        action: "file_upsert",
        provider,
        status: "pending",
        attemptCount: 0,
        lastError: null,
        nextAttemptAt: null,
        correlationId,
    });
    // Emit a change event, distinguishing create from update.
    storage.appendEvent({
        eventId: storage.nextEventId(),
        type: existing ? "file.updated" : "file.created",
        path,
        revision,
        origin: "agent_write",
        provider,
        correlationId,
        timestamp: now,
    });
    // Queue the asynchronous write-back to the upstream provider; the
    // queue entry reuses the operation id.
    storage.enqueueWriteback({
        id: opId,
        workspaceId: storage.getWorkspaceId(),
        path,
        revision,
        correlationId,
    });
    return {
        ok: true,
        result: {
            opId,
            status: "queued",
            targetRevision: revision,
            writeback: {
                provider,
                state: "pending",
            },
        },
    };
}
|
|
129
|
+
/**
 * Look up a file row by path.
 *
 * @param {object} storage StorageAdapter with getFile()
 * @param {string} path raw path; blank strings short-circuit to null
 * @returns the stored row, or whatever storage returns for a miss
 */
export function readFile(storage, path) {
    const trimmed = path.trim();
    if (trimmed === "") {
        return null;
    }
    return storage.getFile(normalizePath(path));
}
|
|
135
|
+
/**
 * Validate and apply a conditional file delete, then queue the provider
 * write-back.
 *
 * Storage side effects occur in a fixed order:
 * deleteFile → putOperation → appendEvent → enqueueWriteback.
 *
 * @param storage StorageAdapter supplying ids, persistence and the queue
 * @param req     { path, ifMatch, correlationId? }
 * @returns { ok: true, result } with queued-operation info, or
 *          { ok: false, error } (string code or structured conflict).
 */
export function deleteFile(storage, req) {
    // Reject blank paths before doing any work.
    if (!req.path.trim()) {
        return { ok: false, error: "invalid_input" };
    }
    const path = normalizePath(req.path);
    const ifMatch = normalizeIfMatch(req.ifMatch);
    // Deletes are always conditional, same as writes.
    if (!ifMatch) {
        return { ok: false, error: "missing_precondition" };
    }
    const existing = storage.getFile(path);
    if (!existing) {
        return { ok: false, error: "not_found" };
    }
    // "*" skips the revision check; anything else must match the stored
    // revision exactly (note: "0" is not special here, unlike writeFile).
    if (ifMatch !== "*" && ifMatch !== existing.revision) {
        return {
            ok: false,
            error: {
                type: "conflict",
                expectedRevision: ifMatch,
                currentRevision: existing.revision,
            },
        };
    }
    // The delete still consumes a fresh revision number for the tombstone
    // operation/event records.
    const revision = storage.nextRevision();
    const now = new Date().toISOString();
    const correlationId = req.correlationId ?? "";
    const opId = storage.nextOperationId();
    storage.deleteFile(path);
    // Track the pending provider delete as an operation row.
    storage.putOperation({
        opId,
        path,
        revision,
        action: "file_delete",
        provider: existing.provider,
        status: "pending",
        attemptCount: 0,
        lastError: null,
        nextAttemptAt: null,
        correlationId,
    });
    // Emit the deletion event with the provider captured before removal.
    storage.appendEvent({
        eventId: storage.nextEventId(),
        type: "file.deleted",
        path,
        revision,
        origin: "agent_write",
        provider: existing.provider,
        correlationId,
        timestamp: now,
    });
    // Queue the asynchronous write-back; the entry reuses the op id.
    storage.enqueueWriteback({
        id: opId,
        workspaceId: storage.getWorkspaceId(),
        path,
        revision,
        correlationId,
    });
    return {
        ok: true,
        result: {
            opId,
            status: "queued",
            targetRevision: revision,
            writeback: {
                provider: existing.provider,
                state: "pending",
            },
        },
    };
}
|
|
205
|
+
/** Content type applied when a write request omits one. */
export const DEFAULT_CONTENT_TYPE = "text/markdown";
/** Hard cap on a file's decoded size: 10 MiB. */
export const MAX_FILE_BYTES = 10 * 1024 * 1024;

/**
 * Map a user-supplied encoding label onto one of the two supported
 * canonical encodings. Absent or blank input defaults to "utf-8";
 * unrecognized labels yield null so callers can reject the request.
 */
function normalizeEncoding(value) {
    const label = (value ?? "").trim().toLowerCase();
    switch (label) {
        case "":
        case "utf-8":
        case "utf8":
            return "utf-8";
        case "base64":
            return "base64";
        default:
            return null;
    }
}
|
|
217
|
+
/**
 * Check that `content` is decodable under `encoding`.
 * utf-8 strings are accepted as-is; base64 must survive an atob() decode.
 */
function validateEncodedContent(content, encoding) {
    // Only base64 payloads can be structurally malformed.
    if (encoding === "base64") {
        try {
            atob(content);
        }
        catch {
            return false;
        }
    }
    return true;
}
|
|
229
|
+
/**
 * Byte size of `content` after decoding.
 * base64 → decoded byte count, falling back to the raw string length when
 * the payload is not valid base64; anything else → UTF-8 byte count.
 */
export function encodedSize(content, encoding) {
    if (encoding !== "base64") {
        return new TextEncoder().encode(content).byteLength;
    }
    try {
        const decoded = Uint8Array.from(atob(content), (char) => char.charCodeAt(0));
        return decoded.byteLength;
    }
    catch {
        // Invalid base64: approximate with the encoded string length.
        return content.length;
    }
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @relayfile/core — shared business logic for relayfile
|
|
3
|
+
*
|
|
4
|
+
* Pure functions. No I/O. No runtime dependencies.
|
|
5
|
+
* Storage is injected via the StorageAdapter interface.
|
|
6
|
+
*/
|
|
7
|
+
export type { StorageAdapter, FileRow, FileSemantics, EventRow, OperationRow, WritebackItem, EnvelopeRow, PaginationOptions, Paginated, EnvelopeQueryOptions, } from "./storage.js";
|
|
8
|
+
export * from "./files.js";
|
|
9
|
+
export * from "./acl.js";
|
|
10
|
+
export * from "./semantics.js";
|
|
11
|
+
export * from "./query.js";
|
|
12
|
+
export * from "./tree.js";
|
|
13
|
+
export * from "./events.js";
|
|
14
|
+
export * from "./operations.js";
|
|
15
|
+
export * from "./writeback.js";
|
|
16
|
+
export * from "./webhooks.js";
|
|
17
|
+
export * from "./export.js";
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @relayfile/core — shared business logic for relayfile
|
|
3
|
+
*
|
|
4
|
+
* Pure functions. No I/O. No runtime dependencies.
|
|
5
|
+
* Storage is injected via the StorageAdapter interface.
|
|
6
|
+
*/
|
|
7
|
+
// Business logic modules (agent-3 will extract these from workspace.ts)
|
|
8
|
+
export * from "./files.js";
|
|
9
|
+
export * from "./acl.js";
|
|
10
|
+
export * from "./semantics.js";
|
|
11
|
+
export * from "./query.js";
|
|
12
|
+
export * from "./tree.js";
|
|
13
|
+
export * from "./events.js";
|
|
14
|
+
export * from "./operations.js";
|
|
15
|
+
export * from "./writeback.js";
|
|
16
|
+
export * from "./webhooks.js";
|
|
17
|
+
export * from "./export.js";
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Operation tracking state machine.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - operation creation during writes
|
|
6
|
+
* - status transitions: pending → running → succeeded/failed/dead_lettered
|
|
7
|
+
* - operation listing with filters
|
|
8
|
+
* - operation replay
|
|
9
|
+
*/
|
|
10
|
+
import type { StorageAdapter, OperationRow, Paginated, PaginationOptions } from "./storage.js";
|
|
11
|
+
export type DispatchWriteback = (opId: string) => void;
|
|
12
|
+
export declare function createOperation(storage: StorageAdapter, path: string, revision: string, action: string, provider: string, correlationId: string, dispatchWriteback?: DispatchWriteback): OperationRow;
|
|
13
|
+
export declare function getOperation(storage: StorageAdapter, opId: string): OperationRow | null;
|
|
14
|
+
export declare function listOperations(storage: StorageAdapter, options: PaginationOptions & {
|
|
15
|
+
status?: string;
|
|
16
|
+
action?: string;
|
|
17
|
+
provider?: string;
|
|
18
|
+
}): Paginated<OperationRow>;
|
|
19
|
+
export declare function acknowledgeOperation(storage: StorageAdapter, opId: string, success: boolean, errorMsg?: string): OperationRow | null;
|
|
20
|
+
export declare function replayOperation(storage: StorageAdapter, opId: string, dispatchWriteback?: DispatchWriteback): OperationRow | null;
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Operation tracking state machine.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - operation creation during writes
|
|
6
|
+
* - status transitions: pending → running → succeeded/failed/dead_lettered
|
|
7
|
+
* - operation listing with filters
|
|
8
|
+
* - operation replay
|
|
9
|
+
*/
|
|
10
|
+
/**
 * Build and persist a new pending operation row, then optionally hand its
 * id to the write-back dispatcher.
 *
 * Blank actions default to "file_upsert"; path and provider are
 * canonicalized before persisting.
 *
 * @returns the persisted operation row
 */
export function createOperation(storage, path, revision, action, provider, correlationId, dispatchWriteback) {
    const trimmedAction = action.trim();
    const operation = {
        opId: storage.nextOperationId(),
        path: normalizePath(path),
        revision,
        action: trimmedAction || "file_upsert",
        provider: normalizeProvider(provider),
        status: "pending",
        attemptCount: 0,
        lastError: null,
        nextAttemptAt: null,
        correlationId: correlationId || "",
    };
    storage.putOperation(operation);
    // Absent dispatcher means "queue only" — no immediate dispatch.
    dispatchWriteback?.(operation.opId);
    return operation;
}
|
|
27
|
+
/**
 * Fetch an operation row by id. Blank or whitespace-only ids
 * short-circuit to null without touching storage.
 */
export function getOperation(storage, opId) {
    const trimmedId = opId.trim();
    return trimmedId ? storage.getOperation(trimmedId) : null;
}
|
|
34
|
+
/**
 * List operations with optional status/action/provider filters and cursor
 * pagination. Blank filter strings are treated as "no filter"; the limit
 * is clamped into [1, 1000].
 */
export function listOperations(storage, options) {
    const statusFilter = options.status?.trim();
    const actionFilter = options.action?.trim();
    const providerFilter = normalizeProvider(options.provider);
    return storage.listOperations({
        status: statusFilter || undefined,
        action: actionFilter || undefined,
        provider: providerFilter || undefined,
        cursor: options.cursor || undefined,
        limit: clampLimit(options.limit),
    });
}
|
|
46
|
+
/**
 * Record the provider's verdict for an operation: success clears the
 * error state, failure stores a normalized error message. Returns the
 * updated row, or null when the op id is unknown.
 */
export function acknowledgeOperation(storage, opId, success, errorMsg) {
    const existing = getOperation(storage, opId);
    if (!existing) {
        return null;
    }
    const acknowledged = {
        ...existing,
        status: success ? "succeeded" : "failed",
        nextAttemptAt: null,
        lastError: success ? null : normalizeError(errorMsg),
    };
    storage.putOperation(acknowledged);
    return acknowledged;
}
|
|
60
|
+
/**
 * Reset a settled-unsuccessful operation back to pending and re-dispatch
 * it. Returns null when the op is missing or its status is not replayable
 * (only failed / dead_lettered / canceled may be replayed).
 */
export function replayOperation(storage, opId, dispatchWriteback) {
    const existing = getOperation(storage, opId);
    if (!existing || !isReplayableOperationStatus(existing.status)) {
        return null;
    }
    const requeued = {
        ...existing,
        status: "pending",
        nextAttemptAt: null,
        lastError: null,
    };
    storage.putOperation(requeued);
    // Optional immediate dispatch, mirroring createOperation.
    dispatchWriteback?.(requeued.opId);
    return requeued;
}
|
|
75
|
+
/** Clamp a requested page size into [1, 1000]; absent → 100. */
function clampLimit(limit) {
    const requested = limit ?? 100;
    if (requested < 1) {
        return 1;
    }
    return requested > 1000 ? 1000 : requested;
}
/** Lowercased, trimmed provider name; "" when absent. */
function normalizeProvider(provider) {
    if (provider == null) {
        return "";
    }
    return provider.trim().toLowerCase();
}
/** Ensure a leading slash and strip trailing slashes; blank → "/". */
function normalizePath(path) {
    const trimmed = path.trim();
    if (trimmed === "") {
        return "/";
    }
    const withSlash = trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
    if (withSlash === "/") {
        return "/";
    }
    return withSlash.replace(/\/+$/, "");
}
/** Fallback message for provider failures that carry no detail. */
function normalizeError(errorMsg) {
    const message = errorMsg?.trim();
    return message ? message : "provider reported failure";
}
/** Only settled-unsuccessful operations may be replayed. */
function isReplayableOperationStatus(status) {
    return ["failed", "dead_lettered", "canceled"].includes(status);
}
|
package/dist/query.d.ts
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File query and filtering engine.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - handleQueryFiles() / queryFiles()
|
|
6
|
+
* - property, relation, permission, comment, path prefix filters
|
|
7
|
+
* - cursor pagination
|
|
8
|
+
*/
|
|
9
|
+
import type { StorageAdapter, Paginated, PaginationOptions } from "./storage.js";
|
|
10
|
+
import { type TokenClaims } from "./acl.js";
|
|
11
|
+
export interface QueryOptions extends PaginationOptions {
|
|
12
|
+
path?: string;
|
|
13
|
+
provider?: string;
|
|
14
|
+
properties?: Record<string, string>;
|
|
15
|
+
relation?: string;
|
|
16
|
+
permission?: string;
|
|
17
|
+
comment?: string;
|
|
18
|
+
}
|
|
19
|
+
export interface QueryResultItem {
|
|
20
|
+
path: string;
|
|
21
|
+
revision: string;
|
|
22
|
+
contentType: string;
|
|
23
|
+
provider: string;
|
|
24
|
+
lastEditedAt: string;
|
|
25
|
+
size: number;
|
|
26
|
+
properties?: Record<string, string>;
|
|
27
|
+
relations?: string[];
|
|
28
|
+
comments?: string[];
|
|
29
|
+
}
|
|
30
|
+
export declare function queryFiles(storage: StorageAdapter, options: QueryOptions, claims: TokenClaims | null): Paginated<QueryResultItem>;
|
package/dist/query.js
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File query and filtering engine.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - handleQueryFiles() / queryFiles()
|
|
6
|
+
* - property, relation, permission, comment, path prefix filters
|
|
7
|
+
* - cursor pagination
|
|
8
|
+
*/
|
|
9
|
+
import { filePermissionAllows, resolveFilePermissions, } from "./acl.js";
|
|
10
|
+
/**
 * List files under a base path with provider / relation / permission /
 * comment / property filters, ACL visibility, and path-cursor pagination.
 *
 * @param storage StorageAdapter with listFiles()/getWorkspaceId()
 * @param options QueryOptions (path, provider, properties, relation,
 *                permission, comment, cursor, limit)
 * @param claims  TokenClaims or null for anonymous callers; visibility is
 *                enforced via filePermissionAllows either way
 * @returns Paginated<QueryResultItem>
 */
export function queryFiles(storage, options, claims) {
    const base = normalizePath(options.path ?? "/");
    const provider = normalizeProvider(options.provider);
    const relation = options.relation?.trim() ?? "";
    const permission = options.permission?.trim() ?? "";
    const comment = options.comment?.trim() ?? "";
    // Page size clamped to [1, 1000], defaulting to 100.
    const limit = Math.max(1, Math.min(options.limit ?? 100, 1000));
    const expectedProperties = normalizeProperties(options.properties);
    const workspaceId = storage.getWorkspaceId();
    // Stable path ordering makes the path itself usable as the cursor.
    let rows = storage
        .listFiles()
        .map(normalizeFileRow)
        .filter((row) => row.path === base || isWithinBase(row.path, base))
        .sort((left, right) => left.path.localeCompare(right.path));
    if (options.cursor) {
        // Resume strictly after the cursor path; an unknown cursor yields
        // an empty terminal page rather than restarting from the top.
        const index = rows.findIndex((row) => row.path === normalizePath(options.cursor ?? "/"));
        if (index < 0) {
            return { items: [], nextCursor: null };
        }
        rows = rows.slice(index + 1);
    }
    const items = [];
    for (const row of rows) {
        if (provider && normalizeProvider(row.provider) !== provider) {
            continue;
        }
        const semantics = row.semantics;
        if (relation && !stringSliceContains(semantics.relations, relation)) {
            continue;
        }
        // Permissions resolved with inheritance enabled (third arg true).
        const effectivePermissions = resolveFilePermissions(storage, row.path, true);
        if (permission && !stringSliceContainsExact(effectivePermissions, permission)) {
            continue;
        }
        if (comment && !stringSliceContains(semantics.comments, comment)) {
            continue;
        }
        if (!propertiesMatch(semantics.properties, expectedProperties)) {
            continue;
        }
        // ACL check last: rows the caller cannot see are silently skipped.
        if (!filePermissionAllows(effectivePermissions, workspaceId, claims)) {
            continue;
        }
        items.push({
            path: row.path,
            revision: row.revision,
            contentType: row.contentType,
            provider: row.provider,
            lastEditedAt: row.lastEditedAt,
            size: encodedSize(row.content, row.encoding),
            properties: semantics.properties,
            relations: semantics.relations,
            comments: semantics.comments,
        });
        if (items.length >= limit) {
            break;
        }
    }
    return {
        items,
        // NOTE(review): a full page always advertises a next cursor, even
        // when no further matching rows exist — the follow-up query then
        // returns an empty page. Confirm callers tolerate that.
        nextCursor: items.length >= limit ? (items[items.length - 1]?.path ?? null) : null,
    };
}
|
|
73
|
+
/** Shallow-copy a stored row with canonical path and provider fields. */
function normalizeFileRow(row) {
    return Object.assign({}, row, {
        path: normalizePath(row.path),
        provider: normalizeProvider(row.provider),
    });
}
|
|
80
|
+
/** Ensure a leading slash and strip trailing slashes; blank → "/". */
function normalizePath(path) {
    const trimmed = path.trim();
    if (trimmed === "") {
        return "/";
    }
    const withSlash = trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
    if (withSlash === "/") {
        return "/";
    }
    return withSlash.replace(/\/+$/, "");
}
/** Lowercased, trimmed provider name; "" when absent. */
function normalizeProvider(provider) {
    if (provider == null) {
        return "";
    }
    return provider.trim().toLowerCase();
}
/**
 * Trim keys and stringified values of a property filter, dropping blank
 * keys. Collapses to undefined when nothing survives.
 */
function normalizeProperties(input) {
    if (!input) {
        return undefined;
    }
    const entries = Object.entries(input)
        .map(([key, value]) => [key.trim(), String(value).trim()])
        .filter(([key]) => key !== "");
    if (entries.length === 0) {
        return undefined;
    }
    return Object.fromEntries(entries);
}
|
|
105
|
+
/**
 * True when `values` contains an element whose trimmed form equals the
 * trimmed needle. Blank needles and absent arrays never match.
 */
function stringSliceContains(values, needle) {
    const target = needle.trim();
    if (target === "") {
        return false;
    }
    if (!values) {
        return false;
    }
    return values.some((value) => value.trim() === target);
}
// Alias kept for readability at call sites filtering on exact permission strings.
function stringSliceContainsExact(values, needle) {
    return stringSliceContains(values, needle);
}
/**
 * True when every expected key/value pair is present in `actual`.
 * An empty/absent expectation matches anything; an absent `actual`
 * fails any non-empty expectation.
 */
function propertiesMatch(actual, expected) {
    const requiredEntries = expected ? Object.entries(expected) : [];
    if (requiredEntries.length === 0) {
        return true;
    }
    if (!actual) {
        return false;
    }
    return requiredEntries.every(([key, value]) => actual[key] === value);
}
/** True when `path` is a strict descendant of `base` ("/" matches all). */
function isWithinBase(path, base) {
    if (base === "/") {
        return path.startsWith("/");
    }
    return path.startsWith(`${base}/`);
}
|
|
127
|
+
/**
 * Byte size of `content` after decoding (local copy of files.js helper).
 * base64 → decoded byte count, falling back to the raw string length when
 * decoding fails; anything else → UTF-8 byte count.
 */
function encodedSize(content, encoding) {
    if (encoding !== "base64") {
        return new TextEncoder().encode(content).byteLength;
    }
    try {
        const decoded = Uint8Array.from(atob(content), (char) => char.charCodeAt(0));
        return decoded.byteLength;
    }
    catch {
        return content.length;
    }
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Semantic metadata model.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - normalizeSemantics()
|
|
6
|
+
* - isZeroSemantics()
|
|
7
|
+
* - semantic merge logic during file updates
|
|
8
|
+
*/
|
|
9
|
+
import type { FileSemantics } from "./storage.js";
|
|
10
|
+
export declare function normalizeSemantics(semantics?: FileSemantics): FileSemantics;
|
|
11
|
+
export declare function parseSemantics(raw?: string | null): FileSemantics;
|
|
12
|
+
export declare function normalizeProperties(input?: Record<string, string>): Record<string, string> | undefined;
|
|
13
|
+
export declare function normalizeStringArray(values?: string[]): string[] | undefined;
|
|
14
|
+
export declare function isZeroSemantics(semantics: FileSemantics): boolean;
|
|
15
|
+
export declare function mergeSemantics(existing: FileSemantics, incoming: FileSemantics): FileSemantics;
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Semantic metadata model.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - normalizeSemantics()
|
|
6
|
+
* - isZeroSemantics()
|
|
7
|
+
* - semantic merge logic during file updates
|
|
8
|
+
*/
|
|
9
|
+
// Upper bounds keep a single file's semantic metadata from growing unbounded.
const MAX_ARRAY_ENTRIES = 100;
const MAX_PROPERTIES_ENTRIES = 100;

/**
 * Produce a canonical FileSemantics value: each facet trimmed, deduped,
 * capped, and collapsed to undefined when empty.
 */
export function normalizeSemantics(semantics) {
    const source = semantics ?? {};
    return {
        properties: normalizeProperties(source.properties),
        relations: normalizeStringArray(source.relations),
        permissions: normalizeStringArray(source.permissions),
        comments: normalizeStringArray(source.comments),
    };
}
|
|
19
|
+
/**
 * Parse a JSON-serialized semantics blob. Absent input and malformed JSON
 * both yield the canonical zero value instead of throwing.
 */
export function parseSemantics(raw) {
    let parsed;
    try {
        parsed = raw ? JSON.parse(raw) : undefined;
    }
    catch {
        parsed = undefined;
    }
    return normalizeSemantics(parsed);
}
|
|
30
|
+
/**
 * Trim keys and stringified values, drop blank keys, and cap the number
 * of accepted entries at MAX_PROPERTIES_ENTRIES. Collapses to undefined
 * when nothing survives.
 */
export function normalizeProperties(input) {
    if (!input) {
        return undefined;
    }
    const out = {};
    let accepted = 0;
    for (const [rawKey, rawValue] of Object.entries(input)) {
        if (accepted >= MAX_PROPERTIES_ENTRIES) {
            break;
        }
        const key = rawKey.trim();
        if (key === "") {
            continue;
        }
        out[key] = String(rawValue).trim();
        accepted += 1;
    }
    return Object.keys(out).length > 0 ? out : undefined;
}
/**
 * Trim, dedupe, sort (locale-aware) and cap a string array at
 * MAX_ARRAY_ENTRIES. Collapses to undefined when nothing survives.
 */
export function normalizeStringArray(values) {
    if (!values || values.length === 0) {
        return undefined;
    }
    const unique = new Set();
    for (const value of values) {
        const trimmed = value.trim();
        if (trimmed !== "") {
            unique.add(trimmed);
        }
    }
    const sorted = [...unique].sort((a, b) => a.localeCompare(b));
    const capped = sorted.slice(0, MAX_ARRAY_ENTRIES);
    return capped.length > 0 ? capped : undefined;
}
|
|
59
|
+
/** True when, after normalization, no semantic facet carries any data. */
export function isZeroSemantics(semantics) {
    const { properties, relations, permissions, comments } = normalizeSemantics(semantics);
    return !properties && !relations && !permissions && !comments;
}
|
|
66
|
+
/**
 * Merge two semantics values facet by facet: properties via key overlay
 * (incoming wins), arrays via normalized union. Both inputs are
 * normalized first, so untrusted shapes are tolerated.
 */
export function mergeSemantics(existing, incoming) {
    const base = normalizeSemantics(existing);
    const overlay = normalizeSemantics(incoming);
    return {
        properties: mergeProperties(base.properties, overlay.properties),
        relations: mergeStringArrays(base.relations, overlay.relations),
        permissions: mergeStringArrays(base.permissions, overlay.permissions),
        comments: mergeStringArrays(base.comments, overlay.comments),
    };
}
|
|
76
|
+
/**
 * Overlay incoming property entries on top of existing ones (incoming
 * wins on key collision); undefined when both sides are absent.
 */
function mergeProperties(existing, incoming) {
    if (!existing) {
        return incoming ?? undefined;
    }
    if (!incoming) {
        return existing;
    }
    const merged = { ...existing, ...incoming };
    return Object.keys(merged).length > 0 ? merged : undefined;
}
|
|
86
|
+
/** Normalized union of two arrays; undefined when both sides are absent. */
function mergeStringArrays(existing, incoming) {
    if (!existing && !incoming) {
        return undefined;
    }
    const combined = [...(existing ?? []), ...(incoming ?? [])];
    return normalizeStringArray(combined);
}
|