@relayfile/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/acl.d.ts +34 -0
- package/dist/acl.js +163 -0
- package/dist/events.d.ts +23 -0
- package/dist/events.js +39 -0
- package/dist/export.d.ts +16 -0
- package/dist/export.js +137 -0
- package/dist/files.d.ts +66 -0
- package/dist/files.js +240 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +17 -0
- package/dist/operations.d.ts +20 -0
- package/dist/operations.js +94 -0
- package/dist/query.d.ts +30 -0
- package/dist/query.js +138 -0
- package/dist/semantics.d.ts +15 -0
- package/dist/semantics.js +90 -0
- package/dist/storage.d.ts +113 -0
- package/dist/storage.js +9 -0
- package/dist/tree.d.ts +35 -0
- package/dist/tree.js +106 -0
- package/dist/utils.d.ts +11 -0
- package/dist/utils.js +32 -0
- package/dist/webhooks.d.ts +82 -0
- package/dist/webhooks.js +493 -0
- package/dist/writeback.d.ts +24 -0
- package/dist/writeback.js +148 -0
- package/package.json +39 -0
package/dist/acl.d.ts
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
 * ACL permission resolution and enforcement.
 *
 * Extract from workspace.ts:
 * - resolveFilePermissions() — walk ancestor dirs for .relayfile.acl markers
 * - parsePermissionRule() — parse "scope:X", "deny:agent:Y", "public", etc.
 * - filePermissionAllows() — evaluate allow/deny rules against agent claims
 */
import type { StorageAdapter } from "./storage.js";
/** Name of the per-directory marker file whose `semantics.permissions` carry ACL rules. */
export declare const DIRECTORY_PERMISSION_MARKER = ".relayfile.acl";
/** Claims carried by an agent's access token. */
export interface TokenClaims {
    workspaceId: string;
    agentName: string;
    scopes: Set<string>;
}
/** One parsed ACL rule, e.g. "deny:agent:bob" → { effect: "deny", kind: "agent", value: "bob" }. */
export interface ParsedPermissionRule {
    effect: "allow" | "deny";
    kind: "scope" | "agent" | "workspace" | "public";
    value: string;
}
/** Parse a raw rule string; returns null for blank or unrecognized rules. */
export declare function parsePermissionRule(raw: string): ParsedPermissionRule | null;
/**
 * Evaluate ACL rules against agent claims.
 *
 * Security model: **default-open**. When no permissions array is provided
 * (or it is empty), access is allowed — this is intentional so that files
 * without explicit ACL markers remain accessible. When enforceable rules
 * exist but none match the caller's claims, access is **denied**.
 *
 * Callers that need a default-deny posture should ensure every path has
 * at least one ACL marker in its ancestor directories.
 */
export declare function filePermissionAllows(permissions: string[] | undefined, workspaceId: string, claims: TokenClaims | null): boolean;
/** Collect ACL rules from ancestor-directory markers (and, when `includeTarget` is true, the file itself). */
export declare function resolveFilePermissions(storage: StorageAdapter, path: string, includeTarget: boolean): string[];
|
package/dist/acl.js
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
/**
 * ACL permission resolution and enforcement.
 *
 * Extract from workspace.ts:
 * - resolveFilePermissions() — walk ancestor dirs for .relayfile.acl markers
 * - parsePermissionRule() — parse "scope:X", "deny:agent:Y", "public", etc.
 * - filePermissionAllows() — evaluate allow/deny rules against agent claims
 */
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** Name of the per-directory marker file whose semantics carry ACL rules. */
export const DIRECTORY_PERMISSION_MARKER = ".relayfile.acl";
// ---------------------------------------------------------------------------
// Rule parsing and evaluation (extracted from workspace.ts)
// ---------------------------------------------------------------------------
/**
 * Parse one raw ACL rule string.
 *
 * Accepted shapes (keywords are case-insensitive, values keep their case):
 *   "public" | "any" | "*"                     → matches everyone
 *   "scope:X" | "agent:X" | "workspace:X"      → matches a specific claim
 * Each may be prefixed with "allow:" (default) or "deny:".
 *
 * Returns null for blank or unrecognized input so callers can skip it.
 */
export function parsePermissionRule(raw) {
    let body = raw.trim();
    if (!body) {
        return null;
    }
    let effect = "allow";
    const lowered = body.toLowerCase();
    if (lowered.startsWith("allow:")) {
        body = body.slice("allow:".length).trim();
    }
    else if (lowered.startsWith("deny:")) {
        effect = "deny";
        body = body.slice("deny:".length).trim();
    }
    const keyword = body.toLowerCase();
    if (keyword === "public" || keyword === "any" || keyword === "*") {
        return { effect, kind: "public", value: "*" };
    }
    const separator = body.indexOf(":");
    if (separator < 0) {
        return null;
    }
    const kind = body.slice(0, separator).trim().toLowerCase();
    const value = body.slice(separator + 1).trim();
    if (!kind || !value) {
        return null;
    }
    return kind === "scope" || kind === "agent" || kind === "workspace"
        ? { effect, kind, value }
        : null;
}
/**
 * Evaluate ACL rules against agent claims.
 *
 * Security model: **default-open**. When no permissions array is provided
 * (or it is empty, or contains only unparseable rules), access is allowed —
 * intentional so that files without explicit ACL markers stay accessible.
 * When enforceable rules exist, a matching deny always wins; otherwise
 * access requires at least one matching allow.
 *
 * Callers that need a default-deny posture should ensure every path has
 * at least one ACL marker in its ancestor directories.
 */
export function filePermissionAllows(permissions, workspaceId, claims) {
    if (!permissions || permissions.length === 0) {
        return true;
    }
    const rules = [];
    for (const raw of permissions) {
        const parsed = parsePermissionRule(raw);
        if (parsed) {
            rules.push(parsed);
        }
    }
    // No enforceable rules at all → default-open.
    if (rules.length === 0) {
        return true;
    }
    let anyAllowMatched = false;
    for (const rule of rules) {
        if (!ruleMatchesClaims(rule, workspaceId, claims)) {
            continue;
        }
        if (rule.effect === "deny") {
            // A matching deny always wins, regardless of other allows.
            return false;
        }
        anyAllowMatched = true;
    }
    return anyAllowMatched;
}
/** True when a single parsed rule applies to the caller. */
function ruleMatchesClaims(rule, workspaceId, claims) {
    switch (rule.kind) {
        case "public":
            return true;
        case "scope":
            return claims != null && claims.scopes.has(rule.value);
        case "agent":
            return claims != null && claims.agentName === rule.value;
        case "workspace":
            return workspaceId === rule.value;
        default:
            return false;
    }
}
|
|
100
|
+
/**
 * Gather the ACL rule strings that apply to `path`.
 *
 * Walks every ancestor directory of the normalized path in root-to-leaf
 * order, collecting the `semantics.permissions` of each ".relayfile.acl"
 * marker file found. When `includeTarget` is true, the target file's own
 * permissions are appended last. A marker file never governs itself.
 */
export function resolveFilePermissions(storage, path, includeTarget) {
    const target = normalizePath(path);
    const collected = [];
    const collectFrom = (row) => {
        const rules = row?.semantics.permissions;
        if (rules && rules.length > 0) {
            collected.push(...rules);
        }
    };
    for (const dir of ancestorDirectories(target)) {
        const markerPath = joinPath(dir, DIRECTORY_PERMISSION_MARKER);
        // Do not let a marker file's own rules apply to the marker itself.
        if (markerPath !== target) {
            collectFrom(storage.getFile(markerPath));
        }
    }
    if (includeTarget) {
        collectFrom(storage.getFile(target));
    }
    return collected;
}
|
|
125
|
+
/**
 * Collapse a path to canonical absolute form: leading "/", "." and empty
 * segments dropped, ".." resolved (clamped at the root), no trailing slash.
 * Blank input normalizes to "/".
 */
function normalizePath(path) {
    const input = path.trim();
    if (!input) {
        return "/";
    }
    const rooted = input.startsWith("/") ? input : `/${input}`;
    const segments = [];
    for (const segment of rooted.split("/")) {
        if (segment === "" || segment === ".") {
            continue;
        }
        if (segment === "..") {
            // ".." above the root is silently clamped (pop on empty is a no-op).
            segments.pop();
        }
        else {
            segments.push(segment);
        }
    }
    return segments.length > 0 ? `/${segments.join("/")}` : "/";
}
/** Join a child segment onto a base path, normalizing the result. */
function joinPath(base, child) {
    const root = normalizePath(base);
    return root === "/"
        ? normalizePath(`/${child}`)
        : normalizePath(`${root}/${child}`);
}
/**
 * Ancestor directories of `path`, root first, excluding the path itself.
 * "/a/b/c" → ["/", "/a", "/a/b"]; "/" → ["/"].
 */
function ancestorDirectories(path) {
    const segments = normalizePath(path).split("/").filter(Boolean);
    const ancestors = ["/"];
    let prefix = "";
    for (const segment of segments.slice(0, -1)) {
        prefix = joinPath(prefix || "/", segment);
        ancestors.push(prefix);
    }
    return ancestors;
}
|
package/dist/events.d.ts
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
/**
 * Event recording and feed.
 *
 * Extract from workspace.ts:
 * - event creation during writes/deletes
 * - event feed listing with pagination
 * - recent events retrieval (for WebSocket catch-up)
 */
import type { StorageAdapter, EventRow, Paginated, PaginationOptions } from "./storage.js";
/** Payload for createEvent(); `timestamp` defaults to "now" (ISO 8601) when omitted. */
export interface CreateEventInput {
    type: string;
    path: string;
    revision: string;
    origin: string;
    provider: string;
    correlationId: string;
    timestamp?: string;
}
/** Persist one event row to the storage adapter and return it as stored. */
export declare function createEvent(storage: StorageAdapter, input: CreateEventInput): EventRow;
/** List events with cursor pagination and an optional provider filter. */
export declare function listEvents(storage: StorageAdapter, options: PaginationOptions & {
    provider?: string;
}): Paginated<EventRow>;
/** Fetch the newest events, clamped to a sane limit (used for WebSocket catch-up). */
export declare function getRecentEvents(storage: StorageAdapter, limit: number): EventRow[];
|
package/dist/events.js
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Event recording and feed.
|
|
3
|
+
*
|
|
4
|
+
* Extract from workspace.ts:
|
|
5
|
+
* - event creation during writes/deletes
|
|
6
|
+
* - event feed listing with pagination
|
|
7
|
+
* - recent events retrieval (for WebSocket catch-up)
|
|
8
|
+
*/
|
|
9
|
+
export function createEvent(storage, input) {
|
|
10
|
+
const event = {
|
|
11
|
+
eventId: storage.nextEventId(),
|
|
12
|
+
type: input.type,
|
|
13
|
+
path: input.path,
|
|
14
|
+
revision: input.revision,
|
|
15
|
+
origin: input.origin,
|
|
16
|
+
provider: normalizeProvider(input.provider),
|
|
17
|
+
correlationId: input.correlationId || "",
|
|
18
|
+
timestamp: input.timestamp ?? new Date().toISOString(),
|
|
19
|
+
};
|
|
20
|
+
storage.appendEvent(event);
|
|
21
|
+
return event;
|
|
22
|
+
}
|
|
23
|
+
export function listEvents(storage, options) {
|
|
24
|
+
const provider = normalizeProvider(options.provider);
|
|
25
|
+
return storage.listEvents({
|
|
26
|
+
provider: provider || undefined,
|
|
27
|
+
cursor: options.cursor || undefined,
|
|
28
|
+
limit: clampLimit(options.limit),
|
|
29
|
+
});
|
|
30
|
+
}
|
|
31
|
+
export function getRecentEvents(storage, limit) {
|
|
32
|
+
return storage.getRecentEvents(clampLimit(limit));
|
|
33
|
+
}
|
|
34
|
+
function normalizeProvider(provider) {
|
|
35
|
+
return provider?.trim().toLowerCase() ?? "";
|
|
36
|
+
}
|
|
37
|
+
function clampLimit(limit) {
|
|
38
|
+
return Math.max(1, Math.min(limit ?? 200, 1000));
|
|
39
|
+
}
|
package/dist/export.d.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Workspace export logic.
 *
 * Extract from workspace.ts:
 * - JSON export (file list with metadata)
 * - tar export (gzipped archive)
 * - patch export
 */
import type { StorageAdapter, FileRow } from "./storage.js";
import type { TokenClaims } from "./acl.js";
/** Supported export formats. */
export type ExportFormat = "json" | "tar" | "patch";
/** ACL-filtered, path-sorted list of files with content materialized and semantics copied. */
export declare function exportWorkspaceJson(storage: StorageAdapter, claims: TokenClaims | null): FileRow[];
/** The ACL-visible files rendered as one unified-diff style patch string. */
export declare function exportWorkspacePatch(storage: StorageAdapter, claims: TokenClaims | null): string;
/** The ACL-visible files packed into a gzipped tar archive. */
export declare function exportWorkspaceTarGzip(storage: StorageAdapter, claims: TokenClaims | null): Promise<ArrayBuffer>;
/** Build a patch (every file shown as an addition) from already-filtered rows. */
export declare function buildUnifiedPatch(files: FileRow[]): string;
/** Build a gzipped ustar archive from already-filtered rows. */
export declare function buildTarGzip(files: FileRow[]): Promise<ArrayBuffer>;
|
package/dist/export.js
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
/**
 * Workspace export logic.
 *
 * Extract from workspace.ts:
 * - JSON export (file list with metadata)
 * - tar export (gzipped archive)
 * - patch export
 */
import { filePermissionAllows, resolveFilePermissions } from "./acl.js";
/**
 * Export the workspace as a JSON-ready list of file rows.
 *
 * Rows are sorted by path, filtered to what `claims` may read under the
 * ACL rules, and materialized (lazily-loaded content resolved, semantics
 * copied so callers cannot mutate storage state).
 */
export function exportWorkspaceJson(storage, claims) {
    const workspaceId = storage.getWorkspaceId();
    const sorted = [...storage.listFiles()].sort((a, b) => a.path.localeCompare(b.path));
    const visible = [];
    for (const row of sorted) {
        const permissions = resolveFilePermissions(storage, row.path, true);
        if (filePermissionAllows(permissions, workspaceId, claims)) {
            visible.push(materializeFile(storage, row));
        }
    }
    return visible;
}
/** Export the ACL-visible workspace files as a unified patch string. */
export function exportWorkspacePatch(storage, claims) {
    const files = exportWorkspaceJson(storage, claims);
    return buildUnifiedPatch(files);
}
/** Export the ACL-visible workspace files as a gzipped tar archive. */
export async function exportWorkspaceTarGzip(storage, claims) {
    const files = exportWorkspaceJson(storage, claims);
    return buildTarGzip(files);
}
|
|
25
|
+
/**
 * Render files as a unified diff in which every file appears newly added.
 *
 * Fix: the hunk header previously emitted a bare "@@", which is not a valid
 * unified-diff hunk header and is rejected by `patch`/`git apply`; it now
 * carries the standard line counts ("@@ -0,0 +1,N @@").
 *
 * @param files Already ACL-filtered file rows; `content` is emitted as-is
 *              (note: base64-encoded rows are not decoded here).
 * @returns Concatenated per-file diffs, or "" for an empty export.
 */
export function buildUnifiedPatch(files) {
    if (files.length === 0) {
        return "";
    }
    return files
        .map((file) => {
            const lines = file.content.split("\n");
            return [
                `--- ${file.path}`,
                `+++ ${file.path}`,
                `@@ -0,0 +1,${lines.length} @@`,
                ...lines.map((line) => `+${line}`),
            ].join("\n");
        })
        .join("\n");
}
|
|
41
|
+
/**
 * Serialize `files` into a tar archive (ustar headers) and gzip it.
 *
 * @param files File rows; base64-encoded rows are decoded back to raw bytes,
 *              everything else is UTF-8 encoded.
 * @returns Promise resolving to the gzipped archive bytes.
 */
export async function buildTarGzip(files) {
    const chunks = [];
    for (const file of files) {
        // base64 content is decoded to raw bytes; text content is UTF-8 encoded.
        const content = file.encoding === "base64"
            ? Uint8Array.from(atob(file.content), (char) => char.charCodeAt(0))
            : new TextEncoder().encode(file.content);
        // Tar member names are relative, so strip any leading slashes.
        const header = buildTarHeader(file.path.replace(/^\/+/, ""), content.byteLength, file.lastEditedAt);
        chunks.push(header);
        chunks.push(content);
        // Each file body is NUL-padded to the next 512-byte record boundary.
        const remainder = content.byteLength % 512;
        if (remainder > 0) {
            chunks.push(new Uint8Array(512 - remainder));
        }
    }
    // End-of-archive marker: two consecutive zero-filled 512-byte records.
    chunks.push(new Uint8Array(1024));
    const tar = concatBytes(chunks);
    // Gzip via the web-standard CompressionStream (no Node-specific zlib import).
    const sourceBody = new Response(toArrayBuffer(tar)).body;
    if (!sourceBody) {
        throw new Error("failed to create readable stream for tar archive");
    }
    const compressed = new Response(sourceBody.pipeThrough(new CompressionStream("gzip")));
    return compressed.arrayBuffer();
}
|
|
64
|
+
function cloneSemantics(file) {
|
|
65
|
+
return {
|
|
66
|
+
properties: file.semantics.properties
|
|
67
|
+
? { ...file.semantics.properties }
|
|
68
|
+
: undefined,
|
|
69
|
+
relations: file.semantics.relations
|
|
70
|
+
? [...file.semantics.relations]
|
|
71
|
+
: undefined,
|
|
72
|
+
permissions: file.semantics.permissions
|
|
73
|
+
? [...file.semantics.permissions]
|
|
74
|
+
: undefined,
|
|
75
|
+
comments: file.semantics.comments ? [...file.semantics.comments] : undefined,
|
|
76
|
+
};
|
|
77
|
+
}
|
|
78
|
+
function materializeFile(storage, file) {
|
|
79
|
+
const loaded = storage.loadFileContent?.(file);
|
|
80
|
+
const contentState = typeof loaded === "string"
|
|
81
|
+
? { content: loaded, encoding: file.encoding }
|
|
82
|
+
: loaded ?? { content: file.content, encoding: file.encoding };
|
|
83
|
+
return {
|
|
84
|
+
...file,
|
|
85
|
+
content: contentState.content,
|
|
86
|
+
encoding: contentState.encoding ?? file.encoding,
|
|
87
|
+
semantics: cloneSemantics(file),
|
|
88
|
+
};
|
|
89
|
+
}
|
|
90
|
+
/**
 * Build a 512-byte ustar header record for one archive member.
 *
 * @param name      Member path (already made relative); truncated to the
 *                  100-byte name field.
 * @param size      Body size in bytes.
 * @param updatedAt Optional ISO timestamp used as mtime; defaults to now.
 */
function buildTarHeader(name, size, updatedAt) {
    const header = new Uint8Array(512);
    writeTarString(header, 0, 100, name.slice(0, 100)); // name field
    writeTarOctal(header, 100, 8, 0o644); // mode
    writeTarOctal(header, 108, 8, 0); // uid
    writeTarOctal(header, 116, 8, 0); // gid
    writeTarOctal(header, 124, 12, size); // size
    writeTarOctal(header, 136, 12, Math.floor((updatedAt ? Date.parse(updatedAt) : Date.now()) / 1000)); // mtime (unix seconds)
    // The checksum field (offsets 148-155) is treated as all spaces while
    // the checksum itself is computed.
    for (let index = 148; index < 156; index += 1) {
        header[index] = 0x20;
    }
    header[156] = "0".charCodeAt(0); // typeflag '0' = regular file
    writeTarString(header, 257, 6, "ustar"); // magic (writes "ustar" + implicit NUL)
    writeTarString(header, 263, 2, "00"); // ustar version
    const checksum = header.reduce((sum, byte) => sum + byte, 0);
    writeTarChecksum(header, checksum);
    return header;
}
// Write an ASCII string into a fixed-width field, truncating to fit.
function writeTarString(buffer, offset, length, value) {
    const bytes = new TextEncoder().encode(value);
    buffer.set(bytes.slice(0, length), offset);
}
// Numeric tar fields are zero-padded octal followed by a NUL terminator.
function writeTarOctal(buffer, offset, length, value) {
    const octal = value.toString(8).padStart(length - 1, "0");
    writeTarString(buffer, offset, length - 1, octal.slice(-length + 1));
    buffer[offset + length - 1] = 0;
}
// The checksum field is 6 octal digits, a NUL, then a space.
function writeTarChecksum(buffer, checksum) {
    const octal = checksum.toString(8).padStart(6, "0");
    writeTarString(buffer, 148, 6, octal);
    buffer[154] = 0;
    buffer[155] = 0x20;
}
|
|
123
|
+
/** Concatenate byte chunks into one contiguous Uint8Array. */
function concatBytes(chunks) {
    let size = 0;
    for (const chunk of chunks) {
        size += chunk.byteLength;
    }
    const merged = new Uint8Array(size);
    let cursor = 0;
    for (const chunk of chunks) {
        merged.set(chunk, cursor);
        cursor += chunk.byteLength;
    }
    return merged;
}
/** Copy a byte view into a standalone ArrayBuffer of exactly its length. */
function toArrayBuffer(bytes) {
    // The Uint8Array(typedArray) constructor copies, so the returned buffer
    // is detached from the input's (possibly larger) underlying buffer.
    return new Uint8Array(bytes).buffer;
}
|
package/dist/files.d.ts
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
/**
 * File CRUD operations.
 *
 * Extract from workspace.ts:
 * - handleWriteFile → writeFile()
 * - handleReadFile → readFile()
 * - handleDeleteFile → deleteFile()
 * - normalizeIfMatchHeader → normalizeIfMatch()
 * - revision conflict detection
 * - content encoding normalization/validation
 * - provider inference from path
 */
import type { StorageAdapter, FileRow, FileSemantics } from "./storage.js";
import { normalizePath } from "./utils.js";
/** Input for writeFile(); `ifMatch` carries the caller's expected revision. */
export interface WriteFileRequest {
    path: string;
    ifMatch: string;
    content: string;
    contentType?: string;
    encoding?: string;
    semantics?: FileSemantics;
    correlationId?: string;
}
/** Successful outcome of a write or delete, including writeback status. */
export interface WriteFileResult {
    opId: string;
    status: string;
    targetRevision: string;
    writeback: {
        provider: string;
        state: string;
    };
}
/** Revision-mismatch details returned when an If-Match precondition fails. */
export interface ConflictError {
    type: "conflict";
    expectedRevision: string;
    currentRevision: string;
    currentContentPreview?: string;
}
/** Input for deleteFile(); `ifMatch` carries the caller's expected revision. */
export interface DeleteFileRequest {
    path: string;
    ifMatch: string;
    correlationId?: string;
}
/** Discriminated result of writeFile(): success, or a typed failure. */
export type WriteResult = {
    ok: true;
    result: WriteFileResult;
} | {
    ok: false;
    error: "not_found" | "missing_precondition" | "invalid_input" | ConflictError;
};
/** Discriminated result of deleteFile(): success, or a typed failure. */
export type DeleteResult = {
    ok: true;
    result: WriteFileResult;
} | {
    ok: false;
    error: "not_found" | "missing_precondition" | "invalid_input" | ConflictError;
};
/** Normalize an If-Match header value for revision comparison — presumably strips quotes/whitespace; confirm against the implementation. */
export declare function normalizeIfMatch(value: string): string;
export { normalizePath };
/** Infer the writeback provider from a file path. */
export declare function inferProvider(path: string): string;
/** Create or update a file, enforcing the If-Match revision precondition. */
export declare function writeFile(storage: StorageAdapter, req: WriteFileRequest): WriteResult;
/** Read a file row by path; null when absent. */
export declare function readFile(storage: StorageAdapter, path: string): FileRow | null;
/** Delete a file, enforcing the If-Match revision precondition. */
export declare function deleteFile(storage: StorageAdapter, req: DeleteFileRequest): DeleteResult;
/** Content type applied when a write request omits one. */
export declare const DEFAULT_CONTENT_TYPE = "text/markdown";
/** Upper bound on a file's encoded size in bytes. */
export declare const MAX_FILE_BYTES: number;
/** Byte size of `content` under the given encoding. */
export declare function encodedSize(content: string, encoding: "utf-8" | "base64"): number;
|