@zenfs/core 1.7.2 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/backends/backend.js +3 -4
- package/dist/backends/fetch.d.ts +17 -18
- package/dist/backends/fetch.js +95 -58
- package/dist/backends/index.d.ts +2 -1
- package/dist/backends/index.js +2 -1
- package/dist/backends/memory.d.ts +1 -1
- package/dist/backends/overlay.d.ts +7 -2
- package/dist/backends/overlay.js +32 -9
- package/dist/backends/passthrough.d.ts +4 -0
- package/dist/backends/passthrough.js +128 -0
- package/dist/backends/port/fs.d.ts +9 -44
- package/dist/backends/port/fs.js +93 -116
- package/dist/backends/port/rpc.d.ts +8 -5
- package/dist/backends/port/rpc.js +9 -7
- package/dist/backends/store/file_index.d.ts +38 -0
- package/dist/backends/store/file_index.js +76 -0
- package/dist/backends/store/fs.d.ts +55 -34
- package/dist/backends/store/fs.js +417 -233
- package/dist/backends/store/index_fs.d.ts +34 -0
- package/dist/backends/store/index_fs.js +67 -0
- package/dist/backends/store/inode.d.ts +26 -8
- package/dist/backends/store/inode.js +92 -91
- package/dist/backends/store/simple.d.ts +20 -20
- package/dist/backends/store/simple.js +3 -4
- package/dist/backends/store/store.d.ts +12 -12
- package/dist/backends/store/store.js +4 -6
- package/dist/devices.d.ts +11 -10
- package/dist/devices.js +15 -11
- package/dist/file.d.ts +111 -7
- package/dist/file.js +319 -71
- package/dist/filesystem.d.ts +22 -4
- package/dist/mixins/mutexed.d.ts +7 -2
- package/dist/mixins/mutexed.js +56 -0
- package/dist/mixins/sync.d.ts +1 -1
- package/dist/stats.d.ts +12 -6
- package/dist/stats.js +14 -6
- package/dist/utils.d.ts +17 -3
- package/dist/utils.js +32 -10
- package/dist/vfs/constants.d.ts +2 -2
- package/dist/vfs/constants.js +2 -2
- package/dist/vfs/dir.js +3 -1
- package/dist/vfs/index.js +4 -1
- package/dist/vfs/promises.js +31 -11
- package/dist/vfs/shared.js +2 -0
- package/dist/vfs/sync.js +25 -13
- package/dist/vfs/types.d.ts +15 -0
- package/package.json +2 -3
- package/readme.md +2 -2
- package/scripts/test.js +73 -11
- package/tests/common/mutex.test.ts +1 -1
- package/tests/fetch/run.sh +16 -0
- package/tests/fetch/server.ts +49 -0
- package/tests/fetch/setup.ts +13 -0
- package/tests/fs/read.test.ts +10 -10
- package/tests/fs/times.test.ts +2 -2
- package/tests/setup/index.ts +38 -0
- package/tests/setup/port.ts +15 -0
- package/dist/backends/file_index.d.ts +0 -63
- package/dist/backends/file_index.js +0 -163
- package/tests/common/async.test.ts +0 -31
- package/tests/setup/cow+fetch.ts +0 -45
- /package/tests/fs/{appendFile.test.ts → append.test.ts} +0 -0
package/dist/backends/backend.js
CHANGED
@@ -20,8 +20,8 @@ export async function checkOptions(backend, options) {
         }
         throw new ErrnoError(Errno.EINVAL, 'Missing required option: ' + optName);
     }
-    const isType = (value) => (typeof
-    if (Array.isArray(opt.type) ? !opt.type.some(isType) : !isType(
+    const isType = (type, _ = value) => (typeof type == 'function' ? value instanceof type : typeof value === type);
+    if (Array.isArray(opt.type) ? !opt.type.some(v => isType(v)) : !isType(opt.type)) {
         // The type of the value as a string
         const type = typeof value == 'object' && 'constructor' in value ? value.constructor.name : typeof value;
         // The expected type (as a string)
@@ -29,9 +29,8 @@ export async function checkOptions(backend, options) {
         const expected = Array.isArray(opt.type) ? `one of ${opt.type.map(name).join(', ')}` : name(opt.type);
         throw new ErrnoError(Errno.EINVAL, `Incorrect type for "${optName}": ${type} (expected ${expected})`);
     }
-    if (opt.validator) {
+    if (opt.validator)
         await opt.validator(value);
-    }
     // Otherwise: All good!
     }
 }
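The corrected `isType` closure distinguishes constructor values from `typeof` strings, so an option's `type` may be a primitive name, a constructor, or an array mixing both. A minimal standalone sketch of the resulting behavior (the values passed in are illustrative, not part of the package's option schema):

```ts
// Mirrors the updated check in checkOptions: constructors use instanceof, strings use typeof.
type OptionType = string | Function;

function matchesType(value: unknown, type: OptionType | OptionType[]): boolean {
    const isType = (t: OptionType) => (typeof t == 'function' ? value instanceof t : typeof value === t);
    return Array.isArray(type) ? type.some(isType) : isType(type);
}

matchesType('index.json', ['string', 'object']); // true  (typeof branch)
matchesType(new Uint8Array(4), Uint8Array);      // true  (instanceof branch, now handled)
matchesType(42, 'string');                       // false (checkOptions would throw EINVAL)
```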
package/dist/backends/fetch.d.ts
CHANGED
@@ -1,7 +1,6 @@
-import
-import type
-import type {
-import { IndexFS } from './file_index.js';
+import { StoreFS } from './store/fs.js';
+import { type IndexData } from './store/file_index.js';
+import type { Store } from './store/store.js';
 /**
  * Configuration options for FetchFS.
  */
@@ -19,6 +18,11 @@ export interface FetchOptions {
      * Default: Fetch files relative to the index.
      */
     baseUrl?: string;
+    /**
+     * A store to use for caching content.
+     * Defaults to an in-memory store
+     */
+    cache?: Store;
 }
 /**
  * A simple filesystem backed by HTTP using the `fetch` API.
@@ -31,7 +35,7 @@ export interface FetchOptions {
  * "version": 1,
  * "entries": {
  * "/home": { ... },
- * "/home/
+ * "/home/john": { ... },
  * "/home/james": { ... }
  * }
  * }
@@ -39,21 +43,12 @@ export interface FetchOptions {
  *
  * Each entry contains the stats associated with the file.
  */
-export declare class FetchFS extends
+export declare class FetchFS extends StoreFS {
     readonly baseUrl: string;
-    readonly requestInit?: RequestInit;
+    readonly requestInit?: RequestInit | undefined;
+    private indexData;
     ready(): Promise<void>;
-    constructor(
-    metadata(): FileSystemMetadata;
-    /**
-     * Preload the `path` into the index.
-     */
-    preload(path: string, buffer: Uint8Array): void;
-    /**
-     * @todo Be lazier about actually requesting the data?
-     */
-    protected getData(path: string, stats: Stats): Promise<Uint8Array>;
-    protected getDataSync(path: string, stats: Stats): Uint8Array;
+    constructor(index?: IndexData | string, cache?: Store, baseUrl?: string, requestInit?: RequestInit | undefined);
 }
 declare const _Fetch: {
     readonly name: "Fetch";
@@ -70,6 +65,10 @@ declare const _Fetch: {
             readonly type: "object";
             readonly required: false;
         };
+        readonly cache: {
+            readonly type: "object";
+            readonly required: false;
+        };
     };
     readonly isAvailable: () => boolean;
     readonly create: (options: FetchOptions) => FetchFS;
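For orientation, a hedged sketch of constructing the reworked backend under the new declaration. It assumes `FetchFS` and `InMemoryStore` are re-exported from the package root (as `dist/backends/index.js` suggests); the URLs are placeholders:

```ts
import { FetchFS, InMemoryStore } from '@zenfs/core';

// New signature: constructor(index?, cache?, baseUrl?, requestInit?)
const fetchFS = new FetchFS(
    'https://example.com/index.json',    // index: an IndexData object or the URL of the index JSON
    new InMemoryStore('fetch-cache'),    // cache: any Store; defaults to an in-memory store
    'https://example.com/files',         // baseUrl: file contents are fetched relative to this
    { headers: { 'x-demo': '1' } }       // requestInit: forwarded to fetch()
);

// ready() loads the index and, unless syncing is disabled, caches file contents into the store.
await fetchFS.ready();
```

The `Fetch` backend's `create()` (shown in `fetch.js` below) performs the same wiring, additionally normalizing `baseUrl` through `new URL()`.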
package/dist/backends/fetch.js
CHANGED
@@ -1,5 +1,61 @@
+var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
+    if (value !== null && value !== void 0) {
+        if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
+        var dispose, inner;
+        if (async) {
+            if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
+            dispose = value[Symbol.asyncDispose];
+        }
+        if (dispose === void 0) {
+            if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
+            dispose = value[Symbol.dispose];
+            if (async) inner = dispose;
+        }
+        if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
+        if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
+        env.stack.push({ value: value, dispose: dispose, async: async });
+    }
+    else if (async) {
+        env.stack.push({ async: true });
+    }
+    return value;
+};
+var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
+    return function (env) {
+        function fail(e) {
+            env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
+            env.hasError = true;
+        }
+        var r, s = 0;
+        function next() {
+            while (r = env.stack.pop()) {
+                try {
+                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
+                    if (r.dispose) {
+                        var result = r.dispose.call(r.value);
+                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
+                    }
+                    else s |= 1;
+                }
+                catch (e) {
+                    fail(e);
+                }
+            }
+            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
+            if (env.hasError) throw env.error;
+        }
+        return next();
+    };
+})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
+    var e = new Error(message);
+    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
+});
 import { Errno, ErrnoError } from '../error.js';
-import {
+import { S_IFREG } from '../vfs/constants.js';
+import { InMemoryStore } from './memory.js';
+import { StoreFS } from './store/fs.js';
+import { Index } from './store/file_index.js';
+import { normalizePath } from '../utils.js';
 async function fetchFile(path, type, init) {
     const response = await fetch(path, init).catch((e) => {
         throw new ErrnoError(Errno.EIO, e.message, path);
@@ -33,7 +89,7 @@ async function fetchFile(path, type, init) {
  * "version": 1,
  * "entries": {
  * "/home": { ... },
- * "/home/
+ * "/home/john": { ... },
  * "/home/james": { ... }
  * }
  * }
@@ -41,68 +97,46 @@ async function fetchFile(path, type, init) {
  *
  * Each entry contains the stats associated with the file.
  */
-export class FetchFS extends
+export class FetchFS extends StoreFS {
     async ready() {
-
-
+        const env_1 = { stack: [], error: void 0, hasError: false };
+        try {
+            if (this._initialized)
+                return;
+            await super.ready();
+            const index = new Index();
+            index.fromJSON(await this.indexData);
+            await this.loadIndex(index);
+            if (this._disableSync)
+                return;
+            const tx = __addDisposableResource(env_1, this.store.transaction(), true);
+            // Iterate over all of the files and cache their contents
+            for (const [path, node] of index) {
+                if (!(node.mode & S_IFREG))
+                    continue;
+                const content = await fetchFile(this.baseUrl + path, 'buffer', this.requestInit);
+                await tx.set(node.data, content);
+            }
+            await tx.commit();
         }
-
-
-
+        catch (e_1) {
+            env_1.error = e_1;
+            env_1.hasError = true;
         }
-
-
-
-
-        await this.getData(path, stats);
+        finally {
+            const result_1 = __disposeResources(env_1);
+            if (result_1)
+                await result_1;
         }
     }
-    constructor(
-
-        if (baseUrl.at(-1) != '/') {
-            baseUrl += '/';
-        }
-        super(typeof index != 'string' ? index : fetchFile(index, 'json', requestInit));
+    constructor(index = 'index.json', cache = new InMemoryStore('fetch'), baseUrl = '', requestInit) {
+        super(cache);
         this.baseUrl = baseUrl;
         this.requestInit = requestInit;
-
-
-
-
-            name: FetchFS.name,
-            readonly: true,
-        };
-    }
-    /**
-     * Preload the `path` into the index.
-     */
-    preload(path, buffer) {
-        const stats = this.index.get(path);
-        if (!stats) {
-            throw ErrnoError.With('ENOENT', path, 'preload');
-        }
-        if (!stats.isFile()) {
-            throw ErrnoError.With('EISDIR', path, 'preload');
-        }
-        stats.size = buffer.length;
-        stats.fileData = buffer;
-    }
-    /**
-     * @todo Be lazier about actually requesting the data?
-     */
-    async getData(path, stats) {
-        if (stats.fileData) {
-            return stats.fileData;
-        }
-        const data = await fetchFile(this.baseUrl + (path.startsWith('/') ? path.slice(1) : path), 'buffer', this.requestInit);
-        stats.fileData = data;
-        return data;
-    }
-    getDataSync(path, stats) {
-        if (stats.fileData) {
-            return stats.fileData;
-        }
-        throw new ErrnoError(Errno.ENODATA, '', path, 'getData');
+        // prefix url must end in a directory separator.
+        if (baseUrl.at(-1) == '/')
+            this.baseUrl = baseUrl.slice(0, -1);
+        this.indexData = typeof index != 'string' ? index : fetchFile(index, 'json', requestInit);
     }
 }
 const _Fetch = {
@@ -111,12 +145,15 @@ const _Fetch = {
         index: { type: ['string', 'object'], required: false },
         baseUrl: { type: 'string', required: false },
         requestInit: { type: 'object', required: false },
+        cache: { type: 'object', required: false },
     },
     isAvailable() {
         return typeof globalThis.fetch == 'function';
    },
     create(options) {
-
+        const url = new URL(options.baseUrl || '');
+        url.pathname = normalizePath(url.pathname);
+        return new FetchFS(options.index, options.cache, url.toString(), options.requestInit);
     },
 };
 export const Fetch = _Fetch;
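The `__addDisposableResource` and `__disposeResources` helpers injected at the top of this file are TypeScript's downlevel emit for explicit resource management (`using`/`await using`), which the new `ready()` relies on to dispose the cache transaction. A standalone sketch of the pattern with a hypothetical transaction type, not the package's actual `Store` API:

```ts
// Hypothetical async-disposable resource; only the `await using` lowering is being illustrated.
class DemoTransaction implements AsyncDisposable {
    async commit(): Promise<void> {
        console.log('committed');
    }
    async [Symbol.asyncDispose](): Promise<void> {
        // Runs when the enclosing scope exits, even if an exception was thrown.
        console.log('disposed');
    }
}

async function cacheEverything(): Promise<void> {
    await using tx = new DemoTransaction();
    // ...fetch file contents and write them through tx here...
    await tx.commit();
} // tx[Symbol.asyncDispose]() is awaited here; compiling for older targets emits the helpers above.

await cacheEverything();
```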
package/dist/backends/index.d.ts
CHANGED
@@ -1,11 +1,12 @@
 export * from './backend.js';
 export * from './fetch.js';
-export * from './file_index.js';
 export * from './memory.js';
 export * from './overlay.js';
 export * from './passthrough.js';
 export * from './port/fs.js';
 export * from './store/fs.js';
+export * from './store/file_index.js';
+export * from './store/index_fs.js';
 export * from './store/inode.js';
 export * from './store/simple.js';
 export * from './store/store.js';
package/dist/backends/index.js
CHANGED
@@ -1,11 +1,12 @@
 export * from './backend.js';
 export * from './fetch.js';
-export * from './file_index.js';
 export * from './memory.js';
 export * from './overlay.js';
 export * from './passthrough.js';
 export * from './port/fs.js';
 export * from './store/fs.js';
+export * from './store/file_index.js';
+export * from './store/index_fs.js';
 export * from './store/inode.js';
 export * from './store/simple.js';
 export * from './store/store.js';
package/dist/backends/memory.d.ts
CHANGED
@@ -3,7 +3,7 @@ import { SimpleTransaction, type SimpleSyncStore } from './store/simple.js';
 /**
  * A simple in-memory store
  */
-export declare class InMemoryStore extends Map<
+export declare class InMemoryStore extends Map<number, Uint8Array> implements SimpleSyncStore {
     name: string;
     constructor(name?: string);
     sync(): Promise<void>;
package/dist/backends/overlay.d.ts
CHANGED
@@ -1,6 +1,7 @@
 import type { File } from '../file.js';
 import type { CreationOptions, FileSystemMetadata } from '../filesystem.js';
 import type { Stats } from '../stats.js';
+import type { InodeLike } from './store/inode.js';
 import { FileSystem } from '../filesystem.js';
 /**
  * Configuration options for OverlayFS instances.
@@ -36,8 +37,12 @@ export declare class UnmutexedOverlayFS extends FileSystem {
     private _ready;
     constructor({ writable, readable }: OverlayOptions);
     metadata(): FileSystemMetadata;
-    sync(path: string, data: Uint8Array, stats: Readonly<
-    syncSync(path: string, data: Uint8Array, stats: Readonly<
+    sync(path: string, data: Uint8Array, stats: Readonly<InodeLike>): Promise<void>;
+    syncSync(path: string, data: Uint8Array, stats: Readonly<InodeLike>): void;
+    read(path: string, offset: number, length: number): Promise<Uint8Array>;
+    readSync(path: string, offset: number, length: number): Uint8Array;
+    write(path: string, buffer: Uint8Array, offset: number): Promise<void>;
+    writeSync(path: string, buffer: Uint8Array, offset: number): void;
     /**
      * Called once to load up metadata stored on the writable file system.
      * @internal
package/dist/backends/overlay.js
CHANGED
@@ -54,8 +54,8 @@ import { Errno, ErrnoError } from '../error.js';
 import { PreloadFile, parseFlag } from '../file.js';
 import { FileSystem } from '../filesystem.js';
 import { Mutexed } from '../mixins/mutexed.js';
-import { decodeUTF8, encodeUTF8 } from '../utils.js';
-import { dirname } from '../vfs/path.js';
+import { canary, decodeUTF8, encodeUTF8 } from '../utils.js';
+import { dirname, join } from '../vfs/path.js';
 /** @internal */
 const deletionLogPath = '/.deleted';
 /**
@@ -97,15 +97,26 @@ export class UnmutexedOverlayFS extends FileSystem {
     }
     async sync(path, data, stats) {
         await this.copyForWrite(path);
-        if (!(await this.writable.exists(path))) {
-            await this.writable.createFile(path, 'w', 0o644, stats);
-        }
         await this.writable.sync(path, data, stats);
     }
     syncSync(path, data, stats) {
         this.copyForWriteSync(path);
         this.writable.syncSync(path, data, stats);
     }
+    async read(path, offset, length) {
+        return (await this.writable.exists(path)) ? await this.writable.read(path, offset, length) : await this.readable.read(path, offset, length);
+    }
+    readSync(path, offset, length) {
+        return this.writable.existsSync(path) ? this.writable.readSync(path, offset, length) : this.readable.readSync(path, offset, length);
+    }
+    async write(path, buffer, offset) {
+        await this.copyForWrite(path);
+        return await this.writable.write(path, buffer, offset);
+    }
+    writeSync(path, buffer, offset) {
+        this.copyForWriteSync(path);
+        return this.writable.writeSync(path, buffer, offset);
+    }
     /**
      * Called once to load up metadata stored on the writable file system.
      * @internal
@@ -421,10 +432,12 @@ export class UnmutexedOverlayFS extends FileSystem {
     createParentDirectoriesSync(path) {
         let parent = dirname(path);
         const toCreate = [];
+        const silence = canary(path);
         while (!this.writable.existsSync(parent)) {
             toCreate.push(parent);
             parent = dirname(parent);
         }
+        silence();
         for (const path of toCreate.reverse()) {
             const { uid, gid, mode } = this.statSync(path);
             this.writable.mkdirSync(path, mode, { uid, gid });
@@ -437,10 +450,12 @@ export class UnmutexedOverlayFS extends FileSystem {
     async createParentDirectories(path) {
         let parent = dirname(path);
         const toCreate = [];
+        const silence = canary(path);
         while (!(await this.writable.exists(parent))) {
             toCreate.push(parent);
             parent = dirname(parent);
         }
+        silence();
         for (const path of toCreate.reverse()) {
             const { uid, gid, mode } = await this.stat(path);
             await this.writable.mkdir(path, mode, { uid, gid });
@@ -453,7 +468,7 @@ export class UnmutexedOverlayFS extends FileSystem {
      */
     copyForWriteSync(path) {
         if (!this.existsSync(path)) {
-            throw ErrnoError.With('ENOENT', path, 'copyForWrite');
+            throw ErrnoError.With('ENOENT', path, '[copyForWrite]');
         }
         if (!this.writable.existsSync(dirname(path))) {
             this.createParentDirectoriesSync(path);
@@ -464,7 +479,7 @@ export class UnmutexedOverlayFS extends FileSystem {
     }
     async copyForWrite(path) {
         if (!(await this.exists(path))) {
-            throw ErrnoError.With('ENOENT', path, 'copyForWrite');
+            throw ErrnoError.With('ENOENT', path, '[copyForWrite]');
         }
         if (!(await this.writable.exists(dirname(path)))) {
             await this.createParentDirectories(path);
@@ -481,14 +496,18 @@ export class UnmutexedOverlayFS extends FileSystem {
         const env_1 = { stack: [], error: void 0, hasError: false };
         try {
             const stats = this.statSync(path);
+            stats.mode |= 0o222;
             if (stats.isDirectory()) {
                 this.writable.mkdirSync(path, stats.mode, stats);
+                for (const k of this.readable.readdirSync(path)) {
+                    this.copyToWritableSync(join(path, k));
+                }
                 return;
             }
             const data = new Uint8Array(stats.size);
             const readable = __addDisposableResource(env_1, this.readable.openFileSync(path, 'r'), false);
             readable.readSync(data);
-            const writable = __addDisposableResource(env_1, this.writable.createFileSync(path, 'w', stats.mode
+            const writable = __addDisposableResource(env_1, this.writable.createFileSync(path, 'w', stats.mode, stats), false);
             writable.writeSync(data);
         }
         catch (e_1) {
@@ -503,14 +522,18 @@ export class UnmutexedOverlayFS extends FileSystem {
         const env_2 = { stack: [], error: void 0, hasError: false };
         try {
             const stats = await this.stat(path);
+            stats.mode |= 0o222;
             if (stats.isDirectory()) {
                 await this.writable.mkdir(path, stats.mode, stats);
+                for (const k of await this.readable.readdir(path)) {
+                    await this.copyToWritable(join(path, k));
+                }
                 return;
             }
             const data = new Uint8Array(stats.size);
             const readable = __addDisposableResource(env_2, await this.readable.openFile(path, 'r'), true);
             await readable.read(data);
-            const writable = __addDisposableResource(env_2, await this.writable.createFile(path, 'w', stats.mode
+            const writable = __addDisposableResource(env_2, await this.writable.createFile(path, 'w', stats.mode, stats), true);
             await writable.write(data);
         }
         catch (e_2) {
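The new `read`/`write` methods make the copy-on-write split explicit: reads prefer the writable layer once a file exists there, and writes copy the file up first via `copyForWrite`. A simplified, hypothetical model of that dispatch (the `Layer` interface is a stand-in, not a ZenFS type):

```ts
interface Layer {
    exists(path: string): Promise<boolean>;
    read(path: string, offset: number, length: number): Promise<Uint8Array>;
    write(path: string, buffer: Uint8Array, offset: number): Promise<void>;
}

// Reads fall back to the read-only layer until a copy-up has happened.
async function overlayRead(writable: Layer, readable: Layer, path: string, offset: number, length: number): Promise<Uint8Array> {
    return (await writable.exists(path)) ? writable.read(path, offset, length) : readable.read(path, offset, length);
}

// Writes always land on the writable layer, after the copy-up step.
async function overlayWrite(writable: Layer, copyForWrite: (path: string) => Promise<void>, path: string, buffer: Uint8Array, offset: number): Promise<void> {
    await copyForWrite(path);
    return writable.write(path, buffer, offset);
}
```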
package/dist/backends/passthrough.d.ts
CHANGED
@@ -93,6 +93,10 @@ export declare class PassthroughFS extends FileSystem {
      * Create a hard link synchronously.
      */
     linkSync(target: string, link: string): void;
+    read(path: string, offset: number, length: number): Promise<Uint8Array>;
+    readSync(path: string, offset: number, length: number): Uint8Array;
+    write(path: string, buffer: Uint8Array, offset: number): Promise<void>;
+    writeSync(path: string, buffer: Uint8Array, offset: number): void;
 }
 declare const _Passthrough: {
     readonly name: "Passthrough";
package/dist/backends/passthrough.js
CHANGED
@@ -1,3 +1,55 @@
+var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
+    if (value !== null && value !== void 0) {
+        if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
+        var dispose, inner;
+        if (async) {
+            if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
+            dispose = value[Symbol.asyncDispose];
+        }
+        if (dispose === void 0) {
+            if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
+            dispose = value[Symbol.dispose];
+            if (async) inner = dispose;
+        }
+        if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
+        if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
+        env.stack.push({ value: value, dispose: dispose, async: async });
+    }
+    else if (async) {
+        env.stack.push({ async: true });
+    }
+    return value;
+};
+var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
+    return function (env) {
+        function fail(e) {
+            env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
+            env.hasError = true;
+        }
+        var r, s = 0;
+        function next() {
+            while (r = env.stack.pop()) {
+                try {
+                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
+                    if (r.dispose) {
+                        var result = r.dispose.call(r.value);
+                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
+                    }
+                    else s |= 1;
+                }
+                catch (e) {
+                    fail(e);
+                }
+            }
+            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
+            if (env.hasError) throw env.error;
+        }
+        return next();
+    };
+})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
+    var e = new Error(message);
+    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
+});
 import { FileSystem } from '../filesystem.js';
 import { ErrnoError } from '../error.js';
 import { Stats } from '../stats.js';
@@ -320,6 +372,82 @@ export class PassthroughFS extends FileSystem {
             this.error(err, target);
         }
     }
+    async read(path, offset, length) {
+        try {
+            const env_1 = { stack: [], error: void 0, hasError: false };
+            try {
+                const handle = __addDisposableResource(env_1, await this.nodeFS.promises.open(this.path(path), 'r'), true);
+                const buffer = new Uint8Array(length);
+                await handle.read({ buffer, offset, length });
+                return buffer;
+            }
+            catch (e_1) {
+                env_1.error = e_1;
+                env_1.hasError = true;
+            }
+            finally {
+                const result_1 = __disposeResources(env_1);
+                if (result_1)
+                    await result_1;
+            }
+        }
+        catch (err) {
+            this.error(err, path);
+        }
+    }
+    readSync(path, offset, length) {
+        let fd;
+        try {
+            fd = this.nodeFS.openSync(this.path(path), 'r');
+            const buffer = new Uint8Array(length);
+            this.nodeFS.readSync(fd, buffer, { offset, length });
+            return buffer;
+        }
+        catch (err) {
+            this.error(err, path);
+        }
+        finally {
+            if (fd)
+                this.nodeFS.closeSync(fd);
+        }
+        // unreachable
+        throw ErrnoError.With('EIO', path, 'read');
+    }
+    async write(path, buffer, offset) {
+        try {
+            const env_2 = { stack: [], error: void 0, hasError: false };
+            try {
+                const handle = __addDisposableResource(env_2, await this.nodeFS.promises.open(this.path(path), 'w'), true);
+                await handle.write(buffer, offset);
+            }
+            catch (e_2) {
+                env_2.error = e_2;
+                env_2.hasError = true;
+            }
+            finally {
+                const result_2 = __disposeResources(env_2);
+                if (result_2)
+                    await result_2;
+            }
+        }
+        catch (err) {
+            this.error(err, path);
+        }
+    }
+    writeSync(path, buffer, offset) {
+        let fd;
+        try {
+            fd = this.nodeFS.openSync(this.path(path), 'w');
+            this.nodeFS.writeSync(fd, buffer, offset);
+        }
+        catch (err) {
+            this.error(err, path);
+        }
+        finally {
+            if (fd)
+                this.nodeFS.closeSync(fd);
+        }
+    }
 }
 const _Passthrough = {
     name: 'Passthrough',