@wp-playground/storage 0.9.30 → 0.9.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +1 -1
- package/index.js +125 -95
- package/package.json +9 -5
package/index.cjs
CHANGED

@@ -1 +1 @@
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const h=require("@php-wasm/util"),
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const h=require("@php-wasm/util"),P=require("octokit");function q(e){return new P.Octokit({auth:e})}function A(e,t=""){t.length&&!t.endsWith("/")&&(t+="/");const r={};for(const a of e)a.path.startsWith(t)&&(r[a.path.substring(t.length)]=a.content);return r}async function w(e,t,r,a,n,s={}){s.progress||(s.progress={foundFiles:0,downloadedFiles:0});const{onProgress:o}=s,i=[],f=[],{data:l}=await e.rest.repos.getContent({owner:t,repo:r,path:n,ref:a});if(!Array.isArray(l))throw new Error(`Expected the list of files to be an array, but got ${typeof l}`);for(const c of l)c.type==="file"?(++s.progress.foundFiles,o==null||o(s.progress),i.push(C(e,t,r,a,c).then(F=>(++s.progress.downloadedFiles,o==null||o(s.progress),F)))):c.type==="dir"&&f.push(w(e,t,r,a,c.path,s));const u=await Promise.all(i),T=(await Promise.all(f)).flatMap(c=>c);return[...u,...T]}const D=new h.Semaphore({concurrency:15});async function C(e,t,r,a,n){const s=await D.acquire();try{const{data:o}=await e.rest.repos.getContent({owner:t,repo:r,ref:a,path:n.path});if(!("content"in o))throw new Error(`No content found for ${n.path}`);return{name:n.name,path:n.path,content:E(o.content)}}finally{s()}}function E(e){const t=window.atob(e),r=t.length,a=new Uint8Array(r);for(let n=0;n<r;n++)a[n]=t.charCodeAt(n);return a}async function O(e,t,r,a,n){var u;const{data:s}=await e.rest.pulls.get({owner:t,repo:r,pull_number:a}),i=(u=(await e.rest.actions.listWorkflowRuns({owner:t,repo:r,branch:s.head.ref,workflow_id:n})).data.workflow_runs[0])==null?void 0:u.id,f=await e.rest.actions.listWorkflowRunArtifacts({owner:t,repo:r,run_id:i});return(await e.rest.actions.downloadArtifact({owner:t,repo:r,artifact_id:f.data.artifacts[0].id,archive_format:"zip"})).data}async function S(e,t,r){var s;const{data:a,headers:n}=await e.request("GET /repos/{owner}/{repo}",{owner:t,repo:r});return!(!n["x-oauth-scopes"]||!((s=a.permissions)!=null&&s.push))}async function v(e,t,r,a,n){await e.request("GET /repos/{owner}/{repo}/branches/{branch}",{owner:t,repo:r,branch:a}).then(()=>!0,()=>!1)?await e.request("PATCH /repos/{owner}/{repo}/git/refs/{ref}",{owner:t,repo:r,sha:n,ref:`heads/${a}`}):await e.request("POST /repos/{owner}/{repo}/git/refs",{owner:t,repo:r,sha:n,ref:`refs/heads/${a}`})}async function H(e,t,r){const a=await e.request("GET /user");return(await e.request("GET /repos/{owner}/{repo}/forks",{owner:t,repo:r})).data.find(o=>o.owner&&o.owner.login===a.data.login)||await e.request("POST /repos/{owner}/{repo}/forks",{owner:t,repo:r}),a.data.login}async function M(e,t,r,a,n,s){const{data:{sha:o}}=await e.request("POST /repos/{owner}/{repo}/git/commits",{owner:t,repo:r,message:a,tree:s,parents:[n]});return o}async function k(e,t,r,a,n){const s=await p(e,t,r,a,n);if(s.length===0)return null;const{data:{sha:o}}=await e.request("POST /repos/{owner}/{repo}/git/trees",{owner:t,repo:r,base_tree:a,tree:s});return o}async function p(e,t,r,a,n){const s=[];for(const[o,i]of n.create)s.push(d(e,t,r,o,i));for(const[o,i]of n.update)s.push(d(e,t,r,o,i));for(const o of n.delete)s.push(g(e,t,r,a,o));return Promise.all(s).then(o=>o.filter(i=>!!i))}const y=new h.Semaphore({concurrency:10});async function d(e,t,r,a,n){const s=await y.acquire();try{if(ArrayBuffer.isView(n))try{const o=new TextDecoder("utf-8",{fatal:!0}).decode(n);return{path:a,content:o,mode:"100644"}}catch{const{data:{sha:i}}=await e.rest.git.createBlob({owner:t,repo:r,encoding:"base64",content:x(n)});return{path:a,sha:i,mode:"100644"}}else 
return{path:a,content:n,mode:"100644"}}finally{s()}}async function g(e,t,r,a,n){const s=await y.acquire();try{return await e.request("HEAD /repos/{owner}/{repo}/contents/:path",{owner:t,repo:r,ref:a,path:n}),{path:n,mode:"100644",sha:null}}catch{return}finally{s()}}function x(e){const t=[],r=e.byteLength;for(let a=0;a<r;a++)t.push(String.fromCharCode(e[a]));return window.btoa(t.join(""))}async function*B(e,t,{exceptPaths:r=[]}={}){if(t=h.normalizePath(t),!await e.isDir(t)){await e.fileExists(t)&&(yield{path:t,read:async()=>await e.readFileAsBuffer(t)});return}const a=[t];for(;a.length;){const n=a.pop();if(!n)return;const s=await e.listFiles(n);for(const o of s){const i=h.joinPaths(n,o);r.includes(i.substring(t.length+1))||(await e.isDir(i)?a.push(i):yield{path:i,read:async()=>await e.readFileAsBuffer(i)})}}}async function j(e,t){const r={create:new Map,update:new Map,delete:new Set},a=new Set;for await(const n of t){a.add(n.path);const s=e.get(n.path),o=await n.read();s?N(s,o)||r.update.set(n.path,o):r.create.set(n.path,o)}for(const n of e.keys())a.has(n)||r.delete.add(n);return r}function N(e,t){return e.length===t.length&&e.every((r,a)=>r===t[a])}async function m(e){return e.type==="local-fs"?e.handle:b(e.path)}async function b(e){const t=e.split("/").filter(a=>a.length>0);let r=await navigator.storage.getDirectory();for(const a of t)r=await r.getDirectoryHandle(a);return r}async function $(e){const r=await(await navigator.storage.getDirectory()).resolve(e);if(r===null)throw new DOMException("Unable to resolve path of OPFS directory handle.","NotFoundError");return"/"+r.join("/")}async function R(e){const t=await m(e);for await(const r of t.keys())await t.removeEntry(r,{recursive:!0})}exports.changeset=j;exports.clearContentsFromMountDevice=R;exports.createClient=q;exports.createCommit=M;exports.createOrUpdateBranch=v;exports.createTree=k;exports.createTreeNode=d;exports.createTreeNodes=p;exports.deleteFile=g;exports.directoryHandleFromMountDevice=m;exports.directoryHandleToOpfsPath=$;exports.filesListToObject=A;exports.fork=H;exports.getArtifact=O;exports.getFilesFromDirectory=w;exports.iterateFiles=B;exports.mayPush=S;exports.opfsPathToDirectoryHandle=b;
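The main addition in this release is a set of OPFS (origin-private file system) helpers. For readability, here is a de-minified sketch of those new exports, reconstructed from the minified bundle above; the published source is TypeScript and may differ in detail, and parameter names are inferred:

// Sketch only: reconstructed from the minified 0.9.31 bundle above; names are inferred.
async function directoryHandleFromMountDevice(device) {
	// "local-fs" devices already carry a FileSystemDirectoryHandle;
	// OPFS devices are looked up by path.
	return device.type === 'local-fs'
		? device.handle
		: opfsPathToDirectoryHandle(device.path);
}

async function opfsPathToDirectoryHandle(path) {
	const segments = path.split('/').filter((segment) => segment.length > 0);
	let handle = await navigator.storage.getDirectory();
	for (const segment of segments) {
		handle = await handle.getDirectoryHandle(segment);
	}
	return handle;
}

async function directoryHandleToOpfsPath(handle) {
	const opfsRoot = await navigator.storage.getDirectory();
	const segments = await opfsRoot.resolve(handle);
	if (segments === null) {
		throw new DOMException(
			'Unable to resolve path of OPFS directory handle.',
			'NotFoundError'
		);
	}
	return '/' + segments.join('/');
}

async function clearContentsFromMountDevice(device) {
	const handle = await directoryHandleFromMountDevice(device);
	for await (const name of handle.keys()) {
		await handle.removeEntry(name, { recursive: true });
	}
}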
package/index.js
CHANGED

@@ -5,23 +5,23 @@ function B(t) {
 auth: t
 });
 }
-function
+function H(t, e = "") {
 e.length && !e.endsWith("/") && (e += "/");
-const
-for (const
-
-return
+const r = {};
+for (const a of t)
+a.path.startsWith(e) && (r[a.path.substring(e.length)] = a.content);
+return r;
 }
-async function
+async function F(t, e, r, a, n, s = {}) {
 s.progress || (s.progress = {
 foundFiles: 0,
 downloadedFiles: 0
 });
 const { onProgress: i } = s, o = [], f = [], { data: u } = await t.rest.repos.getContent({
 owner: e,
-repo:
+repo: r,
 path: n,
-ref:
+ref: a
 });
 if (!Array.isArray(u))
 throw new Error(
@@ -29,13 +29,13 @@ async function T(t, e, a, r, n, s = {}) {
 );
 for (const c of u)
 c.type === "file" ? (++s.progress.foundFiles, i == null || i(s.progress), o.push(
-
+P(t, e, r, a, c).then((y) => (++s.progress.downloadedFiles, i == null || i(s.progress), y))
 )) : c.type === "dir" && f.push(
-
+F(
 t,
 e,
-a,
 r,
+a,
 c.path,
 s
 )
@@ -45,14 +45,14 @@ async function T(t, e, a, r, n, s = {}) {
 );
 return [...l, ...p];
 }
-const
-async function
-const s = await
+const T = new d({ concurrency: 15 });
+async function P(t, e, r, a, n) {
+const s = await T.acquire();
 try {
 const { data: i } = await t.rest.repos.getContent({
 owner: e,
-repo:
-ref:
+repo: r,
+ref: a,
 path: n.path
 });
 if (!("content" in i))
@@ -60,102 +60,102 @@ async function q(t, e, a, r, n) {
 return {
 name: n.name,
 path: n.path,
-content:
+content: A(i.content)
 };
 } finally {
 s();
 }
 }
-function
-const e = window.atob(t),
-for (let n = 0; n <
-
-return
+function A(t) {
+const e = window.atob(t), r = e.length, a = new Uint8Array(r);
+for (let n = 0; n < r; n++)
+a[n] = e.charCodeAt(n);
+return a;
 }
-async function
+async function M(t, e, r, a, n) {
 var l;
 const { data: s } = await t.rest.pulls.get({
 owner: e,
-repo:
-pull_number:
+repo: r,
+pull_number: a
 }), o = (l = (await t.rest.actions.listWorkflowRuns({
 owner: e,
-repo:
+repo: r,
 branch: s.head.ref,
 workflow_id: n
 })).data.workflow_runs[0]) == null ? void 0 : l.id, f = await t.rest.actions.listWorkflowRunArtifacts({
 owner: e,
-repo:
+repo: r,
 run_id: o
 });
 return (await t.rest.actions.downloadArtifact({
 owner: e,
-repo:
+repo: r,
 artifact_id: f.data.artifacts[0].id,
 archive_format: "zip"
 })).data;
 }
-async function
+async function k(t, e, r) {
 var s;
-const { data:
+const { data: a, headers: n } = await t.request(
 "GET /repos/{owner}/{repo}",
 {
 owner: e,
-repo:
+repo: r
 }
 );
-return !(!n["x-oauth-scopes"] || !((s =
+return !(!n["x-oauth-scopes"] || !((s = a.permissions) != null && s.push));
 }
-async function
+async function R(t, e, r, a, n) {
 await t.request("GET /repos/{owner}/{repo}/branches/{branch}", {
 owner: e,
-repo:
-branch:
+repo: r,
+branch: a
 }).then(
 () => !0,
 () => !1
 ) ? await t.request("PATCH /repos/{owner}/{repo}/git/refs/{ref}", {
 owner: e,
-repo:
+repo: r,
 sha: n,
-ref: `heads/${
+ref: `heads/${a}`
 }) : await t.request("POST /repos/{owner}/{repo}/git/refs", {
 owner: e,
-repo:
+repo: r,
 sha: n,
-ref: `refs/heads/${
+ref: `refs/heads/${a}`
 });
 }
-async function
-const
+async function _(t, e, r) {
+const a = await t.request("GET /user");
 return (await t.request("GET /repos/{owner}/{repo}/forks", {
 owner: e,
-repo:
+repo: r
 })).data.find(
-(i) => i.owner && i.owner.login ===
+(i) => i.owner && i.owner.login === a.data.login
 ) || await t.request("POST /repos/{owner}/{repo}/forks", {
 owner: e,
-repo:
-}),
+repo: r
+}), a.data.login;
 }
-async function
+async function j(t, e, r, a, n, s) {
 const {
 data: { sha: i }
 } = await t.request("POST /repos/{owner}/{repo}/git/commits", {
 owner: e,
-repo:
-message:
+repo: r,
+message: a,
 tree: s,
 parents: [n]
 });
 return i;
 }
-async function
-const s = await
+async function G(t, e, r, a, n) {
+const s = await q(
 t,
 e,
-a,
 r,
+a,
 n
 );
 if (s.length === 0)
@@ -164,26 +164,26 @@ async function $(t, e, a, r, n) {
 data: { sha: i }
 } = await t.request("POST /repos/{owner}/{repo}/git/trees", {
 owner: e,
-repo:
-base_tree:
+repo: r,
+base_tree: a,
 tree: s
 });
 return i;
 }
-async function
+async function q(t, e, r, a, n) {
 const s = [];
 for (const [i, o] of n.create)
-s.push(h(t, e,
+s.push(h(t, e, r, i, o));
 for (const [i, o] of n.update)
-s.push(h(t, e,
+s.push(h(t, e, r, i, o));
 for (const i of n.delete)
-s.push(E(t, e,
+s.push(E(t, e, r, a, i));
 return Promise.all(s).then(
 (i) => i.filter((o) => !!o)
 );
 }
 const w = new d({ concurrency: 10 });
-async function h(t, e,
+async function h(t, e, r, a, n) {
 const s = await w.acquire();
 try {
 if (ArrayBuffer.isView(n))
@@ -192,7 +192,7 @@ async function h(t, e, a, r, n) {
 fatal: !0
 }).decode(n);
 return {
-path:
+path: a,
 content: i,
 mode: "100644"
 };
@@ -201,19 +201,19 @@ async function h(t, e, a, r, n) {
 data: { sha: o }
 } = await t.rest.git.createBlob({
 owner: e,
-repo:
+repo: r,
 encoding: "base64",
-content:
+content: D(n)
 });
 return {
-path:
+path: a,
 sha: o,
 mode: "100644"
 };
 }
 else
 return {
-path:
+path: a,
 content: n,
 mode: "100644"
 };
@@ -221,13 +221,13 @@ async function h(t, e, a, r, n) {
 s();
 }
 }
-async function E(t, e,
+async function E(t, e, r, a, n) {
 const s = await w.acquire();
 try {
 return await t.request("HEAD /repos/{owner}/{repo}/contents/:path", {
 owner: e,
-repo:
-ref:
+repo: r,
+ref: a,
 path: n
 }), {
 path: n,
@@ -240,13 +240,13 @@ async function E(t, e, a, r, n) {
 s();
 }
 }
-function
-const e = [],
-for (let
-e.push(String.fromCharCode(t[
+function D(t) {
+const e = [], r = t.byteLength;
+for (let a = 0; a < r; a++)
+e.push(String.fromCharCode(t[a]));
 return window.btoa(e.join(""));
 }
-async function*
+async function* N(t, e, { exceptPaths: r = [] } = {}) {
 if (e = g(e), !await t.isDir(e)) {
 await t.fileExists(e) && (yield {
 path: e,
@@ -254,52 +254,82 @@ async function* j(t, e, { exceptPaths: a = [] } = {}) {
 });
 return;
 }
-const
-for (;
-const n =
+const a = [e];
+for (; a.length; ) {
+const n = a.pop();
 if (!n)
 return;
 const s = await t.listFiles(n);
 for (const i of s) {
 const o = m(n, i);
-
+r.includes(o.substring(e.length + 1)) || (await t.isDir(o) ? a.push(o) : yield {
 path: o,
 read: async () => await t.readFileAsBuffer(o)
 });
 }
 }
 }
-async function
-const
+async function U(t, e) {
+const r = {
 create: /* @__PURE__ */ new Map(),
 update: /* @__PURE__ */ new Map(),
 delete: /* @__PURE__ */ new Set()
-},
+}, a = /* @__PURE__ */ new Set();
 for await (const n of e) {
-
+a.add(n.path);
 const s = t.get(n.path), i = await n.read();
-s ?
+s ? C(s, i) || r.update.set(n.path, i) : r.create.set(n.path, i);
 }
 for (const n of t.keys())
-
-return
+a.has(n) || r.delete.add(n);
+return r;
+}
+function C(t, e) {
+return t.length === e.length && t.every((r, a) => r === e[a]);
+}
+async function S(t) {
+return t.type === "local-fs" ? t.handle : O(t.path);
+}
+async function O(t) {
+const e = t.split("/").filter((a) => a.length > 0);
+let r = await navigator.storage.getDirectory();
+for (const a of e)
+r = await r.getDirectoryHandle(a);
+return r;
+}
+async function W(t) {
+const r = await (await navigator.storage.getDirectory()).resolve(t);
+if (r === null)
+throw new DOMException(
+"Unable to resolve path of OPFS directory handle.",
+"NotFoundError"
+);
+return "/" + r.join("/");
 }
-function
-
+async function $(t) {
+const e = await S(t);
+for await (const r of e.keys())
+await e.removeEntry(r, {
+recursive: !0
+});
 }
 export {
-
+U as changeset,
+$ as clearContentsFromMountDevice,
 B as createClient,
-
-
-
+j as createCommit,
+R as createOrUpdateBranch,
+G as createTree,
 h as createTreeNode,
-
+q as createTreeNodes,
 E as deleteFile,
-
-
-
-
-
-
+S as directoryHandleFromMountDevice,
+W as directoryHandleToOpfsPath,
+H as filesListToObject,
+_ as fork,
+M as getArtifact,
+F as getFilesFromDirectory,
+N as iterateFiles,
+k as mayPush,
+O as opfsPathToDirectoryHandle
 };
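The index.js changes mirror the CJS bundle: changeset, clearContentsFromMountDevice, directoryHandleFromMountDevice, directoryHandleToOpfsPath, and opfsPathToDirectoryHandle join the existing GitHub helpers in the export list. A hypothetical usage sketch of how the iterateFiles and changeset exports compose; the filesystem object and snapshot map below are assumptions for illustration, not taken from the package docs:

import { iterateFiles, changeset } from '@wp-playground/storage';

// Hypothetical example: "fs" is any object exposing isDir(), fileExists(),
// listFiles() and readFileAsBuffer(), and "previousFiles" is a Map of
// path => Uint8Array captured earlier.
async function diffAgainstSnapshot(fs, previousFiles) {
	const files = iterateFiles(fs, '/wordpress', {
		exceptPaths: ['wp-content/uploads'],
	});
	// Returns { create: Map, update: Map, delete: Set } per the bundle above.
	return await changeset(previousFiles, files);
}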
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 "name": "@wp-playground/storage",
-"version": "0.9.
+"version": "0.9.31",
 "description": "Bindings for storing WordPress Playground on different backends.",
 "repository": {
 "type": "git",
@@ -34,14 +34,18 @@
 },
 "license": "GPL-2.0-or-later",
 "type": "module",
-"gitHead": "
+"gitHead": "296845449383439f6304f0559eaacf14cef68989",
 "dependencies": {
 "comlink": "^4.4.1",
+"express": "4.19.2",
 "ini": "4.1.2",
 "octokit": "3.1.1",
-"
-"
-"@php-wasm/
+"ws": "8.18.0",
+"yargs": "17.7.2",
+"@php-wasm/web": "0.9.31",
+"@php-wasm/universal": "0.9.31",
+"@php-wasm/util": "0.9.31",
+"@php-wasm/node-polyfills": "0.9.31"
 },
 "main": "index.js"
 }