sliftutils 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/.cursorrules +161 -0
  2. package/.eslintrc.js +38 -0
  3. package/.vscode/settings.json +39 -0
  4. package/bundler/buffer.js +2370 -0
  5. package/bundler/bundleEntry.ts +32 -0
  6. package/bundler/bundleEntryCaller.ts +8 -0
  7. package/bundler/bundleRequire.ts +244 -0
  8. package/bundler/bundleWrapper.ts +115 -0
  9. package/bundler/bundler.ts +72 -0
  10. package/bundler/flattenSourceMaps.ts +0 -0
  11. package/bundler/sourceMaps.ts +261 -0
  12. package/misc/environment.ts +11 -0
  13. package/misc/types.ts +3 -0
  14. package/misc/zip.ts +37 -0
  15. package/package.json +24 -0
  16. package/spec.txt +33 -0
  17. package/storage/CachedStorage.ts +32 -0
  18. package/storage/DelayedStorage.ts +30 -0
  19. package/storage/DiskCollection.ts +272 -0
  20. package/storage/FileFolderAPI.tsx +427 -0
  21. package/storage/IStorage.ts +40 -0
  22. package/storage/IndexedDBFileFolderAPI.ts +170 -0
  23. package/storage/JSONStorage.ts +35 -0
  24. package/storage/PendingManager.tsx +63 -0
  25. package/storage/PendingStorage.ts +47 -0
  26. package/storage/PrivateFileSystemStorage.ts +192 -0
  27. package/storage/StorageObservable.ts +122 -0
  28. package/storage/TransactionStorage.ts +485 -0
  29. package/storage/fileSystemPointer.ts +81 -0
  30. package/storage/storage.d.ts +41 -0
  31. package/tsconfig.json +31 -0
  32. package/web/DropdownCustom.tsx +150 -0
  33. package/web/FullscreenModal.tsx +75 -0
  34. package/web/GenericFormat.tsx +186 -0
  35. package/web/Input.tsx +350 -0
  36. package/web/InputLabel.tsx +288 -0
  37. package/web/InputPicker.tsx +158 -0
  38. package/web/LocalStorageParam.ts +56 -0
  39. package/web/SyncedController.ts +405 -0
  40. package/web/SyncedLoadingIndicator.tsx +37 -0
  41. package/web/Table.tsx +188 -0
  42. package/web/URLParam.ts +84 -0
  43. package/web/asyncObservable.ts +40 -0
  44. package/web/colors.tsx +14 -0
  45. package/web/mobxTyped.ts +29 -0
  46. package/web/modal.tsx +18 -0
  47. package/web/observer.tsx +35 -0
package/bundler/sourceMaps.ts ADDED
@@ -0,0 +1,261 @@
+
+ export type SourceMap = {
+     version: number;
+     file: string;
+     sourceRoot: string;
+     sources: string[];
+     sourcesContent: string[];
+     names: never[];
+     mappings: string;
+ };
+ export type SourceMapping = {
+     generatedLine: number;
+     generatedColumn: number;
+     sourceIndex: number;
+     originalLine: number;
+     originalColumn: number;
+ };
+ export type InProgressSourceMap = {
+     sources: {
+         name: string;
+         contents: string;
+     }[];
+     mappings: SourceMapping[];
+ };
+
+ export function removeSourceMap(content: string): {
+     sourceMap: SourceMap | undefined;
+     code: string;
+ } {
+     // Remove any url mappings (so NOT data ones)
+     content = content.replace(/\/\/# sourceMappingURL=(?!data:)[^\s]+$/m, "// removed url sourcemap");
+
+     const sourceMapRegex = /\/\/# sourceMappingURL=data:application\/json;base64,([^\s]+)$/m;
+     const match = content.match(sourceMapRegex);
+
+     if (!match) {
+         return { sourceMap: undefined, code: content };
+     }
+
+     let sourceMapJson = Buffer.from(match[1], "base64").toString();
+     // HACK: If the sourcemap is invalid, try to remove trailing characters. For some reason we sometimes have
+     // extra characters at the end? Also try to add some characters too?
+     function isJSON(str: string): boolean {
+         try {
+             JSON.parse(str);
+             return true;
+         } catch {
+             return false;
+         }
+     }
+     for (let i = 0; i < 3; i++) {
+         if (isJSON(sourceMapJson)) {
+             break;
+         }
+         sourceMapJson = sourceMapJson.slice(0, -1);
+     }
+     if (!isJSON(sourceMapJson)) {
+         if (isJSON(sourceMapJson + "]}")) {
+             sourceMapJson = sourceMapJson + "]}";
+         }
+     }
+
+     try {
+         const sourceMap = JSON.parse(sourceMapJson) as SourceMap;
+
+         // Remove the sourcemap line but keep the code
+         content = content.replace(sourceMapRegex, "// merged inline sourcemap");
+
+         return { sourceMap, code: content };
+     } catch {
+         console.log(`Invalid source map: ${sourceMapJson}`);
+         return { sourceMap: undefined, code: content };
+     }
+ }
+ function decodeMappings(mappings: string): SourceMapping[] {
+     const vlqTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+     const vlqDecode = new Map(Array.from(vlqTable).map((c, i) => [c, i]));
+
+     function decodeVLQ(str: string, pos: { value: number }): number {
+         let result = 0;
+         let shift = 0;
+         let continuation: boolean;
+
+         do {
+             const c = str[pos.value++];
+             const digit = vlqDecode.get(c);
+             if (digit === undefined) {
+                 throw new Error(`Invalid VLQ character: ${JSON.stringify(c)}`);
+             }
+             continuation = (digit & 32) > 0;
+             const value = digit & 31;
+             result += value << shift;
+             shift += 5;
+         } while (continuation);
+
+         const shouldNegate = result & 1;
+         result >>>= 1;
+         return shouldNegate ? -result : result;
+     }
+
+     const result: SourceMapping[] = [];
+     let generatedLine = 1;
+     let generatedColumn = 0;
+     let sourceIndex = 0;
+     let originalLine = 1;
+     let originalColumn = 0;
+
+     const segments = mappings.split(";");
+     for (let i = 0; i < segments.length; i++) {
+         const line = segments[i];
+         if (!line) {
+             generatedLine++;
+             continue;
+         }
+
+         generatedColumn = 0;
+         const fields = line.split(",");
+
+         for (const field of fields) {
+             if (!field) continue;
+
+             const pos = { value: 0 };
+             const segmentData = [];
+
+             while (pos.value < field.length) {
+                 segmentData.push(decodeVLQ(field, pos));
+             }
+
+             if (segmentData.length < 4) continue;
+
+             generatedColumn += segmentData[0];
+             sourceIndex += segmentData[1];
+             originalLine += segmentData[2];
+             originalColumn += segmentData[3];
+
+             result.push({
+                 generatedLine,
+                 generatedColumn,
+                 sourceIndex,
+                 originalLine,
+                 originalColumn,
+             });
+         }
+         generatedLine++;
+     }
+
+     return result;
+ }
+ export function getInProgressSourceMap(sourceMap: SourceMap): InProgressSourceMap {
+     const sources = sourceMap.sources.map((name, i) => ({
+         name,
+         contents: sourceMap.sourcesContent[i] || "",
+     }));
+
+     const mappings = decodeMappings(sourceMap.mappings);
+
+     return {
+         sources,
+         mappings,
+     };
+ }
+
+ export function addToInProgressSourceMap(inProgress: InProgressSourceMap, newMappings: InProgressSourceMap) {
+     const sourceIndexOffset = inProgress.sources.length;
+
+     // Add new sources
+     inProgress.sources.push(...newMappings.sources);
+
+     // Add mappings with adjusted source indices
+     for (const mapping of newMappings.mappings) {
+         inProgress.mappings.push({
+             ...mapping,
+             sourceIndex: mapping.sourceIndex + sourceIndexOffset,
+         });
+     }
+ }
+
+ export function finalizeInProgressSourceMap(inProgress: InProgressSourceMap): SourceMap {
+     const vlqTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+
+     function encodeVLQ(value: number): string {
+         // Convert to zigzag encoding
+         value = value < 0 ? (-value << 1) | 1 : value << 1;
+
+         let result = "";
+         do {
+             let digit = value & 31;
+             value >>>= 5;
+             if (value > 0) {
+                 digit |= 32;
+             }
+             result += vlqTable[digit];
+         } while (value > 0);
+
+         return result;
+     }
+
+     // Sort mappings by generated position
+     const sortedMappings = [...inProgress.mappings].sort((a, b) => {
+         if (a.generatedLine !== b.generatedLine) {
+             return a.generatedLine - b.generatedLine;
+         }
+         return a.generatedColumn - b.generatedColumn;
+     });
+
+     // Generate the mappings string
+     let prevGenLine = 1;
+     let prevGenColumn = 0;
+     let prevSourceIndex = 0;
+     let prevOrigLine = 1;
+     let prevOrigColumn = 0;
+
+     const lines: string[] = [];
+     let currentLine: string[] = [];
+
+     for (const mapping of sortedMappings) {
+         if (mapping.generatedLine > prevGenLine) {
+             lines.push(currentLine.join(","));
+             for (let i = prevGenLine + 1; i < mapping.generatedLine; i++) {
+                 lines.push("");
+             }
+             currentLine = [];
+             prevGenColumn = 0;
+         }
+
+         const segment = [
+             encodeVLQ(mapping.generatedColumn - prevGenColumn),
+             encodeVLQ(mapping.sourceIndex - prevSourceIndex),
+             encodeVLQ(mapping.originalLine - prevOrigLine),
+             encodeVLQ(mapping.originalColumn - prevOrigColumn),
+         ];
+
+         currentLine.push(segment.join(""));
+
+         prevGenLine = mapping.generatedLine;
+         prevGenColumn = mapping.generatedColumn;
+         prevSourceIndex = mapping.sourceIndex;
+         prevOrigLine = mapping.originalLine;
+         prevOrigColumn = mapping.originalColumn;
+     }
+
+     if (currentLine.length > 0) {
+         lines.push(currentLine.join(","));
+     }
+
+     return {
+         version: 3,
+         file: "",
+         sourceRoot: "",
+         sources: inProgress.sources.map(s => s.name),
+         sourcesContent: inProgress.sources.map(s => s.contents),
+         names: [],
+         mappings: lines.join(";"),
+     };
+ }
+ export function encodeSourceMapLineComment(sourceMap: SourceMap): string {
+     const sourceMapJson = JSON.stringify(sourceMap);
+     const base64 = Buffer.from(sourceMapJson).toString("base64");
+     // NOTE: Don't write it as one string, as then we are detected as a sourcemap, and break sourcemaps...
+     return "//" + `# sourceMappingURL=data:application/json;base64,${base64}`;
+ }
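
Taken together, these helpers cover the full strip, decode, merge, and re-encode cycle a bundler needs when concatenating modules that each carry an inline sourcemap. The sketch below is illustrative only: the concatWithSourceMaps wrapper is hypothetical, and the package's real merge path presumably lives in bundler/flattenSourceMaps.ts / bundler.ts, which are not shown here. Note that addToInProgressSourceMap only remaps sourceIndex, so the caller has to shift generatedLine itself:

    import {
        removeSourceMap, getInProgressSourceMap, addToInProgressSourceMap,
        finalizeInProgressSourceMap, encodeSourceMapLineComment, InProgressSourceMap,
    } from "./sourceMaps";

    // Hypothetical: concatenate two compiled modules and emit one merged inline sourcemap.
    function concatWithSourceMaps(moduleA: string, moduleB: string): string {
        const merged: InProgressSourceMap = { sources: [], mappings: [] };
        let output = "";
        let lineOffset = 0;
        for (const moduleText of [moduleA, moduleB]) {
            const { sourceMap, code } = removeSourceMap(moduleText);
            if (sourceMap) {
                const partial = getInProgressSourceMap(sourceMap);
                // addToInProgressSourceMap only offsets sourceIndex, so shift each mapping's
                // generatedLine by the number of lines already written to the output.
                for (const m of partial.mappings) m.generatedLine += lineOffset;
                addToInProgressSourceMap(merged, partial);
            }
            output += code + "\n";
            lineOffset += code.split("\n").length;
        }
        return output + encodeSourceMapLineComment(finalizeInProgressSourceMap(merged)) + "\n";
    }
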
package/misc/environment.ts ADDED
@@ -0,0 +1,11 @@
+ /// <reference path="../node_modules/@types/chrome/index.d.ts" />
+ export function isInChromeExtension() {
+     return typeof chrome !== "undefined" && chrome.runtime && chrome.runtime.id;
+ }
+ let isInBuildFlag = false;
+ export function triggerIsInBuild() {
+     isInBuildFlag = true;
+ }
+ export function isInBuild() {
+     return isInBuildFlag;
+ }
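
isInChromeExtension() only feature-detects chrome.runtime.id, so it is safe to call in any environment; a typical guard (hypothetical call site, not part of the package) might look like:

    import { isInChromeExtension } from "./environment";

    // Only touch extension-only APIs once we know we are running inside an extension.
    export function getExtensionVersion(): string | undefined {
        if (!isInChromeExtension()) return undefined;
        return chrome.runtime.getManifest().version;
    }
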
package/misc/types.ts ADDED
@@ -0,0 +1,3 @@
+ export function isDefined<T>(value: T | undefined | null): value is T {
+     return value !== undefined && value !== null;
+ }
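
Because isDefined is written as a type guard, Array.prototype.filter can use it to drop null/undefined and narrow the element type in one step; a small illustration (the data here is made up):

    import { isDefined } from "./types";

    const raw: (string | undefined | null)[] = ["a", undefined, "b", null];
    // filter() accepts the type guard, so the result is narrowed to string[].
    const ids: string[] = raw.filter(isDefined);
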
package/misc/zip.ts ADDED
@@ -0,0 +1,37 @@
+
+
+ export class Zip {
+     public static async gzip(buffer: Buffer, level?: number): Promise<Buffer> {
+         return await doStream(new CompressionStream("gzip"), buffer);
+     }
+     public static async gunzip(buffer: Buffer): Promise<Buffer> {
+         return await doStream(new DecompressionStream("gzip"), buffer);
+     }
+
+     public static async gunzipBatch(buffers: Buffer[]): Promise<Buffer[]> {
+         let time = Date.now();
+         buffers = await Promise.all(buffers.map(Zip.gunzip));
+         time = Date.now() - time;
+         //let totalSize = buffers.reduce((acc, buffer) => acc + buffer.length, 0);
+         //console.log(`Gunzip ${formatNumber(totalSize)}B at ${formatNumber(totalSize / time * 1000)}B/s`);
+         return buffers;
+     }
+ }
+
+ async function doStream(stream: GenericTransformStream, buffer: Buffer): Promise<Buffer> {
+     let reader = stream.readable.getReader();
+     let writer = stream.writable.getWriter();
+     let writePromise = writer.write(buffer);
+     let closePromise = writer.close();
+
+     let outputBuffers: Buffer[] = [];
+     while (true) {
+         let { value, done } = await reader.read();
+         if (done) {
+             await writePromise;
+             await closePromise;
+             return Buffer.concat(outputBuffers);
+         }
+         outputBuffers.push(Buffer.from(value));
+     }
+ }
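
Zip wraps the web Compression Streams API, so it assumes a runtime where CompressionStream/DecompressionStream and Buffer are both available (recent Node, or a browser bundle that provides Buffer). Note that the optional level argument to gzip is accepted but never forwarded, since CompressionStream takes no compression-level option. A minimal roundtrip sketch:

    import { Zip } from "./zip";

    async function demo() {
        const original = Buffer.from("a fairly repetitive payload ".repeat(200));
        const compressed = await Zip.gzip(original);
        const restored = await Zip.gunzip(compressed);
        console.log(compressed.length < original.length); // true
        console.log(restored.equals(original));           // true
    }
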
package/package.json ADDED
@@ -0,0 +1,24 @@
+ {
+     "name": "sliftutils",
+     "version": "0.1.1",
+     "main": "index.js",
+     "license": "MIT",
+     "scripts": {
+         "type": "yarn tsc --noEmit"
+     },
+     "dependencies": {
+         "js-sha256": "^0.11.1",
+         "typesafecss": "^0.26.0"
+     },
+     "devDependencies": {
+         "@types/chrome": "^0.0.237",
+         "debugbreak": "^0.9.9",
+         "typedev": "^0.1.1"
+     },
+     "peerDependencies": {
+         "mobx": "^6.13.3",
+         "preact": "10.24.3",
+         "socket-function": "^0.155.0",
+         "typenode": "^5.12.0"
+     }
+ }
package/spec.txt ADDED
@@ -0,0 +1,33 @@
+ TODO:
+ 4) New project with:
+
+     watch helper script in bin
+     - And have it automatically ignore .gitignored files
+     - And have it also watch .watchignore
+
+     - HELPER bin to setup the project with:
+         (Only if the files don't exist)
+         tsconfig.json
+         .gitignore
+         .vscode (for format on save)
+         .eslintrc.js
+         .cursorrules
+         Add dependencies to package.json
+             (Any dependencies our helper project has should be dev dependencies)
+             - typenode
+             - socket-function
+             - typesafecss
+             - typedev
+         index.ts and index.html
+         Add build and type commands to package.json
+
+     EXPOSE our helpers in our main export, in an index.ts file?
+     - Will this let us just import them? Hmm...
+ 5) Test it in a new test repo
+     - Test it with:
+         single html browser site
+         nodejs bundled running
+         electron
+         chrome extension
+     - Make sure sourcemaps are preserved and work
+ 5.1) Use the new project and bundler in voice-cloner
package/storage/CachedStorage.ts ADDED
@@ -0,0 +1,32 @@
+ import { DelayedStorage } from "./DelayedStorage";
+ import { getFileStorageNested } from "./FileFolderAPI";
+ import { IStorageSync } from "./IStorage";
+ import { JSONStorage } from "./JSONStorage";
+ import { PendingStorage } from "./PendingStorage";
+ import { StorageSync } from "./StorageObservable";
+
+ export function newCachedStrStorage<T>(
+     folder: string,
+     getValue: (key: string) => Promise<T>
+ ) {
+     let base = new PendingStorage(`CachedStrStorage`,
+         new DelayedStorage(getFileStorageNested(folder))
+     );
+     let storage = new StorageSync(new JSONStorage<T>(base));
+     let pending = new Set<string>();
+     let baseStorageGet = storage.get;
+     storage.get = (key: string) => {
+         if (!pending.has(key)) {
+             pending.add(key);
+
+             (async () => {
+                 let existingValue = await storage.getPromise(key);
+                 if (existingValue) return;
+                 let value = await getValue(key);
+                 storage.set(key, value);
+             })().catch(console.error);
+         }
+         return baseStorageGet.call(storage, key);
+     };
+     return storage;
+ }
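
newCachedStrStorage layers the other storage wrappers so that the first synchronous get(key) kicks off an asynchronous fill via getValue, and later reads return the persisted value. A hypothetical caller (the folder name, the URL, and the assumption that StorageSync.get returns undefined until the fill lands are illustrative, not from the package):

    import { newCachedStrStorage } from "./CachedStorage";

    type Profile = { name: string };

    const profiles = newCachedStrStorage<Profile>("profiles", async userId => {
        const response = await fetch(`/api/profile/${userId}`);
        return await response.json() as Profile;
    });

    // The first call returns whatever is already stored (likely undefined) and starts the
    // background fill; later calls return the cached value once it has been written.
    const maybeProfile = profiles.get("user-123");
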
package/storage/DelayedStorage.ts ADDED
@@ -0,0 +1,30 @@
+ import { IStorage } from "./IStorage";
+
+ export class DelayedStorage<T> implements IStorage<T> {
+     constructor(private storage: Promise<IStorage<T>>) { }
+     public async get(key: string): Promise<T | undefined> {
+         const storage = await this.storage;
+         return storage.get(key);
+     }
+     public async set(key: string, value: T): Promise<void> {
+         const storage = await this.storage;
+         return storage.set(key, value);
+     }
+     public async remove(key: string): Promise<void> {
+         const storage = await this.storage;
+         return storage.remove(key);
+     }
+     public async getKeys(): Promise<string[]> {
+         const storage = await this.storage;
+         return storage.getKeys();
+     }
+     public async getInfo(key: string) {
+         const storage = await this.storage;
+         return storage.getInfo(key);
+     }
+
+     public async reset() {
+         const storage = await this.storage;
+         return storage.reset();
+     }
+ }
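
DelayedStorage turns a Promise<IStorage<T>> into a plain IStorage<T> by awaiting the promise inside every method, which is useful when the backing store needs an async open step. A sketch under those assumptions (openSettingsStorage is a hypothetical factory; the IStorage<T> shape is inferred from the methods forwarded above):

    import { IStorage } from "./IStorage";
    import { DelayedStorage } from "./DelayedStorage";

    // Hypothetical: a backend that is only usable after an async open.
    declare function openSettingsStorage(name: string): Promise<IStorage<string>>;

    // Construct synchronously; every call transparently waits for the backend first.
    const settings = new DelayedStorage<string>(openSettingsStorage("settings"));

    async function example() {
        await settings.set("theme", "dark");
        console.log(await settings.get("theme")); // "dark"
    }
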