orgnote-api 0.41.50 → 0.42.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -33,6 +33,8 @@ export interface CommonEmbeddedWidget {
33
33
  export interface MultilineEmbeddedWidget extends CommonEmbeddedWidget {
34
34
  widgetBuilder?: WidgetBuilder;
35
35
  component?: Component;
36
+ actionsComponent?: Component;
37
+ actionsComponentProps?: Record<string, unknown>;
36
38
  suppressEdit?: boolean;
37
39
  }
38
40
  export type MultilineEmbeddedWidgets = {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "orgnote-api",
3
- "version": "0.41.50",
3
+ "version": "0.42.0",
4
4
  "description": "Official API for creating extensions for OrgNote app",
5
5
  "type": "module",
6
6
  "main": "./index.js",
@@ -48,6 +48,7 @@
48
48
  "homepage": "https://github.com/artawower/orgnote-api#readme",
49
49
  "dependencies": {
50
50
  "axios": "1.8.4",
51
+ "diff-match-patch": "^1.0.5",
51
52
  "neverthrow": "^8.2.0",
52
53
  "openpgp": "6.3.0",
53
54
  "org-mode-ast": "0.11.8",
@@ -58,6 +59,7 @@
58
59
  "@eslint/js": "9.25.1",
59
60
  "@openapitools/openapi-generator-cli": "2.19.1",
60
61
  "@rollup/rollup-linux-arm64-gnu": "4.14.2",
62
+ "@types/diff-match-patch": "^1.0.36",
61
63
  "@types/node": "22.14.1",
62
64
  "eslint": "9.25.1",
63
65
  "eslint-plugin-sonarjs": "3.0.5",
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,27 @@
1
+ import { expect, test } from 'vitest';
2
+ import { isMergeableFile } from "../types.js";
3
+ test('accepts .org files within size limit', () => {
4
+ expect(isMergeableFile('notes/todo.org', 1024)).toBe(true);
5
+ });
6
+ test('accepts .md files within size limit', () => {
7
+ expect(isMergeableFile('readme.md', 2048)).toBe(true);
8
+ });
9
+ test('rejects non-text extensions', () => {
10
+ expect(isMergeableFile('photo.png', 1024)).toBe(false);
11
+ expect(isMergeableFile('data.json', 500)).toBe(false);
12
+ expect(isMergeableFile('archive.zip', 100)).toBe(false);
13
+ });
14
+ test('rejects files exceeding 512KB', () => {
15
+ const overLimit = 512 * 1024 + 1;
16
+ expect(isMergeableFile('big.org', overLimit)).toBe(false);
17
+ });
18
+ test('accepts files exactly at 512KB boundary', () => {
19
+ const exactLimit = 512 * 1024;
20
+ expect(isMergeableFile('exact.org', exactLimit)).toBe(true);
21
+ });
22
+ test('handles files without extension', () => {
23
+ expect(isMergeableFile('README', 100)).toBe(false);
24
+ });
25
+ test('handles dotfiles', () => {
26
+ expect(isMergeableFile('.gitignore', 100)).toBe(false);
27
+ });
package/sync/index.d.ts CHANGED
@@ -7,5 +7,6 @@ export { recoverState } from './recovery.js';
7
7
  export { getOldestSyncedAt } from './utils/oldest-synced-at.js';
8
8
  export { hashContent, hashBytes } from './utils/content-hash.js';
9
9
  export { processUpload, processDownload, processDeleteLocal, processDeleteRemote, handleConflict, generateConflictPath, hasConflict, } from './operations/index.js';
10
- export { SyncOperationType } from './types.js';
11
- export type { SyncState, SyncStateData, SyncedFile, SyncStatus, LocalFile, RemoteFile, UploadResult, SyncPlan, SyncTask, SyncExecutor, SyncContext, CreateSyncPlanParams, } from './types.js';
10
+ export { mergeText } from './merge/index.js';
11
+ export { SyncOperationType, MergeOutcome, isMergeableFile } from './types.js';
12
+ export type { SyncState, SyncStateData, SyncedFile, SyncStatus, LocalFile, RemoteFile, UploadResult, SyncPlan, SyncTask, SyncExecutor, SyncContext, CreateSyncPlanParams, MergeResult, MergeInputs, BaseContentEntry, BaseContentStore, } from './types.js';
package/sync/index.js CHANGED
@@ -7,4 +7,5 @@ export { recoverState } from "./recovery.js";
7
7
  export { getOldestSyncedAt } from "./utils/oldest-synced-at.js";
8
8
  export { hashContent, hashBytes } from "./utils/content-hash.js";
9
9
  export { processUpload, processDownload, processDeleteLocal, processDeleteRemote, handleConflict, generateConflictPath, hasConflict, } from "./operations/index.js";
10
- export { SyncOperationType } from "./types.js";
10
+ export { mergeText } from "./merge/index.js";
11
+ export { SyncOperationType, MergeOutcome, isMergeableFile } from "./types.js";
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,85 @@
1
+ import { expect, test } from 'vitest';
2
+ import { textToUint8Array } from "../../../utils/binary.js";
3
+ import { mergeText } from "../text-merge.js";
4
+ import { MergeOutcome } from "../../types.js";
5
+ const enc = (s) => textToUint8Array(s);
6
+ test('returns remote when base is empty and local is empty', () => {
7
+ const result = mergeText({
8
+ base: new Uint8Array(0),
9
+ local: new Uint8Array(0),
10
+ remote: enc('remote text'),
11
+ });
12
+ expect(result.outcome).toBe(MergeOutcome.Merged);
13
+ expect(new TextDecoder().decode(result.mergedContent)).toBe('remote text');
14
+ });
15
+ test('returns local when base is empty and remote is empty', () => {
16
+ const result = mergeText({
17
+ base: new Uint8Array(0),
18
+ local: enc('local text'),
19
+ remote: new Uint8Array(0),
20
+ });
21
+ expect(result.outcome).toBe(MergeOutcome.Merged);
22
+ expect(new TextDecoder().decode(result.mergedContent)).toBe('local text');
23
+ });
24
+ test('returns remote when local equals base', () => {
25
+ const base = enc('* TODO Task one\n');
26
+ const result = mergeText({
27
+ base,
28
+ local: base,
29
+ remote: enc('* DONE Task one\n'),
30
+ });
31
+ expect(result.outcome).toBe(MergeOutcome.Merged);
32
+ expect(new TextDecoder().decode(result.mergedContent)).toBe('* DONE Task one\n');
33
+ });
34
+ test('returns local when remote equals base', () => {
35
+ const base = enc('* TODO Task one\n');
36
+ const result = mergeText({
37
+ base,
38
+ local: enc('* TODO Task one\n** Subtask\n'),
39
+ remote: base,
40
+ });
41
+ expect(result.outcome).toBe(MergeOutcome.Merged);
42
+ expect(new TextDecoder().decode(result.mergedContent)).toBe('* TODO Task one\n** Subtask\n');
43
+ });
44
+ test('returns local when all three are identical', () => {
45
+ const content = enc('* Same content\n');
46
+ const result = mergeText({ base: content, local: content, remote: content });
47
+ expect(result.outcome).toBe(MergeOutcome.Merged);
48
+ expect(new TextDecoder().decode(result.mergedContent)).toBe('* Same content\n');
49
+ });
50
+ test('merges non-overlapping edits from local and remote', () => {
51
+ const base = enc('* TODO Task one\n* TODO Task two\n');
52
+ const local = enc('* DONE Task one\n* TODO Task two\n');
53
+ const remote = enc('* TODO Task one\n* DONE Task two\n');
54
+ const result = mergeText({ base, local, remote });
55
+ expect(result.outcome).toBe(MergeOutcome.Merged);
56
+ const merged = new TextDecoder().decode(result.mergedContent);
57
+ expect(merged).toContain('DONE Task one');
58
+ expect(merged).toContain('DONE Task two');
59
+ });
60
+ test('diff-match-patch returns ambiguous for overlapping changes on same region', () => {
61
+ const base = enc('* TODO Task one\n');
62
+ const local = enc('* DONE Task one\n');
63
+ const remote = enc('* CANCELLED Task one\n');
64
+ const result = mergeText({ base, local, remote });
65
+ expect(result.outcome).toBe(MergeOutcome.Ambiguous);
66
+ expect(result.mergedContent).toBeUndefined();
67
+ });
68
+ test('merges insertion at different positions', () => {
69
+ const base = enc('* A\n');
70
+ const local = enc('* A\n* B\n');
71
+ const remote = enc('* A\n* C\n');
72
+ const result = mergeText({ base, local, remote });
73
+ expect(result.outcome).toBe(MergeOutcome.Merged);
74
+ const merged = new TextDecoder().decode(result.mergedContent);
75
+ expect(merged).toContain('* B');
76
+ expect(merged).toContain('* C');
77
+ });
78
+ test('handles empty string content', () => {
79
+ const result = mergeText({
80
+ base: enc(''),
81
+ local: enc(''),
82
+ remote: enc(''),
83
+ });
84
+ expect(result.outcome).toBe(MergeOutcome.Merged);
85
+ });
@@ -0,0 +1 @@
1
+ export { mergeText } from './text-merge.js';
@@ -0,0 +1 @@
1
+ export { mergeText } from "./text-merge.js";
@@ -0,0 +1,2 @@
1
+ import type { MergeInputs, MergeResult } from '../types.js';
2
+ export declare const mergeText: (inputs: MergeInputs) => MergeResult;
@@ -0,0 +1,120 @@
1
+ import DiffMatchPatch from 'diff-match-patch';
2
+ import { textToUint8Array, uint8ArrayToText } from "../../utils/binary.js";
3
+ import { MergeOutcome } from "../types.js";
4
+ const dmp = new DiffMatchPatch();
5
+ const DIFF_EQUAL = 0;
6
+ const DIFF_DELETE = -1;
7
+ const DIFF_INSERT = 1;
8
+ const INITIAL_STATE = {
9
+ regions: [],
10
+ basePos: 0,
11
+ pending: null,
12
+ };
13
+ const commitPending = (state) => {
14
+ if (!state.pending)
15
+ return state;
16
+ const { start, replacement } = state.pending;
17
+ return {
18
+ regions: [
19
+ ...state.regions,
20
+ { baseStart: start, baseEnd: state.basePos, replacement },
21
+ ],
22
+ basePos: state.basePos,
23
+ pending: null,
24
+ };
25
+ };
26
+ const ensurePending = (state) => {
27
+ if (state.pending)
28
+ return state;
29
+ return { ...state, pending: { start: state.basePos, replacement: '' } };
30
+ };
31
+ const applyEqual = (state, text) => ({
32
+ ...commitPending(state),
33
+ basePos: state.basePos + text.length,
34
+ pending: null,
35
+ });
36
+ const applyDelete = (state, text) => ({
37
+ ...ensurePending(state),
38
+ basePos: state.basePos + text.length,
39
+ });
40
+ const applyInsert = (state, text) => {
41
+ const opened = ensurePending(state);
42
+ return {
43
+ ...opened,
44
+ pending: {
45
+ start: opened.pending.start,
46
+ replacement: opened.pending.replacement + text,
47
+ },
48
+ };
49
+ };
50
+ const OP_HANDLERS = {
51
+ [DIFF_EQUAL]: applyEqual,
52
+ [DIFF_DELETE]: applyDelete,
53
+ [DIFF_INSERT]: applyInsert,
54
+ };
55
+ const applyDiffOp = (state, [op, text]) => OP_HANDLERS[op](state, text);
56
+ const diffToChangeRegions = (diffs) => {
57
+ const finalState = diffs.reduce(applyDiffOp, INITIAL_STATE);
58
+ return commitPending(finalState).regions;
59
+ };
60
+ const regionsOverlap = (local, remote) => local.some((lr) => remote.some((rr) => lr.baseStart < rr.baseEnd && rr.baseStart < lr.baseEnd));
61
+ const applyRegionsToBase = (baseText, regions) => {
62
+ const sorted = [...regions].sort((a, b) => a.baseStart - b.baseStart);
63
+ let lastEnd = 0;
64
+ return (sorted
65
+ .map(({ baseStart, baseEnd, replacement }) => {
66
+ const chunk = baseText.substring(lastEnd, baseStart) + replacement;
67
+ lastEnd = baseEnd;
68
+ return chunk;
69
+ })
70
+ .join('') + baseText.substring(lastEnd));
71
+ };
72
+ const computeDiffs = (baseText, changedText) => {
73
+ const diffs = dmp.diff_main(baseText, changedText);
74
+ dmp.diff_cleanupSemantic(diffs);
75
+ return diffs;
76
+ };
77
+ const mergeThreeWay = (baseText, localText, remoteText) => {
78
+ const localRegions = diffToChangeRegions(computeDiffs(baseText, localText));
79
+ const remoteRegions = diffToChangeRegions(computeDiffs(baseText, remoteText));
80
+ if (regionsOverlap(localRegions, remoteRegions))
81
+ return null;
82
+ return applyRegionsToBase(baseText, [...localRegions, ...remoteRegions]);
83
+ };
84
+ const SHORT_CIRCUIT_RULES = [
85
+ { matches: ({ base, local }) => base === local, pick: ({ remote }) => remote },
86
+ { matches: ({ base, remote }) => base === remote, pick: ({ local }) => local },
87
+ { matches: ({ local, remote }) => local === remote, pick: ({ local }) => local },
88
+ ];
89
+ const toTextTriplet = (inputs) => ({
90
+ base: uint8ArrayToText(inputs.base),
91
+ local: uint8ArrayToText(inputs.local),
92
+ remote: uint8ArrayToText(inputs.remote),
93
+ });
94
+ const resolveShortCircuit = (triplet) => SHORT_CIRCUIT_RULES.find((rule) => rule.matches(triplet))?.pick(triplet) ?? null;
95
+ const toMergedResult = (merged) => {
96
+ if (merged === null)
97
+ return { outcome: MergeOutcome.Ambiguous };
98
+ return { outcome: MergeOutcome.Merged, mergedContent: textToUint8Array(merged) };
99
+ };
100
+ const resolveByteShortCircuit = (inputs) => {
101
+ const baseEmpty = inputs.base.length === 0;
102
+ const localEmpty = inputs.local.length === 0;
103
+ const remoteEmpty = inputs.remote.length === 0;
104
+ if (baseEmpty && localEmpty) {
105
+ return { outcome: MergeOutcome.Merged, mergedContent: inputs.remote };
106
+ }
107
+ if (baseEmpty && remoteEmpty) {
108
+ return { outcome: MergeOutcome.Merged, mergedContent: inputs.local };
109
+ }
110
+ return null;
111
+ };
112
+ export const mergeText = (inputs) => {
113
+ const shortCircuit = resolveByteShortCircuit(inputs);
114
+ if (shortCircuit)
115
+ return shortCircuit;
116
+ const triplet = toTextTriplet(inputs);
117
+ const merged = resolveShortCircuit(triplet)
118
+ ?? mergeThreeWay(triplet.base, triplet.local, triplet.remote);
119
+ return toMergedResult(merged);
120
+ };
@@ -0,0 +1,22 @@
1
+ import { expect, test, vi } from 'vitest';
2
+ import { readBinaryContent } from "../read-binary-content.js";
3
+ test('returns Uint8Array as-is', async () => {
4
+ const content = new TextEncoder().encode('hello');
5
+ const fs = { readFile: vi.fn(async () => content) };
6
+ const result = await readBinaryContent(fs, '/test.org');
7
+ expect(result).toBe(content);
8
+ });
9
+ test('converts string to Uint8Array', async () => {
10
+ const fs = {
11
+ readFile: vi.fn(async () => 'hello'),
12
+ };
13
+ const result = await readBinaryContent(fs, '/test.org');
14
+ expect(new TextDecoder().decode(result)).toBe('hello');
15
+ });
16
+ test('handles empty content', async () => {
17
+ const fs = {
18
+ readFile: vi.fn(async () => ''),
19
+ };
20
+ const result = await readBinaryContent(fs, '/test.org');
21
+ expect(result.length).toBe(0);
22
+ });
@@ -0,0 +1,121 @@
1
+ import { expect, test, vi } from 'vitest';
2
+ import { tryMergeConflict } from "../conflict.js";
3
+ import { createMemorySyncState } from "../../memory-state.js";
4
+ const createMergeContext = (overrides) => ({
5
+ fs: overrides.fs,
6
+ executor: (overrides.executor ?? {}),
7
+ state: createMemorySyncState(),
8
+ serverTime: '2024-01-01T00:00:00Z',
9
+ deviceName: 'test-device',
10
+ isDirtyFile: overrides.isDirtyFile,
11
+ });
12
+ const CONFLICT_RESULT = {
13
+ status: 'conflict',
14
+ serverVersion: 5,
15
+ };
16
+ const BASE_CONTENT = new TextEncoder().encode('* TODO Base task\n');
17
+ const LOCAL_CONTENT = new TextEncoder().encode('* DONE Local edit\n');
18
+ const REMOTE_CONTENT = new TextEncoder().encode('* TODO Base task\n* New remote\n');
19
+ test('returns false for non-mergeable file extension', async () => {
20
+ const ctx = createMergeContext({
21
+ fs: {
22
+ fileInfo: vi.fn(async () => ({ mtime: 1, size: 100 })),
23
+ },
24
+ });
25
+ const result = await tryMergeConflict('/image.png', CONFLICT_RESULT, BASE_CONTENT, ctx);
26
+ expect(result).toBeNull();
27
+ });
28
+ test('returns false when no base content provided', async () => {
29
+ const ctx = createMergeContext({
30
+ fs: {
31
+ fileInfo: vi.fn(async () => ({ mtime: 1, size: 100 })),
32
+ },
33
+ });
34
+ const result = await tryMergeConflict('/note.org', CONFLICT_RESULT, null, ctx);
35
+ expect(result).toBeNull();
36
+ });
37
+ test('returns false when file is dirty in editor', async () => {
38
+ const ctx = createMergeContext({
39
+ fs: {
40
+ fileInfo: vi.fn(async () => ({ mtime: 1, size: 200 })),
41
+ },
42
+ isDirtyFile: async () => true,
43
+ });
44
+ const result = await tryMergeConflict('/note.org', CONFLICT_RESULT, BASE_CONTENT, ctx);
45
+ expect(result).toBeNull();
46
+ });
47
+ test('returns false when remote download fails with 404', async () => {
48
+ const { AxiosError } = await import('axios');
49
+ const notFoundResponse = {
50
+ status: 404,
51
+ statusText: 'Not Found',
52
+ headers: {},
53
+ config: {},
54
+ data: {},
55
+ };
56
+ const ctx = createMergeContext({
57
+ fs: {
58
+ fileInfo: vi.fn(async () => ({ mtime: 1, size: 200 })),
59
+ readFile: vi.fn(async () => LOCAL_CONTENT),
60
+ writeFile: vi.fn(async () => undefined),
61
+ deleteFile: vi.fn(async () => undefined),
62
+ },
63
+ executor: {
64
+ download: vi.fn(async () => {
65
+ throw new AxiosError('Not Found', 'ERR_BAD_REQUEST', undefined, undefined, notFoundResponse);
66
+ }),
67
+ },
68
+ });
69
+ const result = await tryMergeConflict('/note.org', CONFLICT_RESULT, BASE_CONTENT, ctx);
70
+ expect(result).toBeNull();
71
+ });
72
+ test('returns false when no base available for merge', async () => {
73
+ const ctx = createMergeContext({
74
+ fs: {
75
+ fileInfo: vi.fn(async () => ({ mtime: 1, size: 200 })),
76
+ },
77
+ });
78
+ const result = await tryMergeConflict('/note.org', CONFLICT_RESULT, null, ctx);
79
+ expect(result).toBeNull();
80
+ });
81
+ test('writes merged content when merge succeeds', async () => {
82
+ const ctx = createMergeContext({
83
+ fs: {
84
+ fileInfo: vi.fn(async () => ({ mtime: 1, size: 200 })),
85
+ readFile: vi.fn()
86
+ .mockResolvedValueOnce(LOCAL_CONTENT)
87
+ .mockResolvedValueOnce(REMOTE_CONTENT),
88
+ writeFile: vi.fn(async () => undefined),
89
+ deleteFile: vi.fn(async () => undefined),
90
+ },
91
+ executor: {
92
+ download: vi.fn(async () => undefined),
93
+ },
94
+ });
95
+ const result = await tryMergeConflict('/note.org', CONFLICT_RESULT, BASE_CONTENT, ctx);
96
+ expect(result).toBeTruthy();
97
+ expect(ctx.fs.writeFile).toHaveBeenCalledWith('/note.org', expect.any(Uint8Array));
98
+ });
99
+ test('reads local content before downloading remote', async () => {
100
+ const callOrder = [];
101
+ const ctx = createMergeContext({
102
+ fs: {
103
+ fileInfo: vi.fn(async () => ({ mtime: 1, size: 200 })),
104
+ readFile: vi.fn(async () => {
105
+ callOrder.push('readLocal');
106
+ return LOCAL_CONTENT;
107
+ }),
108
+ writeFile: vi.fn(async () => undefined),
109
+ deleteFile: vi.fn(async () => {
110
+ callOrder.push('deleteTemp');
111
+ }),
112
+ },
113
+ executor: {
114
+ download: vi.fn(async () => {
115
+ callOrder.push('download');
116
+ }),
117
+ },
118
+ });
119
+ await tryMergeConflict('/note.org', CONFLICT_RESULT, BASE_CONTENT, ctx);
120
+ expect(callOrder.indexOf('readLocal')).toBeLessThan(callOrder.indexOf('download'));
121
+ });
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,211 @@
1
+ import { expect, test, vi } from 'vitest';
2
+ import { processUpload } from "../upload.js";
3
+ import { createMemorySyncState } from "../../memory-state.js";
4
+ vi.mock('../content-hash', () => ({
5
+ resolveContentHash: vi.fn(async () => 'mock-hash'),
6
+ }));
7
+ const createUploadContext = (overrides) => ({
8
+ fs: overrides.fs,
9
+ executor: (overrides.executor ?? {}),
10
+ state: createMemorySyncState(),
11
+ serverTime: '2024-01-01T00:00:00Z',
12
+ deviceName: 'test-device',
13
+ isDirtyFile: overrides.isDirtyFile,
14
+ baseStore: overrides.baseStore,
15
+ });
16
+ const LOCAL_FILE = {
17
+ path: '/note.org',
18
+ mtime: 100,
19
+ size: 50,
20
+ contentHash: 'abc123',
21
+ };
22
+ const createBaseStore = (storedBase) => ({
23
+ get: vi.fn(async () => storedBase ?? null),
24
+ set: vi.fn(async () => undefined),
25
+ remove: vi.fn(async () => undefined),
26
+ });
27
+ test('upload without baseStore uses fallback conflict on 409', async () => {
28
+ const ctx = createUploadContext({
29
+ fs: {
30
+ copyFile: vi.fn(async () => undefined),
31
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 50 })),
32
+ readFile: vi.fn(async () => new TextEncoder().encode('server content')),
33
+ deleteFile: vi.fn(async () => undefined),
34
+ },
35
+ executor: {
36
+ upload: vi.fn(async () => ({
37
+ status: 'conflict',
38
+ serverVersion: 3,
39
+ })),
40
+ download: vi.fn(async () => undefined),
41
+ },
42
+ });
43
+ await processUpload(LOCAL_FILE, ctx);
44
+ const stored = await ctx.state.getFile('/note.org');
45
+ expect(stored?.status).toBe('synced');
46
+ expect(stored?.version).toBe(3);
47
+ expect(stored?.conflictPath).toContain('.sync-conflict-');
48
+ });
49
+ test('successful upload without baseStore persists synced state', async () => {
50
+ const ctx = createUploadContext({
51
+ fs: {
52
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 50 })),
53
+ readFile: vi.fn(async () => new TextEncoder().encode('content')),
54
+ },
55
+ executor: {
56
+ upload: vi.fn(async () => ({ status: 'ok', version: 2 })),
57
+ },
58
+ });
59
+ await processUpload(LOCAL_FILE, ctx);
60
+ const stored = await ctx.state.getFile('/note.org');
61
+ expect(stored?.status).toBe('synced');
62
+ expect(stored?.version).toBe(2);
63
+ });
64
+ test('successful upload with baseStore updates base', async () => {
65
+ const fileContent = new TextEncoder().encode('* Task\n');
66
+ const baseStore = createBaseStore();
67
+ const ctx = createUploadContext({
68
+ fs: {
69
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 50 })),
70
+ readFile: vi.fn(async () => fileContent),
71
+ },
72
+ executor: {
73
+ upload: vi.fn(async () => ({ status: 'ok', version: 4 })),
74
+ },
75
+ baseStore,
76
+ });
77
+ await processUpload(LOCAL_FILE, ctx);
78
+ expect(baseStore.set).toHaveBeenCalledWith('/note.org', expect.objectContaining({
79
+ path: '/note.org',
80
+ version: 4,
81
+ content: fileContent,
82
+ }));
83
+ });
84
+ test('merge-retry flow: conflict → merge → retry success → synced', async () => {
85
+ const baseContent = new TextEncoder().encode('* TODO Base\n');
86
+ const localContent = new TextEncoder().encode('* DONE Local\n');
87
+ const remoteContent = new TextEncoder().encode('* TODO Base\n* New\n');
88
+ const baseStore = createBaseStore({
89
+ path: '/note.org',
90
+ version: 2,
91
+ contentHash: 'old-hash',
92
+ content: baseContent,
93
+ updatedAt: '2024-01-01T00:00:00Z',
94
+ });
95
+ let uploadCallCount = 0;
96
+ const ctx = createUploadContext({
97
+ fs: {
98
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 200 })),
99
+ readFile: vi.fn()
100
+ .mockResolvedValueOnce(localContent)
101
+ .mockResolvedValueOnce(remoteContent)
102
+ .mockResolvedValueOnce(localContent),
103
+ writeFile: vi.fn(async () => undefined),
104
+ deleteFile: vi.fn(async () => undefined),
105
+ },
106
+ executor: {
107
+ upload: vi.fn(async () => {
108
+ uploadCallCount++;
109
+ if (uploadCallCount === 1) {
110
+ return { status: 'conflict', serverVersion: 3 };
111
+ }
112
+ return { status: 'ok', version: 4 };
113
+ }),
114
+ download: vi.fn(async () => undefined),
115
+ },
116
+ baseStore,
117
+ });
118
+ await processUpload(LOCAL_FILE, ctx);
119
+ expect(uploadCallCount).toBe(2);
120
+ const stored = await ctx.state.getFile('/note.org');
121
+ expect(stored?.status).toBe('synced');
122
+ expect(stored?.version).toBe(4);
123
+ expect(stored?.conflictPath).toBeUndefined();
124
+ });
125
+ test('merge-retry flow: conflict → merge fails → fallback conflict', async () => {
126
+ const baseContent = new TextEncoder().encode('* TODO Task\n');
127
+ const localContent = new TextEncoder().encode('* DONE Task\n');
128
+ const remoteContent = new TextEncoder().encode('* CANCELLED Task\n');
129
+ const baseStore = createBaseStore({
130
+ path: '/note.org',
131
+ version: 2,
132
+ contentHash: 'old-hash',
133
+ content: baseContent,
134
+ updatedAt: '2024-01-01T00:00:00Z',
135
+ });
136
+ const ctx = createUploadContext({
137
+ fs: {
138
+ copyFile: vi.fn(async () => undefined),
139
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 200 })),
140
+ readFile: vi.fn()
141
+ .mockResolvedValueOnce(localContent)
142
+ .mockResolvedValueOnce(remoteContent)
143
+ .mockResolvedValueOnce(new TextEncoder().encode('server')),
144
+ writeFile: vi.fn(async () => undefined),
145
+ deleteFile: vi.fn(async () => undefined),
146
+ },
147
+ executor: {
148
+ upload: vi.fn(async () => ({
149
+ status: 'conflict',
150
+ serverVersion: 3,
151
+ })),
152
+ download: vi.fn(async () => undefined),
153
+ },
154
+ baseStore,
155
+ });
156
+ await processUpload(LOCAL_FILE, ctx);
157
+ const stored = await ctx.state.getFile('/note.org');
158
+ expect(stored?.status).toBe('synced');
159
+ expect(stored?.conflictPath).toContain('.sync-conflict-');
160
+ });
161
+ test('merge-retry flow: merge succeeds but retry conflicts → fallback', async () => {
162
+ const baseContent = new TextEncoder().encode('* TODO Base\n');
163
+ const localContent = new TextEncoder().encode('* DONE Local\n');
164
+ const remoteContent = new TextEncoder().encode('* TODO Base\n* New\n');
165
+ const baseStore = createBaseStore({
166
+ path: '/note.org',
167
+ version: 2,
168
+ contentHash: 'old-hash',
169
+ content: baseContent,
170
+ updatedAt: '2024-01-01T00:00:00Z',
171
+ });
172
+ const ctx = createUploadContext({
173
+ fs: {
174
+ copyFile: vi.fn(async () => undefined),
175
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 200 })),
176
+ readFile: vi.fn()
177
+ .mockResolvedValueOnce(localContent)
178
+ .mockResolvedValueOnce(remoteContent)
179
+ .mockResolvedValueOnce(localContent)
180
+ .mockResolvedValueOnce(new TextEncoder().encode('server')),
181
+ writeFile: vi.fn(async () => undefined),
182
+ deleteFile: vi.fn(async () => undefined),
183
+ },
184
+ executor: {
185
+ upload: vi.fn(async () => ({
186
+ status: 'conflict',
187
+ serverVersion: 3,
188
+ })),
189
+ download: vi.fn(async () => undefined),
190
+ },
191
+ baseStore,
192
+ });
193
+ await processUpload(LOCAL_FILE, ctx);
194
+ const stored = await ctx.state.getFile('/note.org');
195
+ expect(stored?.status).toBe('synced');
196
+ expect(stored?.conflictPath).toContain('.sync-conflict-');
197
+ });
198
+ test('upload error stores error status and rethrows', async () => {
199
+ const ctx = createUploadContext({
200
+ fs: {},
201
+ executor: {
202
+ upload: vi.fn(async () => {
203
+ throw new Error('network failure');
204
+ }),
205
+ },
206
+ });
207
+ await expect(processUpload(LOCAL_FILE, ctx)).rejects.toThrow('network failure');
208
+ const stored = await ctx.state.getFile('/note.org');
209
+ expect(stored?.status).toBe('error');
210
+ expect(stored?.errorMessage).toContain('network failure');
211
+ });
@@ -3,6 +3,7 @@ type ConflictUploadResult = Extract<UploadResult, {
3
3
  status: 'conflict';
4
4
  }>;
5
5
  export declare const generateConflictPath: (path: string, deviceName?: string) => string;
6
+ export declare const tryMergeConflict: (path: string, conflictResult: ConflictUploadResult, baseContent: Uint8Array | null, ctx: SyncContext) => Promise<Uint8Array | null>;
6
7
  export declare const handleConflict: (path: string, conflictResult: ConflictUploadResult, ctx: SyncContext) => Promise<void>;
7
8
  export declare const hasConflict: (file: {
8
9
  conflictPath?: string;
@@ -1,12 +1,18 @@
1
+ import axios from 'axios';
2
+ import { MergeOutcome } from "../types.js";
1
3
  import { createSyncedFile } from "./synced-file.js";
2
4
  import { resolveContentHash } from "./content-hash.js";
3
- import axios from 'axios';
5
+ import { readBinaryContent } from "./read-binary-content.js";
6
+ import { mergeText } from "../merge/text-merge.js";
7
+ import { isMergeableFile } from "../types.js";
8
+ import { to } from "../../utils/to-error.js";
4
9
  export const generateConflictPath = (path, deviceName = 'device') => {
5
10
  const lastDot = path.lastIndexOf('.');
6
11
  const ext = lastDot >= 0 ? path.substring(lastDot) : '';
7
12
  const base = lastDot >= 0 ? path.substring(0, lastDot) : path;
8
13
  const timestamp = Date.now();
9
- return `${base}.sync-conflict-${timestamp}-${deviceName}${ext}`;
14
+ const safeDeviceName = deviceName.replace(/[^a-zA-Z0-9_-]/g, '_');
15
+ return `${base}.sync-conflict-${timestamp}-${safeDeviceName}${ext}`;
10
16
  };
11
17
  const copyFile = async (fs, src, dest) => {
12
18
  if (fs.copyFile) {
@@ -16,24 +22,106 @@ const copyFile = async (fs, src, dest) => {
16
22
  const content = await fs.readFile(src, 'binary');
17
23
  await fs.writeFile(dest, content);
18
24
  };
19
- const isNotFoundError = (error) => {
20
- return axios.isAxiosError(error) && error.response?.status === 404;
25
+ const isAxiosNotFound = (error) => {
26
+ if (!axios.isAxiosError(error))
27
+ return false;
28
+ return error.response?.status === 404;
21
29
  };
22
- const tryDownloadServerVersion = async (path, serverVersion, ctx) => {
30
+ const refreshBaseStore = async (path, version, contentHash, ctx) => {
31
+ if (!ctx.baseStore)
32
+ return;
33
+ const content = await readBinaryContent(ctx.fs, path);
34
+ await ctx.baseStore.set(path, {
35
+ path,
36
+ version,
37
+ contentHash,
38
+ content,
39
+ updatedAt: ctx.serverTime,
40
+ });
41
+ };
42
+ const removeBaseStoreEntry = async (path, ctx) => {
43
+ if (!ctx.baseStore)
44
+ return;
45
+ await ctx.baseStore.remove(path);
46
+ };
47
+ const downloadRemoteContent = async (path, version, ctx) => {
48
+ const result = await to(ctx.executor.download)({
49
+ path,
50
+ version,
51
+ deleted: false,
52
+ updatedAt: new Date().toISOString(),
53
+ });
54
+ if (result.isErr()) {
55
+ if (isAxiosNotFound(result.error))
56
+ return null;
57
+ throw result.error;
58
+ }
59
+ return readBinaryContent(ctx.fs, path);
60
+ };
61
+ const isDirty = async (path, ctx) => {
62
+ if (!ctx.isDirtyFile)
63
+ return false;
64
+ return ctx.isDirtyFile(path);
65
+ };
66
+ const canAttemptMerge = async (path, baseContent, ctx) => {
67
+ const fileInfo = await ctx.fs.fileInfo(path);
68
+ if (!fileInfo) {
69
+ return false;
70
+ }
71
+ if (!isMergeableFile(path, fileInfo.size)) {
72
+ return false;
73
+ }
74
+ if (!baseContent) {
75
+ return false;
76
+ }
77
+ const dirty = await isDirty(path, ctx);
78
+ if (dirty) {
79
+ return false;
80
+ }
81
+ return true;
82
+ };
83
+ export const tryMergeConflict = async (path, conflictResult, baseContent, ctx) => {
84
+ if (!(await canAttemptMerge(path, baseContent, ctx)))
85
+ return null;
86
+ const localContent = await readBinaryContent(ctx.fs, path);
87
+ let remoteContent;
23
88
  try {
24
- await ctx.executor.download({
25
- path,
26
- version: serverVersion,
27
- deleted: false,
28
- updatedAt: new Date().toISOString(),
29
- });
30
- return true;
89
+ remoteContent = await downloadRemoteContent(path, conflictResult.serverVersion, ctx);
31
90
  }
32
91
  catch (error) {
33
- if (isNotFoundError(error))
34
- return false;
92
+ await ctx.fs.writeFile(path, localContent);
35
93
  throw error;
36
94
  }
95
+ if (!remoteContent) {
96
+ await ctx.fs.writeFile(path, localContent);
97
+ return null;
98
+ }
99
+ const inputs = {
100
+ base: baseContent,
101
+ local: localContent,
102
+ remote: remoteContent,
103
+ };
104
+ const mergeResult = mergeText(inputs);
105
+ if (mergeResult.outcome !== MergeOutcome.Merged ||
106
+ !mergeResult.mergedContent) {
107
+ return null;
108
+ }
109
+ await ctx.fs.writeFile(path, mergeResult.mergedContent);
110
+ return mergeResult.mergedContent;
111
+ };
112
+ const tryDownloadServerVersion = async (path, serverVersion, ctx) => {
113
+ const result = await to(ctx.executor.download)({
114
+ path,
115
+ version: serverVersion,
116
+ deleted: false,
117
+ updatedAt: new Date().toISOString(),
118
+ });
119
+ if (result.isErr()) {
120
+ if (isAxiosNotFound(result.error))
121
+ return false;
122
+ throw result.error;
123
+ }
124
+ return true;
37
125
  };
38
126
  export const handleConflict = async (path, conflictResult, ctx) => {
39
127
  const conflictPath = generateConflictPath(path, ctx.deviceName);
@@ -42,6 +130,7 @@ export const handleConflict = async (path, conflictResult, ctx) => {
42
130
  if (!downloaded) {
43
131
  await ctx.fs.deleteFile(path);
44
132
  await ctx.state.removeFile(path);
133
+ await removeBaseStoreEntry(path, ctx);
45
134
  return;
46
135
  }
47
136
  const fileInfo = await ctx.fs.fileInfo(path);
@@ -56,5 +145,6 @@ export const handleConflict = async (path, conflictResult, ctx) => {
56
145
  status: 'synced',
57
146
  conflictPath,
58
147
  }));
148
+ await refreshBaseStore(path, conflictResult.serverVersion, contentHash, ctx);
59
149
  };
60
150
  export const hasConflict = (file) => file.conflictPath !== undefined;
@@ -2,6 +2,7 @@ export const processDeleteLocal = async (path, ctx) => {
2
2
  try {
3
3
  await ctx.fs.deleteFile(path);
4
4
  await ctx.state.removeFile(path);
5
+ await removeBaseStoreEntry(path, ctx);
5
6
  }
6
7
  catch (error) {
7
8
  const stored = await ctx.state.getFile(path);
@@ -15,3 +16,8 @@ export const processDeleteLocal = async (path, ctx) => {
15
16
  throw error;
16
17
  }
17
18
  };
19
+ const removeBaseStoreEntry = async (path, ctx) => {
20
+ if (!ctx.baseStore)
21
+ return;
22
+ await ctx.baseStore.remove(path);
23
+ };
@@ -8,10 +8,12 @@ export const processDeleteRemote = async (path, ctx) => {
8
8
  try {
9
9
  await ctx.executor.deleteRemote(path, stored?.version ?? 0);
10
10
  await ctx.state.removeFile(path);
11
+ await removeBaseStoreEntry(path, ctx);
11
12
  }
12
13
  catch (error) {
13
14
  if (isNotFoundError(error)) {
14
15
  await ctx.state.removeFile(path);
16
+ await removeBaseStoreEntry(path, ctx);
15
17
  return;
16
18
  }
17
19
  if (stored) {
@@ -24,3 +26,8 @@ export const processDeleteRemote = async (path, ctx) => {
24
26
  throw error;
25
27
  }
26
28
  };
29
+ const removeBaseStoreEntry = async (path, ctx) => {
30
+ if (!ctx.baseStore)
31
+ return;
32
+ await ctx.baseStore.remove(path);
33
+ };
@@ -1,5 +1,6 @@
1
1
  import { createSyncedFile } from "./synced-file.js";
2
2
  import { resolveContentHash } from "./content-hash.js";
3
+ import { readBinaryContent } from "./read-binary-content.js";
3
4
  const storedMeta = (stored) => ({
4
5
  mtime: stored?.mtime ?? 0,
5
6
  size: stored?.size ?? 0,
@@ -33,6 +34,18 @@ const markError = async (file, stored, error, ctx) => {
33
34
  errorMessage: String(error),
34
35
  }));
35
36
  };
37
// After a successful download, snapshot the on-disk content into the base
// store so future conflicts can be three-way merged. No-op without a store.
const refreshBaseStoreAfterDownload = async (path, version, contentHash, ctx) => {
    const store = ctx.baseStore;
    if (!store)
        return;
    const content = await readBinaryContent(ctx.fs, path);
    const entry = {
        path,
        version,
        contentHash,
        content,
        updatedAt: ctx.serverTime,
    };
    await store.set(path, entry);
};
36
49
  export const processDownload = async (file, ctx) => {
37
50
  const stored = await ctx.state.getFile(file.path);
38
51
  await markDownloading(file, stored, ctx);
@@ -40,6 +53,7 @@ export const processDownload = async (file, ctx) => {
40
53
  await ctx.executor.download(file);
41
54
  const downloadedMeta = await resolveDownloadedMeta(file, ctx);
42
55
  await markSynced(file, downloadedMeta, ctx);
56
+ await refreshBaseStoreAfterDownload(file.path, file.version, downloadedMeta.contentHash ?? '', ctx);
43
57
  }
44
58
  catch (error) {
45
59
  await markError(file, stored, error, ctx);
@@ -2,4 +2,5 @@ export { processUpload } from './upload.js';
2
2
  export { processDownload } from './download.js';
3
3
  export { processDeleteLocal } from './delete-local.js';
4
4
  export { processDeleteRemote } from './delete-remote.js';
5
- export { handleConflict, generateConflictPath, hasConflict } from './conflict.js';
5
+ export { handleConflict, generateConflictPath, hasConflict, } from './conflict.js';
6
+ export { readBinaryContent } from './read-binary-content.js';
@@ -2,4 +2,5 @@ export { processUpload } from "./upload.js";
2
2
  export { processDownload } from "./download.js";
3
3
  export { processDeleteLocal } from "./delete-local.js";
4
4
  export { processDeleteRemote } from "./delete-remote.js";
5
- export { handleConflict, generateConflictPath, hasConflict } from "./conflict.js";
5
+ export { handleConflict, generateConflictPath, hasConflict, } from "./conflict.js";
6
+ export { readBinaryContent } from "./read-binary-content.js";
@@ -0,0 +1,2 @@
1
import type { FileSystem } from '../../models/file-system.js';
/**
 * Read the file at `path` from the given file system as raw bytes.
 * Non-`Uint8Array` results from the backend are normalized to `Uint8Array`.
 */
export declare const readBinaryContent: (fs: FileSystem, path: string) => Promise<Uint8Array>;
@@ -0,0 +1,6 @@
1
+ export const readBinaryContent = async (fs, path) => {
2
+ const content = await fs.readFile(path, 'binary');
3
+ return content instanceof Uint8Array
4
+ ? content
5
+ : new TextEncoder().encode(String(content));
6
+ };
@@ -1,30 +1,74 @@
1
- import { handleConflict } from "./conflict.js";
1
+ import { handleConflict, tryMergeConflict } from "./conflict.js";
2
2
  import { createSyncedFile } from "./synced-file.js";
3
- const executeUpload = async (file, expectedVersion, ctx) => {
4
- const result = await ctx.executor.upload(file, expectedVersion);
5
- if (result.status === 'ok') {
6
- await ctx.state.setFile(file.path, createSyncedFile(file, {
7
- version: result.version,
8
- status: 'synced',
9
- syncedAt: ctx.serverTime,
10
- }));
11
- return;
12
- }
13
- await handleConflict(file.path, result, ctx);
3
+ import { resolveContentHash } from "./content-hash.js";
4
+ import { readBinaryContent } from "./read-binary-content.js";
5
+ import { hashContent } from "../utils/content-hash.js";
6
+ import { to } from "../../utils/to-error.js";
7
// Record a failed upload in sync state, keeping the version the caller
// expected so the next sync attempt starts from the same point.
const persistErrorState = async (file, expectedVersion, error, ctx) => {
    const errorEntry = createSyncedFile(file, {
        version: expectedVersion,
        status: 'error',
        errorMessage: String(error),
    });
    await ctx.state.setFile(file.path, errorEntry);
};
15
14
/**
 * Upload a single file: mark it as uploading, run the upload strategy,
 * and either finish silently or persist an error state and rethrow.
 * The merge-retry strategy is only used when a base store is configured.
 */
export const processUpload = async (file, ctx) => {
    const stored = await ctx.state.getFile(file.path);
    const expectedVersion = stored?.version;
    await ctx.state.setFile(file.path, createSyncedFile(file, { version: expectedVersion, status: 'uploading' }));
    const strategy = ctx.baseStore ? executeUploadWithMergeRetry : executeUpload;
    const result = await to(() => strategy(file, expectedVersion, ctx))();
    if (result.isErr()) {
        await persistErrorState(file, expectedVersion, result.error, ctx);
        throw result.error;
    }
};
27
// Single-shot upload: on success persist a synced snapshot; on any
// non-ok status defer to conflict handling.
const executeUpload = async (file, expectedVersion, ctx) => {
    const uploadResult = await ctx.executor.upload(file, expectedVersion);
    if (uploadResult.status === 'ok') {
        await persistSyncedSnapshot(file.path, uploadResult.version, ctx);
        return;
    }
    await handleConflict(file.path, uploadResult, ctx);
};
35
// Upload with one automatic three-way-merge retry on version conflict.
// Flow: upload → on conflict, merge local/remote against the base snapshot →
// re-upload the merged content at the server's reported version → fall back
// to manual conflict handling if any step cannot complete.
const executeUploadWithMergeRetry = async (file, expectedVersion, ctx) => {
    const result = await ctx.executor.upload(file, expectedVersion);
    if (result.status === 'ok') {
        await persistSyncedSnapshot(file.path, result.version, ctx);
        return;
    }
    // Conflict: fetch the last-synced base content (null when no base store
    // or nothing snapshotted yet — merge then has no base to work from).
    const baseEntry = ctx.baseStore ? await ctx.baseStore.get(file.path) : null;
    // tryMergeConflict writes the merged bytes to disk and returns them,
    // or null when the merge was ambiguous or remote content was unavailable.
    const mergedContent = await tryMergeConflict(file.path, result, baseEntry?.content ?? null, ctx);
    if (!mergedContent) {
        await handleConflict(file.path, result, ctx);
        return;
    }
    const contentHash = await hashContent(mergedContent);
    const mergedFile = { ...file, contentHash };
    // Retry against the version the server reported in the conflict, so a
    // concurrent write that landed in between still surfaces as a conflict.
    const retryResult = await ctx.executor.upload(mergedFile, result.serverVersion);
    if (retryResult.status !== 'ok') {
        await handleConflict(file.path, retryResult, ctx);
        return;
    }
    await persistSyncedSnapshot(file.path, retryResult.version, ctx);
};
56
// Persist post-upload sync metadata (mtime/size/hash from disk) and, when a
// base store is configured, refresh the base-content snapshot for merging.
const persistSyncedSnapshot = async (path, version, ctx) => {
    const fileInfo = await ctx.fs.fileInfo(path);
    // A missing file yields an empty hash rather than failing the snapshot.
    const contentHash = fileInfo ? await resolveContentHash(ctx.fs, path) : '';
    const meta = {
        mtime: fileInfo?.mtime ?? 0,
        size: fileInfo?.size ?? 0,
        contentHash,
    };
    await ctx.state.setFile(path, createSyncedFile(meta, { version, status: 'synced', syncedAt: ctx.serverTime }));
    if (ctx.baseStore) {
        const content = await readBinaryContent(ctx.fs, path);
        await ctx.baseStore.set(path, {
            path,
            version,
            contentHash,
            content,
            updatedAt: ctx.serverTime,
        });
    }
};
package/sync/types.d.ts CHANGED
@@ -74,4 +74,32 @@ export interface SyncContext {
74
74
  fs: FileSystem;
75
75
  serverTime: string;
76
76
  deviceName?: string;
77
+ isDirtyFile?: (path: string) => Promise<boolean> | boolean;
78
+ baseStore?: BaseContentStore;
77
79
  }
80
/** Classification of a three-way text merge attempt. */
export declare enum MergeOutcome {
    /** Merge produced a single combined result. */
    Merged = "merged",
    /** Edits overlap; manual conflict resolution is required. */
    Ambiguous = "ambiguous"
}
/** Merge result; `mergedContent` is expected only when `outcome` is `Merged`. */
export interface MergeResult {
    outcome: MergeOutcome;
    mergedContent?: Uint8Array;
}
/** Snapshot of a file's last-synced content, used as the merge base. */
export interface BaseContentEntry {
    path: string;
    version: number;
    contentHash: string;
    content: Uint8Array;
    updatedAt: string;
}
/** Storage for base-content snapshots, keyed by file path. */
export interface BaseContentStore {
    get(path: string): Promise<BaseContentEntry | null>;
    set(path: string, entry: BaseContentEntry): Promise<void>;
    remove(path: string): Promise<void>;
}
/** The three revisions fed into a three-way merge. */
export interface MergeInputs {
    base: Uint8Array;
    local: Uint8Array;
    remote: Uint8Array;
}
/** Whether a file is eligible for automatic merge (text extension, size cap). */
export declare const isMergeableFile: (path: string, size: number) => boolean;
package/sync/types.js CHANGED
@@ -5,3 +5,15 @@ export var SyncOperationType;
5
5
  SyncOperationType["DeleteLocal"] = "deleteLocal";
6
6
  SyncOperationType["DeleteRemote"] = "deleteRemote";
7
7
  })(SyncOperationType || (SyncOperationType = {}));
8
// Runtime values for the MergeOutcome enum (standard TypeScript enum emit).
export var MergeOutcome;
(function (MergeOutcome) {
    MergeOutcome["Merged"] = "merged";
    MergeOutcome["Ambiguous"] = "ambiguous";
})(MergeOutcome || (MergeOutcome = {}));
13
// File extensions eligible for automatic three-way merge.
const MERGEABLE_EXTENSIONS = new Set(['.org', '.md']);
// Merging large files is slow and memory-hungry; cap eligibility at 512 KiB.
const MAX_MERGEABLE_SIZE_BYTES = 512 * 1024;
/**
 * Whether a file qualifies for automatic merge: known text extension
 * (matched case-insensitively, so "NOTES.ORG" counts) and size within the cap.
 *
 * @param path - file path; only the suffix after the last '.' is examined
 * @param size - file size in bytes
 */
export const isMergeableFile = (path, size) => {
    const lastDot = path.lastIndexOf('.');
    const ext = lastDot >= 0 ? path.substring(lastDot).toLowerCase() : '';
    return MERGEABLE_EXTENSIONS.has(ext) && size <= MAX_MERGEABLE_SIZE_BYTES;
};