orgnote-api 0.41.35 → 0.41.36

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32) hide show
  1. package/package.json +1 -1
  2. package/sync/__tests__/create-sync-plan.spec.d.ts +1 -0
  3. package/sync/__tests__/create-sync-plan.spec.js +55 -0
  4. package/sync/__tests__/fetch.spec.d.ts +1 -0
  5. package/sync/__tests__/fetch.spec.js +27 -0
  6. package/sync/__tests__/plan.spec.js +240 -22
  7. package/sync/create-sync-plan.js +23 -3
  8. package/sync/fetch.js +1 -0
  9. package/sync/index.d.ts +1 -0
  10. package/sync/index.js +1 -0
  11. package/sync/operations/__tests__/conflict.spec.d.ts +1 -0
  12. package/sync/operations/__tests__/conflict.spec.js +62 -0
  13. package/sync/operations/__tests__/content-hash.spec.d.ts +1 -0
  14. package/sync/operations/__tests__/content-hash.spec.js +37 -0
  15. package/sync/operations/__tests__/download.spec.d.ts +1 -0
  16. package/sync/operations/__tests__/download.spec.js +69 -0
  17. package/sync/operations/__tests__/fixtures.d.ts +10 -0
  18. package/sync/operations/__tests__/fixtures.js +9 -0
  19. package/sync/operations/__tests__/synced-file.spec.d.ts +1 -0
  20. package/sync/operations/__tests__/synced-file.spec.js +10 -0
  21. package/sync/operations/conflict.js +9 -5
  22. package/sync/operations/content-hash.d.ts +2 -0
  23. package/sync/operations/content-hash.js +17 -0
  24. package/sync/operations/download.js +34 -6
  25. package/sync/operations/synced-file.d.ts +1 -0
  26. package/sync/operations/synced-file.js +1 -0
  27. package/sync/plan.js +52 -16
  28. package/sync/types.d.ts +4 -1
  29. package/sync/utils/__tests__/content-hash.spec.d.ts +1 -0
  30. package/sync/utils/__tests__/content-hash.spec.js +38 -0
  31. package/sync/utils/content-hash.d.ts +2 -0
  32. package/sync/utils/content-hash.js +23 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "orgnote-api",
3
- "version": "0.41.35",
3
+ "version": "0.41.36",
4
4
  "description": "Official API for creating extensions for OrgNote app",
5
5
  "type": "module",
6
6
  "main": "./index.js",
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,55 @@
1
+ import { expect, test, vi } from 'vitest';
2
+ import { createMemorySyncState } from "../memory-state.js";
3
+ import { createSyncPlan } from "../create-sync-plan.js";
4
+ const createSyncApiMock = () => {
5
+ const syncChangesGet = vi.fn(async () => ({
6
+ data: {
7
+ data: {
8
+ changes: [],
9
+ hasMore: false,
10
+ serverTime: '2024-01-01T00:00:00Z',
11
+ },
12
+ },
13
+ }));
14
+ return { syncChangesGet };
15
+ };
16
+ const createFsMock = (entries, readFileImpl) => {
17
+ const readFile = vi.fn(readFileImpl);
18
+ return {
19
+ readDir: vi.fn(async () => entries),
20
+ readFile,
21
+ };
22
+ };
23
+ test('createSyncPlan falls back to original file when hash read fails', async () => {
24
+ const fs = createFsMock([{ name: 'a.org', path: '/a.org', type: 'file', size: 10, mtime: 10 }], async () => {
25
+ throw new Error('file locked');
26
+ });
27
+ const api = createSyncApiMock();
28
+ const state = createMemorySyncState();
29
+ const plan = await createSyncPlan({
30
+ fs,
31
+ api,
32
+ state,
33
+ rootPath: '/',
34
+ enableContentHashCheck: true,
35
+ });
36
+ expect(plan.toUpload).toHaveLength(1);
37
+ expect(plan.toUpload[0].path).toBe('/a.org');
38
+ expect(plan.toUpload[0].contentHash).toBeUndefined();
39
+ });
40
+ test('createSyncPlan skips hashing when enableContentHashCheck is false', async () => {
41
+ const fs = createFsMock([{ name: 'a.org', path: '/a.org', type: 'file', size: 10, mtime: 10 }], async () => {
42
+ throw new Error('must not be called');
43
+ });
44
+ const api = createSyncApiMock();
45
+ const state = createMemorySyncState();
46
+ const plan = await createSyncPlan({
47
+ fs,
48
+ api,
49
+ state,
50
+ rootPath: '/',
51
+ enableContentHashCheck: false,
52
+ });
53
+ expect(plan.toUpload).toHaveLength(1);
54
+ expect(fs.readFile).toHaveBeenCalledTimes(0);
55
+ });
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,27 @@
1
+ import { expect, test } from 'vitest';
2
+ import { fetchRemoteChanges } from "../fetch.js";
3
+ test('fetchRemoteChanges maps contentHash from API changes', async () => {
4
+ const api = {
5
+ syncChangesGet: async () => ({
6
+ data: {
7
+ data: {
8
+ changes: [
9
+ {
10
+ id: '1',
11
+ path: '/a.org',
12
+ version: 1,
13
+ deleted: false,
14
+ updatedAt: '2024-01-01T00:00:00Z',
15
+ contentHash: 'hash-1',
16
+ },
17
+ ],
18
+ hasMore: false,
19
+ serverTime: '2024-01-01T00:00:00Z',
20
+ },
21
+ },
22
+ }),
23
+ };
24
+ const result = await fetchRemoteChanges(api);
25
+ expect(result.files).toHaveLength(1);
26
+ expect(result.files[0].contentHash).toBe('hash-1');
27
+ });
@@ -4,7 +4,13 @@ const emptyState = { files: {} };
4
4
  const serverTime = '2024-01-01T00:00:00Z';
5
5
  test('new local file → upload', () => {
6
6
  const localFiles = [{ path: 'a.org', mtime: 1000, size: 100 }];
7
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles: [], stateData: emptyState, serverTime });
7
+ const plan = createPlan({
8
+ localFiles,
9
+ deletedLocally: [],
10
+ remoteFiles: [],
11
+ stateData: emptyState,
12
+ serverTime,
13
+ });
8
14
  expect(plan.toUpload).toHaveLength(1);
9
15
  expect(plan.toUpload[0].path).toBe('a.org');
10
16
  });
@@ -12,7 +18,13 @@ test('new remote file → download', () => {
12
18
  const remoteFiles = [
13
19
  { path: 'b.org', version: 1, deleted: false, updatedAt: '' },
14
20
  ];
15
- const plan = createPlan({ localFiles: [], deletedLocally: [], remoteFiles, stateData: emptyState, serverTime });
21
+ const plan = createPlan({
22
+ localFiles: [],
23
+ deletedLocally: [],
24
+ remoteFiles,
25
+ stateData: emptyState,
26
+ serverTime,
27
+ });
16
28
  expect(plan.toDownload).toHaveLength(1);
17
29
  expect(plan.toDownload[0].path).toBe('b.org');
18
30
  });
@@ -22,9 +34,17 @@ test('unchanged file → skip', () => {
22
34
  { path: 'c.org', version: 1, deleted: false, updatedAt: '' },
23
35
  ];
24
36
  const stateData = {
25
- files: { 'c.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
37
+ files: {
38
+ 'c.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
39
+ },
26
40
  };
27
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles, stateData, serverTime });
41
+ const plan = createPlan({
42
+ localFiles,
43
+ deletedLocally: [],
44
+ remoteFiles,
45
+ stateData,
46
+ serverTime,
47
+ });
28
48
  expect(plan.toUpload).toHaveLength(0);
29
49
  expect(plan.toDownload).toHaveLength(0);
30
50
  });
@@ -34,9 +54,17 @@ test('local changed → upload', () => {
34
54
  { path: 'd.org', version: 1, deleted: false, updatedAt: '' },
35
55
  ];
36
56
  const stateData = {
37
- files: { 'd.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
57
+ files: {
58
+ 'd.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
59
+ },
38
60
  };
39
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles, stateData, serverTime });
61
+ const plan = createPlan({
62
+ localFiles,
63
+ deletedLocally: [],
64
+ remoteFiles,
65
+ stateData,
66
+ serverTime,
67
+ });
40
68
  expect(plan.toUpload).toHaveLength(1);
41
69
  });
42
70
  test('remote changed → download', () => {
@@ -45,29 +73,58 @@ test('remote changed → download', () => {
45
73
  { path: 'e.org', version: 2, deleted: false, updatedAt: '' },
46
74
  ];
47
75
  const stateData = {
48
- files: { 'e.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
76
+ files: {
77
+ 'e.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
78
+ },
49
79
  };
50
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles, stateData, serverTime });
80
+ const plan = createPlan({
81
+ localFiles,
82
+ deletedLocally: [],
83
+ remoteFiles,
84
+ stateData,
85
+ serverTime,
86
+ });
51
87
  expect(plan.toDownload).toHaveLength(1);
52
88
  });
53
89
  test('both changed → upload (conflict handled by server)', () => {
54
90
  const localFiles = [{ path: 'f.org', mtime: 3000, size: 100 }];
55
91
  const remoteFiles = [
56
- { path: 'f.org', version: 2, deleted: false, updatedAt: '1970-01-01T00:00:02.000Z' },
92
+ {
93
+ path: 'f.org',
94
+ version: 2,
95
+ deleted: false,
96
+ updatedAt: '1970-01-01T00:00:02.000Z',
97
+ },
57
98
  ];
58
99
  const stateData = {
59
- files: { 'f.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
100
+ files: {
101
+ 'f.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
102
+ },
60
103
  };
61
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles, stateData, serverTime });
104
+ const plan = createPlan({
105
+ localFiles,
106
+ deletedLocally: [],
107
+ remoteFiles,
108
+ stateData,
109
+ serverTime,
110
+ });
62
111
  expect(plan.toUpload).toHaveLength(1);
63
112
  expect(plan.toDownload).toHaveLength(0);
64
113
  });
65
114
  test('deleted locally → delete remote', () => {
66
115
  const deletedLocally = ['g.org'];
67
116
  const stateData = {
68
- files: { 'g.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
117
+ files: {
118
+ 'g.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
119
+ },
69
120
  };
70
- const plan = createPlan({ localFiles: [], deletedLocally, remoteFiles: [], stateData, serverTime });
121
+ const plan = createPlan({
122
+ localFiles: [],
123
+ deletedLocally,
124
+ remoteFiles: [],
125
+ stateData,
126
+ serverTime,
127
+ });
71
128
  expect(plan.toDeleteRemote).toContain('g.org');
72
129
  });
73
130
  test('deleted remotely → delete local', () => {
@@ -76,9 +133,17 @@ test('deleted remotely → delete local', () => {
76
133
  { path: 'h.org', version: 2, deleted: true, updatedAt: '' },
77
134
  ];
78
135
  const stateData = {
79
- files: { 'h.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
136
+ files: {
137
+ 'h.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
138
+ },
80
139
  };
81
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles, stateData, serverTime });
140
+ const plan = createPlan({
141
+ localFiles,
142
+ deletedLocally: [],
143
+ remoteFiles,
144
+ stateData,
145
+ serverTime,
146
+ });
82
147
  expect(plan.toDeleteLocal).toContain('h.org');
83
148
  });
84
149
  test('deleted locally but modified remotely → download', () => {
@@ -87,30 +152,183 @@ test('deleted locally but modified remotely → download', () => {
87
152
  { path: 'i.org', version: 2, deleted: false, updatedAt: '' },
88
153
  ];
89
154
  const stateData = {
90
- files: { 'i.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
155
+ files: {
156
+ 'i.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
157
+ },
91
158
  };
92
- const plan = createPlan({ localFiles: [], deletedLocally, remoteFiles, stateData, serverTime });
159
+ const plan = createPlan({
160
+ localFiles: [],
161
+ deletedLocally,
162
+ remoteFiles,
163
+ stateData,
164
+ serverTime,
165
+ });
93
166
  expect(plan.toDownload).toHaveLength(1);
94
167
  expect(plan.toDeleteRemote).toHaveLength(0);
95
168
  });
96
169
  test('deleted remotely but modified locally → upload (local changes win)', () => {
97
170
  const localFiles = [{ path: 'j.org', mtime: 3000, size: 100 }];
98
171
  const remoteFiles = [
99
- { path: 'j.org', version: 2, deleted: true, updatedAt: '1970-01-01T00:00:02.000Z' },
172
+ {
173
+ path: 'j.org',
174
+ version: 2,
175
+ deleted: true,
176
+ updatedAt: '1970-01-01T00:00:02.000Z',
177
+ },
100
178
  ];
101
179
  const stateData = {
102
- files: { 'j.org': { mtime: 1000, size: 100, version: 1, status: 'synced' } },
180
+ files: {
181
+ 'j.org': { mtime: 1000, size: 100, version: 1, status: 'synced' },
182
+ },
103
183
  };
104
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles, stateData, serverTime });
184
+ const plan = createPlan({
185
+ localFiles,
186
+ deletedLocally: [],
187
+ remoteFiles,
188
+ stateData,
189
+ serverTime,
190
+ });
105
191
  expect(plan.toUpload).toHaveLength(1);
106
192
  expect(plan.toDeleteLocal).toHaveLength(0);
107
193
  });
108
194
  test('file with error status → retry upload', () => {
109
195
  const localFiles = [{ path: 'k.org', mtime: 1000, size: 100 }];
110
196
  const stateData = {
111
- files: { 'k.org': { mtime: 1000, size: 100, version: 1, status: 'error', errorMessage: 'some error' } },
197
+ files: {
198
+ 'k.org': {
199
+ mtime: 1000,
200
+ size: 100,
201
+ version: 1,
202
+ status: 'error',
203
+ errorMessage: 'some error',
204
+ },
205
+ },
112
206
  };
113
- const plan = createPlan({ localFiles, deletedLocally: [], remoteFiles: [], stateData, serverTime });
207
+ const plan = createPlan({
208
+ localFiles,
209
+ deletedLocally: [],
210
+ remoteFiles: [],
211
+ stateData,
212
+ serverTime,
213
+ });
114
214
  expect(plan.toUpload).toHaveLength(1);
115
215
  expect(plan.toUpload[0].path).toBe('k.org');
116
216
  });
217
+ test('mtime changed but same contentHash → skip upload', () => {
218
+ const localFiles = [
219
+ { path: 'l.org', mtime: 2000, size: 100, contentHash: 'same-hash' },
220
+ ];
221
+ const stateData = {
222
+ files: {
223
+ 'l.org': {
224
+ mtime: 1000,
225
+ size: 100,
226
+ version: 1,
227
+ status: 'synced',
228
+ contentHash: 'same-hash',
229
+ },
230
+ },
231
+ };
232
+ const plan = createPlan({
233
+ localFiles,
234
+ deletedLocally: [],
235
+ remoteFiles: [],
236
+ stateData,
237
+ serverTime,
238
+ });
239
+ expect(plan.toUpload).toHaveLength(0);
240
+ });
241
+ test('mtime same but different contentHash → upload', () => {
242
+ const localFiles = [
243
+ { path: 'm.org', mtime: 1000, size: 100, contentHash: 'new-hash' },
244
+ ];
245
+ const stateData = {
246
+ files: {
247
+ 'm.org': {
248
+ mtime: 1000,
249
+ size: 100,
250
+ version: 1,
251
+ status: 'synced',
252
+ contentHash: 'old-hash',
253
+ },
254
+ },
255
+ };
256
+ const plan = createPlan({
257
+ localFiles,
258
+ deletedLocally: [],
259
+ remoteFiles: [],
260
+ stateData,
261
+ serverTime,
262
+ });
263
+ expect(plan.toUpload).toHaveLength(1);
264
+ expect(plan.toUpload[0].path).toBe('m.org');
265
+ });
266
+ test('both hashes missing → fallback to mtime', () => {
267
+ const localFiles = [{ path: 'n.org', mtime: 2000, size: 100 }];
268
+ const stateData = {
269
+ files: {
270
+ 'n.org': {
271
+ mtime: 1000,
272
+ size: 100,
273
+ version: 1,
274
+ status: 'synced',
275
+ },
276
+ },
277
+ };
278
+ const plan = createPlan({
279
+ localFiles,
280
+ deletedLocally: [],
281
+ remoteFiles: [],
282
+ stateData,
283
+ serverTime,
284
+ });
285
+ expect(plan.toUpload).toHaveLength(1);
286
+ expect(plan.toUpload[0].path).toBe('n.org');
287
+ });
288
+ test('local hash present and stored hash absent → fallback to mtime', () => {
289
+ const localFiles = [
290
+ { path: 'o.org', mtime: 1000, size: 100, contentHash: 'new-hash' },
291
+ ];
292
+ const stateData = {
293
+ files: {
294
+ 'o.org': {
295
+ mtime: 1000,
296
+ size: 100,
297
+ version: 1,
298
+ status: 'synced',
299
+ },
300
+ },
301
+ };
302
+ const plan = createPlan({
303
+ localFiles,
304
+ deletedLocally: [],
305
+ remoteFiles: [],
306
+ stateData,
307
+ serverTime,
308
+ });
309
+ expect(plan.toUpload).toHaveLength(0);
310
+ });
311
+ test('local hash present, stored hash absent, different mtime → upload', () => {
312
+ const localFiles = [
313
+ { path: 'p.org', mtime: 2000, size: 100, contentHash: 'some-hash' },
314
+ ];
315
+ const stateData = {
316
+ files: {
317
+ 'p.org': {
318
+ mtime: 1000,
319
+ size: 100,
320
+ version: 1,
321
+ status: 'synced',
322
+ },
323
+ },
324
+ };
325
+ const plan = createPlan({
326
+ localFiles,
327
+ deletedLocally: [],
328
+ remoteFiles: [],
329
+ stateData,
330
+ serverTime,
331
+ });
332
+ expect(plan.toUpload).toHaveLength(1);
333
+ expect(plan.toUpload[0].path).toBe('p.org');
334
+ });
@@ -2,12 +2,32 @@ import { scanLocalFiles, findDeletedLocally } from "./scan.js";
2
2
  import { fetchRemoteChanges } from "./fetch.js";
3
3
  import { createPlan } from "./plan.js";
4
4
  import { getOldestSyncedAt } from "./utils/oldest-synced-at.js";
5
+ import { hashContent } from "./utils/content-hash.js";
6
+ const enrichLocalFilesWithHash = async (fs, localFiles) => {
7
+ const hashResults = await Promise.allSettled(localFiles.map(async (file) => {
8
+ const content = await fs.readFile(file.path, 'binary');
9
+ return {
10
+ ...file,
11
+ contentHash: await hashContent(content),
12
+ };
13
+ }));
14
+ return hashResults.map((result, index) => result.status === 'fulfilled' ? result.value : localFiles[index]);
15
+ };
5
16
  export async function createSyncPlan(params) {
6
- const { fs, api, state, rootPath, ignorePatterns } = params;
17
+ const { fs, api, state, rootPath, ignorePatterns, enableContentHashCheck } = params;
7
18
  const stateData = await state.get();
8
19
  const localFiles = await scanLocalFiles(fs, rootPath, ignorePatterns);
9
- const deletedLocally = findDeletedLocally(localFiles, stateData);
20
+ const localFilesWithHashes = enableContentHashCheck
21
+ ? await enrichLocalFilesWithHash(fs, localFiles)
22
+ : localFiles;
23
+ const deletedLocally = findDeletedLocally(localFilesWithHashes, stateData);
10
24
  const since = getOldestSyncedAt(stateData);
11
25
  const { files: remoteFiles, serverTime } = await fetchRemoteChanges(api, since);
12
- return createPlan({ localFiles, deletedLocally, remoteFiles, stateData, serverTime });
26
+ return createPlan({
27
+ localFiles: localFilesWithHashes,
28
+ deletedLocally,
29
+ remoteFiles,
30
+ stateData,
31
+ serverTime,
32
+ });
13
33
  }
package/sync/fetch.js CHANGED
@@ -29,4 +29,5 @@ const toRemoteFile = (change) => ({
29
29
  version: change.version,
30
30
  deleted: change.deleted,
31
31
  updatedAt: change.updatedAt,
32
+ contentHash: change.contentHash,
32
33
  });
package/sync/index.d.ts CHANGED
@@ -5,6 +5,7 @@ export { scanLocalFiles, findDeletedLocally } from './scan.js';
5
5
  export { fetchRemoteChanges } from './fetch.js';
6
6
  export { recoverState } from './recovery.js';
7
7
  export { getOldestSyncedAt } from './utils/oldest-synced-at.js';
8
+ export { hashContent, hashBytes } from './utils/content-hash.js';
8
9
  export { processUpload, processDownload, processDeleteLocal, processDeleteRemote, handleConflict, generateConflictPath, hasConflict, } from './operations/index.js';
9
10
  export { SyncOperationType } from './types.js';
10
11
  export type { SyncState, SyncStateData, SyncedFile, SyncStatus, LocalFile, RemoteFile, UploadResult, SyncPlan, SyncTask, SyncExecutor, SyncContext, CreateSyncPlanParams, } from './types.js';
package/sync/index.js CHANGED
@@ -5,5 +5,6 @@ export { scanLocalFiles, findDeletedLocally } from "./scan.js";
5
5
  export { fetchRemoteChanges } from "./fetch.js";
6
6
  export { recoverState } from "./recovery.js";
7
7
  export { getOldestSyncedAt } from "./utils/oldest-synced-at.js";
8
+ export { hashContent, hashBytes } from "./utils/content-hash.js";
8
9
  export { processUpload, processDownload, processDeleteLocal, processDeleteRemote, handleConflict, generateConflictPath, hasConflict, } from "./operations/index.js";
9
10
  export { SyncOperationType } from "./types.js";
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,62 @@
1
+ import { expect, test, vi } from 'vitest';
2
+ import { AxiosError } from 'axios';
3
+ import { ErrorFileNotFound } from "../../../models/file-system.js";
4
+ import { handleConflict } from "../conflict.js";
5
+ import { createContext, SHA256_OF_ABC } from "./fixtures.js";
6
+ test('handleConflict stores computed contentHash after server download', async () => {
7
+ const fs = {
8
+ copyFile: vi.fn(async () => undefined),
9
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 3 })),
10
+ readFile: vi.fn(async () => new TextEncoder().encode('abc')),
11
+ };
12
+ const executor = {
13
+ download: vi.fn(async () => undefined),
14
+ };
15
+ const ctx = createContext({ fs, executor, deviceName: 'dev' });
16
+ await handleConflict('/c.org', { status: 'conflict', serverVersion: 7 }, ctx);
17
+ const stored = await ctx.state.getFile('/c.org');
18
+ expect(stored?.status).toBe('synced');
19
+ expect(stored?.version).toBe(7);
20
+ expect(stored?.contentHash).toBe(SHA256_OF_ABC);
21
+ expect(stored?.conflictPath).toContain('.sync-conflict-');
22
+ });
23
+ test('handleConflict removes local file and state when server version is missing', async () => {
24
+ const fs = {
25
+ copyFile: vi.fn(async () => undefined),
26
+ deleteFile: vi.fn(async () => undefined),
27
+ };
28
+ const notFoundResponse = {
29
+ status: 404,
30
+ statusText: 'Not Found',
31
+ headers: {},
32
+ config: {},
33
+ data: {},
34
+ };
35
+ const executor = {
36
+ download: vi.fn(async () => {
37
+ throw new AxiosError('Not Found', 'ERR_BAD_REQUEST', undefined, undefined, notFoundResponse);
38
+ }),
39
+ };
40
+ const ctx = createContext({ fs, executor, deviceName: 'dev' });
41
+ await handleConflict('/missing.org', { status: 'conflict', serverVersion: 7 }, ctx);
42
+ const stored = await ctx.state.getFile('/missing.org');
43
+ expect(fs.deleteFile).toHaveBeenCalledWith('/missing.org');
44
+ expect(stored).toBeNull();
45
+ });
46
+ test('handleConflict stores undefined contentHash when read fails with ErrorFileNotFound', async () => {
47
+ const fs = {
48
+ copyFile: vi.fn(async () => undefined),
49
+ fileInfo: vi.fn(async () => ({ mtime: 100, size: 3 })),
50
+ readFile: vi.fn(async () => {
51
+ throw new ErrorFileNotFound('/c.org');
52
+ }),
53
+ };
54
+ const executor = {
55
+ download: vi.fn(async () => undefined),
56
+ };
57
+ const ctx = createContext({ fs, executor, deviceName: 'dev' });
58
+ await handleConflict('/c.org', { status: 'conflict', serverVersion: 7 }, ctx);
59
+ const stored = await ctx.state.getFile('/c.org');
60
+ expect(stored?.status).toBe('synced');
61
+ expect(stored?.contentHash).toBeUndefined();
62
+ });
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,37 @@
1
+ import { expect, test, vi } from 'vitest';
2
+ import { ErrorFileNotFound } from "../../../models/file-system.js";
3
+ import { resolveContentHash } from "../content-hash.js";
4
+ import { SHA256_OF_ABC } from "./fixtures.js";
5
+ test('resolveContentHash returns preferredHash without reading file', async () => {
6
+ const fs = {
7
+ readFile: vi.fn(async () => new TextEncoder().encode('abc')),
8
+ };
9
+ const result = await resolveContentHash(fs, '/a.org', 'remote-hash');
10
+ expect(result).toBe('remote-hash');
11
+ expect(fs.readFile).toHaveBeenCalledTimes(0);
12
+ });
13
+ test('resolveContentHash computes hash when preferredHash is absent', async () => {
14
+ const fs = {
15
+ readFile: vi.fn(async () => new TextEncoder().encode('abc')),
16
+ };
17
+ const result = await resolveContentHash(fs, '/a.org');
18
+ expect(result).toBe(SHA256_OF_ABC);
19
+ expect(fs.readFile).toHaveBeenCalledTimes(1);
20
+ });
21
+ test('resolveContentHash returns undefined for ErrorFileNotFound', async () => {
22
+ const fs = {
23
+ readFile: vi.fn(async () => {
24
+ throw new ErrorFileNotFound('/missing.org');
25
+ }),
26
+ };
27
+ const result = await resolveContentHash(fs, '/missing.org');
28
+ expect(result).toBeUndefined();
29
+ });
30
+ test('resolveContentHash rethrows unexpected errors', async () => {
31
+ const fs = {
32
+ readFile: vi.fn(async () => {
33
+ throw new Error('permission denied');
34
+ }),
35
+ };
36
+ await expect(resolveContentHash(fs, '/a.org')).rejects.toThrow('permission denied');
37
+ });
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,69 @@
1
+ import { expect, test, vi } from 'vitest';
2
+ import { processDownload } from "../download.js";
3
+ import { createContext, SHA256_OF_ABC } from "./fixtures.js";
4
+ test('processDownload stores remote contentHash when provided', async () => {
5
+ const fs = {
6
+ fileInfo: vi.fn(async () => ({ mtime: 42, size: 10 })),
7
+ readFile: vi.fn(async () => {
8
+ throw new Error('must not be called');
9
+ }),
10
+ };
11
+ const executor = {
12
+ download: vi.fn(async () => undefined),
13
+ };
14
+ const ctx = createContext({ fs, executor });
15
+ const file = {
16
+ path: '/a.org',
17
+ version: 2,
18
+ deleted: false,
19
+ updatedAt: '2024-01-01T00:00:00Z',
20
+ contentHash: 'remote-hash',
21
+ };
22
+ await processDownload(file, ctx);
23
+ const stored = await ctx.state.getFile('/a.org');
24
+ expect(stored?.contentHash).toBe('remote-hash');
25
+ expect(fs.readFile).toHaveBeenCalledTimes(0);
26
+ });
27
+ test('processDownload computes contentHash when remote hash is absent', async () => {
28
+ const fs = {
29
+ fileInfo: vi.fn(async () => ({ mtime: 42, size: 10 })),
30
+ readFile: vi.fn(async () => new TextEncoder().encode('abc')),
31
+ };
32
+ const executor = {
33
+ download: vi.fn(async () => undefined),
34
+ };
35
+ const ctx = createContext({ fs, executor });
36
+ const file = {
37
+ path: '/b.org',
38
+ version: 3,
39
+ deleted: false,
40
+ updatedAt: '2024-01-01T00:00:00Z',
41
+ };
42
+ await processDownload(file, ctx);
43
+ const stored = await ctx.state.getFile('/b.org');
44
+ expect(stored?.contentHash).toBe(SHA256_OF_ABC);
45
+ expect(fs.readFile).toHaveBeenCalledTimes(1);
46
+ });
47
+ test('processDownload stores error status and rethrows when download fails', async () => {
48
+ const fs = {
49
+ fileInfo: vi.fn(async () => ({ mtime: 42, size: 10 })),
50
+ readFile: vi.fn(async () => new TextEncoder().encode('abc')),
51
+ };
52
+ const downloadError = new Error('network');
53
+ const executor = {
54
+ download: vi.fn(async () => {
55
+ throw downloadError;
56
+ }),
57
+ };
58
+ const ctx = createContext({ fs, executor });
59
+ const file = {
60
+ path: '/d.org',
61
+ version: 3,
62
+ deleted: false,
63
+ updatedAt: '2024-01-01T00:00:00Z',
64
+ };
65
+ await expect(processDownload(file, ctx)).rejects.toThrow('network');
66
+ const stored = await ctx.state.getFile('/d.org');
67
+ expect(stored?.status).toBe('error');
68
+ expect(stored?.errorMessage).toContain('network');
69
+ });
@@ -0,0 +1,10 @@
1
+ import type { FileSystem } from '../../../models/file-system.js';
2
+ import type { SyncContext, SyncExecutor } from '../../types.js';
3
+ export declare const SHA256_OF_ABC = "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad";
4
+ interface CreateContextParams {
5
+ fs: FileSystem;
6
+ executor: SyncExecutor;
7
+ deviceName?: string;
8
+ }
9
+ export declare const createContext: ({ fs, executor, deviceName, }: CreateContextParams) => SyncContext;
10
+ export {};
@@ -0,0 +1,9 @@
1
+ import { createMemorySyncState } from "../../memory-state.js";
2
+ export const SHA256_OF_ABC = 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad';
3
+ export const createContext = ({ fs, executor, deviceName, }) => ({
4
+ fs,
5
+ executor,
6
+ state: createMemorySyncState(),
7
+ serverTime: '2024-01-01T00:00:00Z',
8
+ deviceName,
9
+ });
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,10 @@
1
+ import { expect, test } from 'vitest';
2
+ import { createSyncedFile } from "../synced-file.js";
3
+ test('createSyncedFile preserves contentHash from metadata', () => {
4
+ const result = createSyncedFile({ mtime: 1, size: 2, contentHash: 'abc' }, { status: 'synced' });
5
+ expect(result.contentHash).toBe('abc');
6
+ });
7
+ test('createSyncedFile keeps contentHash undefined when absent', () => {
8
+ const result = createSyncedFile({ mtime: 1, size: 2 }, { status: 'synced' });
9
+ expect(result.contentHash).toBeUndefined();
10
+ });
@@ -1,4 +1,6 @@
1
1
  import { createSyncedFile } from "./synced-file.js";
2
+ import { resolveContentHash } from "./content-hash.js";
3
+ import axios from 'axios';
2
4
  export const generateConflictPath = (path, deviceName = 'device') => {
3
5
  const lastDot = path.lastIndexOf('.');
4
6
  const ext = lastDot >= 0 ? path.substring(lastDot) : '';
@@ -15,10 +17,7 @@ const copyFile = async (fs, src, dest) => {
15
17
  await fs.writeFile(dest, content);
16
18
  };
17
19
  const isNotFoundError = (error) => {
18
- if (typeof error !== 'object' || error === null)
19
- return false;
20
- const axiosError = error;
21
- return axiosError.response?.status === 404;
20
+ return axios.isAxiosError(error) && error.response?.status === 404;
22
21
  };
23
22
  const tryDownloadServerVersion = async (path, serverVersion, ctx) => {
24
23
  try {
@@ -46,7 +45,12 @@ export const handleConflict = async (path, conflictResult, ctx) => {
46
45
  return;
47
46
  }
48
47
  const fileInfo = await ctx.fs.fileInfo(path);
49
- const meta = { mtime: fileInfo?.mtime ?? 0, size: fileInfo?.size ?? 0 };
48
+ const contentHash = await resolveContentHash(ctx.fs, path);
49
+ const meta = {
50
+ mtime: fileInfo?.mtime ?? 0,
51
+ size: fileInfo?.size ?? 0,
52
+ contentHash,
53
+ };
50
54
  await ctx.state.setFile(path, createSyncedFile(meta, {
51
55
  version: conflictResult.serverVersion,
52
56
  status: 'synced',
@@ -0,0 +1,2 @@
1
+ import { type FileSystem } from '../../models/file-system.js';
2
+ export declare const resolveContentHash: (fs: FileSystem, path: string, preferredHash?: string) => Promise<string | undefined>;
@@ -0,0 +1,17 @@
1
+ import { ErrorFileNotFound } from "../../models/file-system.js";
2
+ import { hashContent } from "../utils/content-hash.js";
3
+ export const resolveContentHash = async (fs, path, preferredHash) => {
4
+ if (preferredHash !== undefined) {
5
+ return preferredHash;
6
+ }
7
+ try {
8
+ const content = await fs.readFile(path, 'binary');
9
+ return hashContent(content);
10
+ }
11
+ catch (error) {
12
+ if (error instanceof ErrorFileNotFound) {
13
+ return undefined;
14
+ }
15
+ throw error;
16
+ }
17
+ };
@@ -1,20 +1,48 @@
1
1
  import { createSyncedFile } from "./synced-file.js";
2
+ import { resolveContentHash } from "./content-hash.js";
2
3
  const storedMeta = (stored) => ({
3
4
  mtime: stored?.mtime ?? 0,
4
5
  size: stored?.size ?? 0,
5
6
  });
7
+ const markDownloading = async (file, stored, ctx) => {
8
+ await ctx.state.setFile(file.path, createSyncedFile(storedMeta(stored), {
9
+ version: stored?.version,
10
+ status: 'downloading',
11
+ }));
12
+ };
13
+ const resolveDownloadedMeta = async (file, ctx) => {
14
+ const fileInfo = await ctx.fs.fileInfo(file.path);
15
+ const contentHash = await resolveContentHash(ctx.fs, file.path, file.contentHash);
16
+ return {
17
+ mtime: fileInfo?.mtime ?? 0,
18
+ size: fileInfo?.size ?? 0,
19
+ contentHash,
20
+ };
21
+ };
22
+ const markSynced = async (file, meta, ctx) => {
23
+ await ctx.state.setFile(file.path, createSyncedFile(meta, {
24
+ version: file.version,
25
+ status: 'synced',
26
+ syncedAt: ctx.serverTime,
27
+ }));
28
+ };
29
+ const markError = async (file, stored, error, ctx) => {
30
+ await ctx.state.setFile(file.path, createSyncedFile(storedMeta(stored), {
31
+ version: stored?.version,
32
+ status: 'error',
33
+ errorMessage: String(error),
34
+ }));
35
+ };
6
36
  export const processDownload = async (file, ctx) => {
7
37
  const stored = await ctx.state.getFile(file.path);
8
- const meta = storedMeta(stored);
9
- await ctx.state.setFile(file.path, createSyncedFile(meta, { version: stored?.version, status: 'downloading' }));
38
+ await markDownloading(file, stored, ctx);
10
39
  try {
11
40
  await ctx.executor.download(file);
12
- const fileInfo = await ctx.fs.fileInfo(file.path);
13
- const downloadedMeta = { mtime: fileInfo?.mtime ?? 0, size: fileInfo?.size ?? 0 };
14
- await ctx.state.setFile(file.path, createSyncedFile(downloadedMeta, { version: file.version, status: 'synced', syncedAt: ctx.serverTime }));
41
+ const downloadedMeta = await resolveDownloadedMeta(file, ctx);
42
+ await markSynced(file, downloadedMeta, ctx);
15
43
  }
16
44
  catch (error) {
17
- await ctx.state.setFile(file.path, createSyncedFile(meta, { version: stored?.version, status: 'error', errorMessage: String(error) }));
45
+ await markError(file, stored, error, ctx);
18
46
  throw error;
19
47
  }
20
48
  };
@@ -2,6 +2,7 @@ import type { SyncedFile, SyncStatus } from '../types.js';
2
2
/**
 * Filesystem metadata snapshot kept for a synced file.
 * `contentHash` is optional; when it is absent, change detection elsewhere in
 * this package compares `mtime` instead.
 */
interface FileMeta {
    mtime: number;
    size: number;
    contentHash?: string;
}
6
7
  export interface SyncedFileOptions {
7
8
  version?: number;
@@ -1,5 +1,6 @@
1
1
// Build a SyncedFile record from filesystem metadata plus sync bookkeeping
// options (version/status/syncedAt/...); option fields are spread last so
// they may override the metadata fields.
export const createSyncedFile = (meta, options) => {
    const { mtime, size, contentHash } = meta;
    return { mtime, size, contentHash, ...options };
};
package/sync/plan.js CHANGED
@@ -1,25 +1,43 @@
1
1
  import { SyncOperationType } from "./types.js";
2
2
// Compare local files, locally deleted paths and remote files against the
// stored sync state, resolve each into an action, and fold the actions into
// a sync plan stamped with the server time.
export const createPlan = ({ localFiles, deletedLocally, remoteFiles, stateData, serverTime, }) => {
    const index = buildFileIndex(localFiles, deletedLocally, remoteFiles);
    const storedFor = (path) => stateData.files[path];
    const remoteFor = (path) => index.remoteByPath.get(path);
    const actions = [
        ...localFiles.map((local) => resolveLocalFile(local, remoteFor(local.path), storedFor(local.path))),
        ...deletedLocally.map((path) => resolveDeletedLocally(path, remoteFor(path), storedFor(path))),
        ...remoteFiles.filter((remote) => isNewRemote(remote, index)).map(resolveNewRemote),
    ];
    return buildPlanFromActions(actions, serverTime);
};
11
11
// Index the three listings for O(1) lookups by path.
const buildFileIndex = (localFiles, deletedLocally, remoteFiles) => {
    const toPathMap = (files) => new Map(files.map((file) => [file.path, file]));
    return {
        remoteByPath: toPathMap(remoteFiles),
        localByPath: toPathMap(localFiles),
        deletedSet: new Set(deletedLocally),
    };
};
16
16
// A remote entry is "new" when the path is neither present locally nor in the
// local deletion list.
const isNewRemote = (remote, index) => !(index.localByPath.has(remote.path) || index.deletedSet.has(remote.path));
17
// Fold the resolved actions into an initially empty plan skeleton.
const buildPlanFromActions = (actions, serverTime) => {
    const emptyPlan = {
        toUpload: [],
        toDownload: [],
        toDeleteLocal: [],
        toDeleteRemote: [],
        serverTime,
    };
    return actions.reduce((plan, action) => applyAction(plan, action), emptyPlan);
};
18
24
// Dispatch table: one reducer step per action type, folding a resolved action
// into the accumulating plan. The plan object is created fresh for every
// buildPlanFromActions call, so the handlers mutate it in place and return it
// — pushing into the target array instead of re-spreading the whole plan and
// array per action, which made plan building O(n^2) in the number of actions.
const actionHandlers = {
    [SyncOperationType.Upload]: (plan, action) => {
        plan.toUpload.push(action.file);
        return plan;
    },
    [SyncOperationType.Download]: (plan, action) => {
        plan.toDownload.push(action.file);
        return plan;
    },
    [SyncOperationType.DeleteLocal]: (plan, action) => {
        plan.toDeleteLocal.push(action.path);
        return plan;
    },
    [SyncOperationType.DeleteRemote]: (plan, action) => {
        plan.toDeleteRemote.push(action.path);
        return plan;
    },
    // 'none' actions leave the plan untouched.
    none: (plan) => plan,
};
25
43
// Look up the reducer registered for this action's type and apply it.
const applyAction = (plan, action) => {
    const handle = actionHandlers[action.type];
    return handle(plan, action);
};
// A file deleted locally normally propagates as a remote delete. If, however,
// the remote copy still exists and changed since our last recorded sync, the
// remote version wins and is re-downloaded instead.
const resolveDeletedLocally = (path, remote, stored) => {
    const remoteGone = !remote || remote.deleted;
    if (!remoteGone && isRemoteChanged(remote, stored)) {
        return download(remote);
    }
    return deleteRemote(path);
};
50
70
// A remote-only file is downloaded unless the server reports it as deleted.
const resolveNewRemote = (remote) => {
    if (remote.deleted) {
        return none();
    }
    return download(remote);
};
51
// A local file counts as changed when there is no stored record, the last
// attempt errored, or its content differs: compared via content hash when
// both sides have one, otherwise via mtime.
const isLocalChanged = (local, stored) => {
    if (!stored || stored.status === 'error') {
        return true;
    }
    const bothHashed = Boolean(local.contentHash && stored.contentHash);
    return bothHashed
        ? local.contentHash !== stored.contentHash
        : local.mtime !== stored.mtime;
};
52
76
// The remote side changed when we have no record of the file, or the server
// version is newer than the last version we synced (unknown version = 0).
const isRemoteChanged = (remote, stored) => {
    if (!stored) {
        return true;
    }
    const knownVersion = stored.version ?? 0;
    return remote.version > knownVersion;
};
53
// Action factories: wrap a file or path into a typed sync action record.
const upload = (file) => {
    return { type: SyncOperationType.Upload, file };
};
const download = (file) => {
    return { type: SyncOperationType.Download, file };
};
const deleteLocal = (path) => {
    return { type: SyncOperationType.DeleteLocal, path };
};
const deleteRemote = (path) => {
    return { type: SyncOperationType.DeleteRemote, path };
};
57
93
// The no-op action: nothing to upload, download or delete for this file.
const none = () => {
    return { type: 'none' };
};
package/sync/types.d.ts CHANGED
@@ -11,6 +11,7 @@ export declare enum SyncOperationType {
11
11
  export interface SyncedFile {
12
12
  mtime: number;
13
13
  size: number;
14
+ contentHash?: string;
14
15
  version?: number;
15
16
  status: SyncStatus;
16
17
  syncedAt?: string;
@@ -31,8 +32,9 @@ export interface LocalFile {
31
32
  path: string;
32
33
  mtime: number;
33
34
  size: number;
35
+ contentHash?: string;
34
36
  }
35
/** Server-side file descriptor: the subset of FileChange fields the sync planner consumes. */
export type RemoteFile = Pick<FileChange, 'path' | 'version' | 'deleted' | 'updatedAt' | 'contentHash'>;
36
38
  export type UploadResult = {
37
39
  status: 'ok';
38
40
  version: number;
@@ -64,6 +66,7 @@ export interface CreateSyncPlanParams {
64
66
  state: SyncState;
65
67
  rootPath: string;
66
68
  ignorePatterns?: string[];
69
+ enableContentHashCheck?: boolean;
67
70
  }
68
71
  export interface SyncContext {
69
72
  executor: SyncExecutor;
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,38 @@
1
// Tests for sync/utils/content-hash: SHA-256 content hashing helpers.
import { expect, test } from 'vitest';
import { hashBytes, hashContent } from "../content-hash.js";
// UTF-8 encode a string into the Uint8Array form the hashers accept.
const toBytes = (value) => new TextEncoder().encode(value);
test('hashContent returns deterministic SHA-256 digest', async () => {
    const digest = await hashContent(toBytes('abc'));
    // Well-known SHA-256 test vector for "abc".
    expect(digest).toMatchInlineSnapshot('"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"');
    // 64 lowercase hex characters = 256-bit digest.
    expect(digest).toMatch(/^[0-9a-f]{64}$/);
});
test('hashContent returns the same digest for the same content', async () => {
    const content = toBytes('orgnote');
    const first = await hashContent(content);
    const second = await hashContent(content);
    expect(first).toBe(second);
});
test('hashContent returns different digests for different content', async () => {
    const first = await hashContent(toBytes('orgnote-a'));
    const second = await hashContent(toBytes('orgnote-b'));
    expect(first).not.toBe(second);
});
test('hashBytes is an alias of hashContent', async () => {
    const content = toBytes('alias-check');
    const first = await hashContent(content);
    const second = await hashBytes(content);
    expect(first).toBe(second);
});
test('hashContent handles long text deterministically', async () => {
    // ~36 KB of input exercises hashing beyond a single tiny buffer.
    const content = toBytes('orgnote-long-text-'.repeat(2000));
    const first = await hashContent(content);
    const second = await hashContent(content);
    expect(first).toBe(second);
    expect(first).toMatch(/^[0-9a-f]{64}$/);
});
test('hashContent handles binary bytes', async () => {
    // Non-text bytes, including 0x00 and 0xff boundaries.
    const bytes = new Uint8Array([0, 255, 1, 128, 64, 32, 16, 8, 4, 2, 1, 0]);
    const digest = await hashContent(bytes);
    expect(digest).toMatchInlineSnapshot('"c7e3c80490776a8af2e44e522fd67583434ab733dfa0488f535ebe6db5bd7f34"');
    expect(digest).toMatch(/^[0-9a-f]{64}$/);
});
@@ -0,0 +1,2 @@
1
/** Compute the SHA-256 digest of the given bytes as a lowercase hex string. */
export declare const hashContent: (content: Uint8Array) => Promise<string>;
/** Alias of hashContent, kept for API compatibility. */
export declare const hashBytes: (content: Uint8Array) => Promise<string>;
@@ -0,0 +1,23 @@
1
+ const bytesToHex = (bytes) => {
2
+ let hex = '';
3
+ for (const byte of bytes) {
4
+ hex += byte.toString(16).padStart(2, '0');
5
+ }
6
+ return hex;
7
+ };
8
+ let subtleCrypto = null;
9
+ const getSubtleCrypto = () => {
10
+ if (subtleCrypto) {
11
+ return subtleCrypto;
12
+ }
13
+ if (!globalThis.crypto?.subtle) {
14
+ throw new Error('Web Crypto API is not available');
15
+ }
16
+ subtleCrypto = globalThis.crypto.subtle;
17
+ return subtleCrypto;
18
+ };
19
+ export const hashContent = async (content) => {
20
+ const digest = await getSubtleCrypto().digest('SHA-256', content);
21
+ return bytesToHex(new Uint8Array(digest));
22
+ };
23
+ export const hashBytes = hashContent;