@luzzle/core 0.0.37 → 0.0.38
This diff reflects the changes between publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
- package/README.md +9 -36
- package/dist/src/database/migrations/2026-02-11T23:03:45Z-clear-pieces-cache.d.ts +3 -0
- package/dist/src/database/migrations/2026-02-11T23:03:45Z-clear-pieces-cache.js +8 -0
- package/dist/src/database/migrations/2026-02-11T23:03:45Z-clear-pieces-cache.js.map +1 -0
- package/dist/src/lib/ajv.test.js +1 -1
- package/dist/src/lib/ajv.test.js.map +1 -1
- package/dist/src/llm/google.d.ts +1 -1
- package/dist/src/pieces/Piece.d.ts +2 -2
- package/dist/src/pieces/Piece.fixtures.d.ts +21 -27
- package/dist/src/pieces/Piece.fixtures.js +34 -21
- package/dist/src/pieces/Piece.fixtures.js.map +1 -1
- package/dist/src/pieces/Piece.js +45 -42
- package/dist/src/pieces/Piece.js.map +1 -1
- package/dist/src/pieces/Piece.test.js +487 -739
- package/dist/src/pieces/Piece.test.js.map +1 -1
- package/dist/src/pieces/item.js +29 -17
- package/dist/src/pieces/item.js.map +1 -1
- package/dist/src/pieces/item.test.js +137 -115
- package/dist/src/pieces/item.test.js.map +1 -1
- package/dist/src/pieces/manager.test.js +1 -1
- package/dist/src/pieces/manager.test.js.map +1 -1
- package/dist/src/pieces/utils/frontmatter.d.ts +42 -10
- package/dist/src/pieces/utils/frontmatter.js +112 -44
- package/dist/src/pieces/utils/frontmatter.js.map +1 -1
- package/dist/src/pieces/utils/frontmatter.path.d.ts +5 -0
- package/dist/src/pieces/utils/frontmatter.path.js +92 -0
- package/dist/src/pieces/utils/frontmatter.path.js.map +1 -0
- package/dist/src/pieces/utils/frontmatter.path.test.d.ts +1 -0
- package/dist/src/pieces/utils/frontmatter.path.test.js +172 -0
- package/dist/src/pieces/utils/frontmatter.path.test.js.map +1 -0
- package/dist/src/pieces/utils/frontmatter.test.js +200 -169
- package/dist/src/pieces/utils/frontmatter.test.js.map +1 -1
- package/dist/src/pieces/utils/piece.js +3 -2
- package/dist/src/pieces/utils/piece.js.map +1 -1
- package/dist/src/pieces/utils/piece.test.js +1 -1
- package/dist/src/pieces/utils/piece.test.js.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
- package/dist/src/pieces/utils/piece.fixtures.d.ts +0 -19
- package/dist/src/pieces/utils/piece.fixtures.js +0 -39
- package/dist/src/pieces/utils/piece.fixtures.js.map +0 -1
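The new `frontmatter.path` utilities and the reworked `Piece` tests below exercise dotted-path access into piece frontmatter (for example `setField(markdown, 'meta.author', 'Bob')` and `removeField(markdown, 'tags.1')`). As a rough illustration only, here is a minimal sketch of that kind of dotted-path set/remove logic; `setAtPath`, `removeAtPath`, and the `Frontmatter` type are hypothetical names invented for this sketch, not the package's actual exports.

```ts
// Hypothetical sketch of dotted-path updates over a frontmatter object.
// It mirrors the behaviour the new tests exercise (nested set, removal by
// array index), but is not the package's real frontmatter.path API.
type Frontmatter = Record<string, unknown>;

function setAtPath(obj: Frontmatter, path: string, value: unknown): Frontmatter {
  const [head, ...rest] = path.split('.');
  const copy: Frontmatter = { ...obj };
  if (rest.length === 0) {
    copy[head] = value;
    return copy;
  }
  const child = copy[head];
  // Recurse into an existing object, or create one if the segment is missing.
  copy[head] = setAtPath(
    typeof child === 'object' && child !== null ? { ...(child as Frontmatter) } : {},
    rest.join('.'),
    value,
  );
  return copy;
}

function removeAtPath(obj: Frontmatter, path: string): Frontmatter {
  const [head, ...rest] = path.split('.');
  const copy: Frontmatter = { ...obj };
  if (rest.length === 0) {
    // Final segment is a plain key: drop it.
    delete copy[head];
    return copy;
  }
  const child = copy[head];
  if (Array.isArray(child) && rest.length === 1 && /^\d+$/.test(rest[0])) {
    // Final segment is an array index (e.g. 'tags.1'): remove that element.
    copy[head] = child.filter((_, i) => i !== Number(rest[0]));
    return copy;
  }
  if (typeof child === 'object' && child !== null) {
    copy[head] = removeAtPath({ ...(child as Frontmatter) }, rest.join('.'));
  }
  return copy;
}

// Example matching the behaviour asserted in the new Piece tests:
const fm: Frontmatter = { title: 't', meta: { author: 'Alice' }, tags: ['a', 'b', 'c'] };
console.log(setAtPath(fm, 'meta.author', 'Bob')); // meta.author becomes 'Bob'
console.log(removeAtPath(fm, 'tags.1'));          // tags becomes ['a', 'c']
```

The sketch only covers the set-by-path and remove-by-index cases visible in the diff; removal by value and required-field checks appear in the tests but are not reproduced here.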
package/dist/src/pieces/Piece.test.js
@@ -1,149 +1,91 @@
 import { describe, expect, test, vi, afterEach } from 'vitest';
-import { makeMarkdownSample, makePieceItemSelectable, makePieceMock,
+import { makeMarkdownSample, makePieceItemSelectable, makePieceMock, makeSchema, makeStorage, } from './Piece.fixtures.js';
 import { mockKysely } from '../database/database.mock.js';
-import
-import
-import
-import
-import compile from '../lib/ajv.js';
-import { makePieceItemInsertable, makePieceItemUpdatable, validatePieceItem, getValidatePieceItemErrors, } from './item.js';
-import { selectItems, deleteItem, selectItem, insertItem, updateItem } from './items.js';
-import { calculateHashFromFile, makePieceValue, makePieceAttachment } from './utils/piece.js';
-import { makeCache } from './cache.fixtures.js';
+import * as cache from './cache.js';
+import * as item from './item.js';
+import * as items from './items.js';
+import * as pieceUtils from './utils/piece.js';
 import slugify from '@sindresorhus/slugify';
 import { PassThrough } from 'stream';
 import { cpus } from 'os';
+import { makeCache } from './cache.fixtures.js';
+// Only mock external boundaries and environment
 vi.mock('./cache.js');
-vi.mock('os');
-vi.mock('@sindresorhus/slugify');
-vi.mock('./utils/frontmatter.js');
-vi.mock('../lib/ajv.js');
-vi.mock('./utils/markdown.js');
-vi.mock('../lib/markdown.js');
 vi.mock('./item.js');
 vi.mock('./items.js');
 vi.mock('./utils/piece.js');
+vi.mock('os');
+vi.mock('@sindresorhus/slugify');
 const mocks = {
-
-
-
-
-removeCache: vi.mocked(removeCache),
-updateCache: vi.mocked(updateCache),
-getCache: vi.mocked(getCache),
-compile: vi.mocked(compile),
-getPieceSchemaFields: vi.mocked(getPieceFrontmatterSchemaFields),
-databaseValueToFrontmatterValue: vi.mocked(databaseValueToPieceFrontmatterValue),
-calculateHashFromFile: vi.mocked(calculateHashFromFile),
-makeInsertable: vi.mocked(makePieceItemInsertable),
-makeUpdatable: vi.mocked(makePieceItemUpdatable),
-initializePieceFrontMatter: vi.mocked(initializePieceFrontMatter),
-selectItems: vi.mocked(selectItems),
-deleteItem: vi.mocked(deleteItem),
-insertItem: vi.mocked(insertItem),
-updateItem: vi.mocked(updateItem),
-selectItem: vi.mocked(selectItem),
-validatePieceItem: vi.mocked(validatePieceItem),
-getValidatePieceItemErrors: vi.mocked(getValidatePieceItemErrors),
+cache: vi.mocked(cache),
+item: vi.mocked(item),
+items: vi.mocked(items),
+pieceUtils: vi.mocked(pieceUtils),
 slugify: vi.mocked(slugify),
-makePieceValue: vi.mocked(makePieceValue),
-makePieceAttachment: vi.mocked(makePieceAttachment),
 cpus: vi.mocked(cpus),
 };
-const spies = {};
 describe('pieces/Piece.ts', () => {
 afterEach(() => {
-
-mock.mockReset();
-});
-Object.keys(spies).forEach((key) => {
-spies[key].mockRestore();
-delete spies[key];
-});
+vi.clearAllMocks();
 });
-test('constructor throws', () => {
-const schema = makeSchema('not-title');
+test('constructor throws on name mismatch', () => {
 const PieceType = makePieceMock();
-const
-const storage = makeStorage(
-
-mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
-expect(() => new PieceType('books', storage, schema)).toThrow();
+const schema = makeSchema('table');
+const storage = makeStorage();
+expect(() => new PieceType('mismatch', storage, schema)).toThrow('does not match the schema title');
 });
-test('create', async () => {
+test('create generates a new markdown piece', async () => {
 const PieceType = makePieceMock();
-const
-const
-
-
-
-
-
-
-
-const pieceMarkdown = await piece.create(file, title);
-expect(pieceMarkdown).toEqual(markdown);
-});
-test('create throws on existing piece', async () => {
+const storage = makeStorage();
+const piece = new PieceType('table', storage);
+mocks.slugify.mockReturnValue('my-title');
+vi.spyOn(storage, 'exists').mockResolvedValue(false);
+const result = await piece.create('dir', 'My Title');
+expect(result.piece).toBe('table');
+expect(result.frontmatter.title).toBe('title');
+});
+test('create throws if file already exists', async () => {
 const PieceType = makePieceMock();
-const
-const
-
-
-
-
-
-
-const
-const
-
-
-
-
-
-
-const
-
-
-
-expect(
-});
-test('delete fails', async () => {
-const path = 'path/to/slug.md';
-const storage = makeStorage('root');
-const PieceTest = makePieceMock();
-const pieceTest = new PieceTest('books', storage);
-spies.exists = vi.spyOn(storage, 'exists').mockResolvedValueOnce(false);
-spies.delete = vi.spyOn(storage, 'delete').mockResolvedValueOnce();
-const deleting = pieceTest.delete(path);
-expect(deleting).rejects.toThrowError();
+const storage = makeStorage();
+const piece = new PieceType('table', storage);
+mocks.slugify.mockReturnValue('my-title');
+vi.spyOn(storage, 'exists').mockResolvedValue(true);
+await expect(piece.create('dir', 'My Title')).rejects.toThrow('file already exists');
+});
+test('delete removes file if it exists', async () => {
+const storage = makeStorage();
+const PieceType = makePieceMock();
+const piece = new PieceType('table', storage);
+vi.spyOn(storage, 'exists').mockResolvedValue(true);
+vi.spyOn(storage, 'delete').mockResolvedValue(undefined);
+await piece.delete('file.md');
+expect(storage.delete).toHaveBeenCalledWith('file.md');
+});
+test('delete throws if file missing', async () => {
+const storage = makeStorage();
+const PieceType = makePieceMock();
+const piece = new PieceType('table', storage);
+vi.spyOn(storage, 'exists').mockResolvedValue(false);
+await expect(piece.delete('file.md')).rejects.toThrow('does not exist');
 });
 test('get schema', () => {
 const PieceType = makePieceMock();
 const type = 'table';
 const schema = makeSchema(type);
-const markdown = makeMarkdownSample();
 const storage = makeStorage('root');
-mocks.initializePieceFrontMatter.mockReturnValueOnce(markdown.frontmatter);
-mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
 const piece = new PieceType(type, storage, schema);
 expect(piece.schema).toEqual(schema);
 });
-test('isOutdated', async () => {
-const filename = '/path/to/slug.books.md';
-const db = mockKysely().db;
+test('isOutdated returns true if file is newer than cache', async () => {
 const PieceType = makePieceMock();
-const
-const
-const
-
-
-
-
-
-.mockResolvedValueOnce({ last_modified: fileDate });
-const isOutdated = await pieceTest.isOutdated(filename, db);
-expect(isOutdated).toEqual(true);
+const storage = makeStorage();
+const piece = new PieceType('table', storage);
+const db = mockKysely().db;
+mocks.cache.getCache.mockResolvedValue(makeCache({
+date_updated: 1000, date_added: 1000
+}));
+vi.spyOn(storage, 'stat').mockResolvedValue({ last_modified: new Date(2000) });
+expect(await piece.isOutdated('file.md', db)).toBe(true);
 });
 test('isOutdated by date_added', async () => {
 const filename = '/path/to/slug.books.md';
@@ -151,689 +93,495 @@ describe('pieces/Piece.ts', () => {
 const PieceType = makePieceMock();
 const cacheDate = new Date('11-11-2000').getTime();
 const fileDate = new Date('11-11-2001');
-const
-const
-
-
-
-
-.mockResolvedValueOnce({ last_modified: fileDate });
+const storage = makeStorage();
+const pieceTest = new PieceType('table', storage);
+mocks.cache.getCache.mockResolvedValue(makeCache({
+date_added: cacheDate, date_updated: null, id: '1', file_path: filename, content_hash: 'h'
+}));
+vi.spyOn(storage, 'stat').mockResolvedValue({ last_modified: fileDate });
 const isOutdated = await pieceTest.isOutdated(filename, db);
 expect(isOutdated).toEqual(true);
 });
-test('isOutdated returns false', async () => {
-const filename = '/path/to/slug.books.md';
-const db = mockKysely().db;
+test('isOutdated returns false if cache is current', async () => {
 const PieceType = makePieceMock();
-const
-const
-const
-
-
-
-
-
-.mockResolvedValueOnce({ last_modified: fileDate });
-const isOutdated = await pieceTest.isOutdated(filename, db);
-expect(isOutdated).toEqual(false);
+const storage = makeStorage();
+const piece = new PieceType('table', storage);
+const db = mockKysely().db;
+mocks.cache.getCache.mockResolvedValue(makeCache({
+date_updated: 3000, date_added: 3000
+}));
+vi.spyOn(storage, 'stat').mockResolvedValue({ last_modified: new Date(2000) });
+expect(await piece.isOutdated('file.md', db)).toBe(false);
 });
 test('isOutdated throws', async () => {
-const filename = '
+const filename = 'file.md';
 const db = mockKysely().db;
 const PieceType = makePieceMock();
-const storage = makeStorage(
-const pieceTest = new PieceType('
-
-
-await expect(isOutdating).rejects.toThrow();
+const storage = makeStorage();
+const pieceTest = new PieceType('table', storage);
+vi.spyOn(storage, 'stat').mockRejectedValue(new Error('oof'));
+await expect(pieceTest.isOutdated(filename, db)).rejects.toThrow();
 });
-test('validate', () => {
+test('validate calls item.validatePieceItem', () => {
 const PieceType = makePieceMock();
+const piece = new PieceType();
 const markdown = makeMarkdownSample();
+mocks.item.validatePieceItem.mockReturnValue(true);
+const result = piece.validate(markdown);
+expect(result.isValid).toBe(true);
+});
+test('validate returns errors on failure', () => {
+const PieceType = makePieceMock();
 const piece = new PieceType();
-
-
-
+const markdown = makeMarkdownSample();
+mocks.item.validatePieceItem.mockReturnValue(false);
+mocks.item.getValidatePieceItemErrors.mockReturnValue(['error']);
+const result = piece.validate(markdown);
+expect(result.isValid).toBe(false);
+if (!result.isValid) {
+expect(result.errors).toEqual(['error']);
+}
 });
|
-
test('
|
|
145
|
+
test('get reads and extracts markdown', async () => {
|
|
146
|
+
const PieceType = makePieceMock();
|
|
147
|
+
const storage = makeStorage();
|
|
148
|
+
const piece = new PieceType('table', storage);
|
|
149
|
+
const fm = { title: 'sample' };
|
|
150
|
+
vi.spyOn(storage, 'exists').mockResolvedValue(true);
|
|
151
|
+
vi.spyOn(storage, 'readFile').mockResolvedValue('---\ntitle: sample\n---\nbody');
|
|
152
|
+
const result = await piece.get('file.md');
|
|
153
|
+
expect(result.frontmatter.title).toBe(fm.title);
|
|
154
|
+
expect(result.note).toBe('body');
|
|
155
|
+
});
|
|
156
|
+
test('get throws if file missing', async () => {
|
|
199
157
|
const PieceType = makePieceMock();
|
|
158
|
+
const storage = makeStorage();
|
|
159
|
+
const piece = new PieceType('table', storage);
|
|
160
|
+
vi.spyOn(storage, 'exists').mockResolvedValue(false);
|
|
161
|
+
await expect(piece.get('file.md')).rejects.toThrow('does not exist');
|
|
162
|
+
});
|
|
163
|
+
test('write saves markdown if valid', async () => {
|
|
164
|
+
const PieceType = makePieceMock();
|
|
165
|
+
const storage = makeStorage();
|
|
166
|
+
const piece = new PieceType('table', storage);
|
|
200
167
|
const markdown = makeMarkdownSample();
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
const
|
|
215
|
-
const
|
|
216
|
-
const pieceTest = new PieceTest('books', storage);
|
|
217
|
-
mocks.extract.mockResolvedValueOnce(extracted);
|
|
218
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
219
|
-
spies.exists = vi.spyOn(storage, 'exists').mockResolvedValueOnce(true);
|
|
220
|
-
spies.readFile = vi.spyOn(storage, 'readFile').mockResolvedValueOnce(note);
|
|
221
|
-
const get = await pieceTest.get(path);
|
|
222
|
-
expect(mocks.extract).toHaveBeenCalledWith(note);
|
|
223
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledOnce();
|
|
224
|
-
expect(get).toEqual(markdown);
|
|
225
|
-
});
|
|
226
|
-
test('get throws', async () => {
|
|
227
|
-
const note = 'note';
|
|
228
|
-
const frontmatter = makeFrontmatterSample();
|
|
229
|
-
const path = '/path/to/slug.md';
|
|
230
|
-
const extracted = { markdown: note, frontmatter };
|
|
231
|
-
const markdown = makeMarkdownSample({ note, frontmatter });
|
|
232
|
-
const storage = makeStorage('root');
|
|
233
|
-
const PieceTest = makePieceMock();
|
|
234
|
-
const pieceTest = new PieceTest('books', storage);
|
|
235
|
-
mocks.extract.mockResolvedValueOnce(extracted);
|
|
236
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
237
|
-
spies.exists = vi.spyOn(storage, 'exists').mockResolvedValueOnce(false);
|
|
238
|
-
const getting = pieceTest.get(path);
|
|
239
|
-
await expect(getting).rejects.toThrowError();
|
|
240
|
-
});
|
|
241
|
-
test('write', async () => {
|
|
242
|
-
const sample = makeMarkdownSample();
|
|
243
|
-
const contents = JSON.stringify(sample.frontmatter);
|
|
244
|
-
const storage = makeStorage('root');
|
|
245
|
-
const PieceTest = makePieceMock();
|
|
246
|
-
const pieceTest = new PieceTest('books', storage);
|
|
247
|
-
mocks.validatePieceItem.mockReturnValueOnce(true);
|
|
248
|
-
mocks.toMarkdownString.mockReturnValueOnce(contents);
|
|
249
|
-
spies.write = vi.spyOn(storage, 'writeFile').mockResolvedValueOnce(undefined);
|
|
250
|
-
await pieceTest.write(sample);
|
|
251
|
-
expect(spies.write).toHaveBeenCalledWith(sample.filePath, contents);
|
|
252
|
-
});
|
|
253
|
-
test('write fails', async () => {
|
|
254
|
-
const sample = makeMarkdownSample();
|
|
255
|
-
const contents = JSON.stringify(sample.frontmatter);
|
|
256
|
-
const storage = makeStorage('root');
|
|
257
|
-
const PieceTest = makePieceMock();
|
|
258
|
-
const pieceTest = new PieceTest('books', storage);
|
|
259
|
-
mocks.validatePieceItem.mockReturnValueOnce(false);
|
|
260
|
-
mocks.getValidatePieceItemErrors.mockReturnValueOnce(['error']);
|
|
261
|
-
mocks.toMarkdownString.mockReturnValueOnce(contents);
|
|
262
|
-
const writing = pieceTest.write(sample);
|
|
263
|
-
await expect(writing).rejects.toThrowError();
|
|
264
|
-
});
|
|
265
|
-
test('prune', async () => {
|
|
266
|
-
const dbPieces = [
|
|
267
|
-
makePieceItemSelectable({ file_path: 'a' }),
|
|
268
|
-
makePieceItemSelectable({ file_path: 'b' }),
|
|
269
|
-
makePieceItemSelectable({ file_path: 'c' }),
|
|
270
|
-
];
|
|
168
|
+
mocks.item.validatePieceItem.mockReturnValue(true);
|
|
169
|
+
vi.spyOn(storage, 'writeFile').mockResolvedValue(undefined);
|
|
170
|
+
await piece.write(markdown);
|
|
171
|
+
expect(storage.writeFile).toHaveBeenCalled();
|
|
172
|
+
});
|
|
173
|
+
test('write throws if invalid', async () => {
|
|
174
|
+
const piece = new (makePieceMock())();
|
|
175
|
+
const markdown = makeMarkdownSample();
|
|
176
|
+
mocks.item.validatePieceItem.mockReturnValue(false);
|
|
177
|
+
mocks.item.getValidatePieceItemErrors.mockReturnValue(['bad']);
|
|
178
|
+
await expect(piece.write(markdown)).rejects.toThrow('Could not write');
|
|
179
|
+
});
|
|
180
|
+
test('prune deletes missing pieces from DB', async () => {
|
|
181
|
+
const PieceType = makePieceMock();
|
|
182
|
+
const piece = new PieceType('table');
|
|
271
183
|
const db = mockKysely().db;
|
|
272
|
-
const storage = makeStorage('root');
|
|
273
|
-
const PieceTest = makePieceMock();
|
|
274
|
-
const pieceTest = new PieceTest('books', storage);
|
|
275
184
|
mocks.cpus.mockReturnValue([{}]);
|
|
276
|
-
mocks.selectItems.
|
|
277
|
-
mocks.deleteItem.
|
|
278
|
-
const
|
|
279
|
-
const result
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
result.push(pruned.file);
|
|
185
|
+
mocks.items.selectItems.mockResolvedValue([makePieceItemSelectable({ file_path: 'missing.md' })]);
|
|
186
|
+
mocks.items.deleteItem.mockResolvedValue(undefined);
|
|
187
|
+
const stream = await piece.prune(db, ['exists.md']);
|
|
188
|
+
for await (const result of stream) {
|
|
189
|
+
if (!result.error) {
|
|
190
|
+
expect(result.action).toBe('pruned');
|
|
283
191
|
}
|
|
284
192
|
}
|
|
285
|
-
expect(mocks.
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
const dbPieces = [
|
|
291
|
-
makePieceItemSelectable({ file_path: 'a' }),
|
|
292
|
-
makePieceItemSelectable({ file_path: 'b' }),
|
|
293
|
-
makePieceItemSelectable({ file_path: 'c' }),
|
|
294
|
-
];
|
|
193
|
+
expect(mocks.items.deleteItem).toHaveBeenCalledWith(db, 'missing.md');
|
|
194
|
+
});
|
|
195
|
+
test('prune handles dryRun', async () => {
|
|
196
|
+
const PieceType = makePieceMock();
|
|
197
|
+
const piece = new PieceType('table');
|
|
295
198
|
const db = mockKysely().db;
|
|
296
|
-
const storage = makeStorage('root');
|
|
297
|
-
const PieceTest = makePieceMock();
|
|
298
|
-
const pieceTest = new PieceTest('books', storage);
|
|
299
199
|
mocks.cpus.mockReturnValue([{}]);
|
|
300
|
-
mocks.selectItems.
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
if (!pruned.error) {
|
|
306
|
-
result.push(pruned.file);
|
|
200
|
+
mocks.items.selectItems.mockResolvedValue([makePieceItemSelectable({ file_path: 'missing.md' })]);
|
|
201
|
+
const stream = await piece.prune(db, [], { dryRun: true });
|
|
202
|
+
for await (const result of stream) {
|
|
203
|
+
if (!result.error) {
|
|
204
|
+
expect(result.action).toBe('pruned');
|
|
307
205
|
}
|
|
308
206
|
}
|
|
309
|
-
expect(mocks.
|
|
310
|
-
expect(mocks.deleteItem).not.toHaveBeenCalled();
|
|
311
|
-
expect(result).toEqual(['a', 'b', 'c']);
|
|
207
|
+
expect(mocks.items.deleteItem).not.toHaveBeenCalled();
|
|
312
208
|
});
|
|
313
|
-
test('prune error', async () => {
|
|
314
|
-
const
|
|
209
|
+
test('prune handles error', async () => {
|
|
210
|
+
const PieceType = makePieceMock();
|
|
211
|
+
const piece = new PieceType('table');
|
|
315
212
|
const db = mockKysely().db;
|
|
316
|
-
const storage = makeStorage('root');
|
|
317
|
-
const PieceTest = makePieceMock();
|
|
318
|
-
const pieceTest = new PieceTest('books', storage);
|
|
319
213
|
mocks.cpus.mockReturnValue([{}]);
|
|
320
|
-
mocks.selectItems.
|
|
321
|
-
mocks.deleteItem.
|
|
322
|
-
const
|
|
323
|
-
const result
|
|
324
|
-
|
|
325
|
-
if (pruned.error) {
|
|
326
|
-
result.push(pruned.file);
|
|
327
|
-
}
|
|
214
|
+
mocks.items.selectItems.mockResolvedValue([makePieceItemSelectable({ file_path: 'm.md' })]);
|
|
215
|
+
mocks.items.deleteItem.mockRejectedValue(new Error('oof'));
|
|
216
|
+
const stream = await piece.prune(db, []);
|
|
217
|
+
for await (const result of stream) {
|
|
218
|
+
expect(result.error).toBe(true);
|
|
328
219
|
}
|
|
329
|
-
expect(mocks.selectItems).toHaveBeenCalledOnce();
|
|
330
|
-
expect(mocks.deleteItem).toHaveBeenCalledOnce();
|
|
331
|
-
expect(result).toHaveLength(1);
|
|
332
220
|
});
|
|
333
|
-
test('syncMarkdownAdd', async () => {
|
|
334
|
-
const
|
|
335
|
-
const
|
|
336
|
-
const
|
|
337
|
-
const
|
|
338
|
-
const pieceTest = new PieceTest();
|
|
339
|
-
mocks.makeInsertable.mockReturnValueOnce({});
|
|
340
|
-
mocks.insertItem.mockResolvedValueOnce({});
|
|
341
|
-
mocks.calculateHashFromFile.mockResolvedValueOnce(hash);
|
|
342
|
-
await pieceTest.syncMarkdownAdd(dbMocks.db, markdown);
|
|
343
|
-
expect(mocks.insertItem).toHaveBeenCalledOnce();
|
|
344
|
-
expect(mocks.addCache).toHaveBeenCalledWith(dbMocks.db, markdown.filePath, hash);
|
|
345
|
-
});
|
|
346
|
-
test('syncMarkdownAdd supports dryRun', async () => {
|
|
347
|
-
const dbMocks = mockKysely();
|
|
348
|
-
const PieceTest = makePieceMock();
|
|
349
|
-
const keywords = 'a,b'.split(',');
|
|
350
|
-
const markdown = makeMarkdownSample({ frontmatter: { keywords: keywords.join(',') } });
|
|
351
|
-
const pieceTest = new PieceTest();
|
|
352
|
-
await pieceTest.syncMarkdownAdd(dbMocks.db, markdown);
|
|
353
|
-
expect(dbMocks.queries.executeTakeFirst).not.toHaveBeenCalled();
|
|
354
|
-
});
|
|
355
|
-
test('syncMarkdown update', async () => {
|
|
356
|
-
const dbMocks = mockKysely();
|
|
357
|
-
const dbData = { id: 1, slug: 'slug' };
|
|
358
|
-
const PieceTest = makePieceMock();
|
|
359
|
-
const markdown = makeMarkdownSample();
|
|
360
|
-
const pieceTest = new PieceTest();
|
|
361
|
-
mocks.selectItem.mockResolvedValueOnce(dbData);
|
|
362
|
-
spies.syncUpdate = vi.spyOn(pieceTest, 'syncMarkdownUpdate').mockResolvedValueOnce();
|
|
363
|
-
await pieceTest.syncMarkdown(dbMocks.db, markdown);
|
|
364
|
-
expect(spies.syncUpdate).toHaveBeenCalledWith(dbMocks.db, markdown, dbData);
|
|
365
|
-
});
|
|
366
|
-
test('syncMarkdown add', async () => {
|
|
367
|
-
const dbMocks = mockKysely();
|
|
368
|
-
const PieceTest = makePieceMock();
|
|
369
|
-
const markdown = makeMarkdownSample();
|
|
370
|
-
const pieceTest = new PieceTest();
|
|
371
|
-
mocks.selectItem.mockResolvedValueOnce(undefined);
|
|
372
|
-
spies.syncAdd = vi.spyOn(pieceTest, 'syncMarkdownAdd').mockResolvedValueOnce();
|
|
373
|
-
await pieceTest.syncMarkdown(dbMocks.db, markdown);
|
|
374
|
-
expect(spies.syncAdd).toHaveBeenCalledWith(dbMocks.db, markdown);
|
|
375
|
-
});
|
|
376
|
-
test('syncMarkdownUpdate', async () => {
|
|
377
|
-
const dbMocks = mockKysely();
|
|
378
|
-
const PieceTest = makePieceMock();
|
|
221
|
+
test('syncMarkdownAdd inserts piece and adds cache', async () => {
|
|
222
|
+
const PieceType = makePieceMock();
|
|
223
|
+
const storage = makeStorage();
|
|
224
|
+
const piece = new PieceType('table', storage);
|
|
225
|
+
const db = mockKysely().db;
|
|
379
226
|
const markdown = makeMarkdownSample();
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
mocks.
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
const
|
|
390
|
-
const
|
|
391
|
-
const keywords = 'a,b'.split(',');
|
|
392
|
-
const markdown = makeMarkdownSample({ frontmatter: { keywords: keywords.join(',') } });
|
|
393
|
-
const pieceData = { ...makePieceItemSelectable() };
|
|
394
|
-
const updated = { frontmatter_json: JSON.stringify(markdown.frontmatter) };
|
|
395
|
-
const pieceTest = new PieceTest();
|
|
396
|
-
mocks.makeUpdatable.mockReturnValueOnce(updated);
|
|
397
|
-
mocks.updateItem.mockResolvedValueOnce();
|
|
398
|
-
await pieceTest.syncMarkdownUpdate(dbMocks.db, markdown, pieceData);
|
|
399
|
-
expect(mocks.updateItem).toHaveBeenCalledOnce();
|
|
400
|
-
});
|
|
401
|
-
test('syncMarkdownUpdate supports dryRun', async () => {
|
|
402
|
-
const PieceTest = makePieceMock();
|
|
227
|
+
mocks.item.makePieceItemInsertable.mockReturnValue({});
|
|
228
|
+
mocks.pieceUtils.calculateHashFromFile.mockResolvedValue('hash');
|
|
229
|
+
vi.spyOn(storage, 'createReadStream').mockReturnValue({});
|
|
230
|
+
await piece.syncMarkdownAdd(db, markdown);
|
|
231
|
+
expect(mocks.items.insertItem).toHaveBeenCalled();
|
|
232
|
+
expect(mocks.cache.addCache).toHaveBeenCalledWith(db, markdown.filePath, 'hash');
|
|
233
|
+
});
|
|
234
|
+
test('syncMarkdown handles update or add', async () => {
|
|
235
|
+
const PieceType = makePieceMock();
|
|
236
|
+
const piece = new PieceType('table');
|
|
237
|
+
const db = mockKysely().db;
|
|
403
238
|
const markdown = makeMarkdownSample();
|
|
404
|
-
const
|
|
239
|
+
const syncAddSpy = vi.spyOn(piece, 'syncMarkdownAdd').mockResolvedValue(undefined);
|
|
240
|
+
const syncUpdateSpy = vi.spyOn(piece, 'syncMarkdownUpdate').mockResolvedValue(undefined);
|
|
241
|
+
mocks.items.selectItem.mockResolvedValueOnce(undefined);
|
|
242
|
+
await piece.syncMarkdown(db, markdown);
|
|
243
|
+
expect(syncAddSpy).toHaveBeenCalled();
|
|
244
|
+
mocks.items.selectItem.mockResolvedValueOnce(makePieceItemSelectable({ id: '1' }));
|
|
245
|
+
await piece.syncMarkdown(db, markdown);
|
|
246
|
+
expect(syncUpdateSpy).toHaveBeenCalled();
|
|
247
|
+
});
|
|
248
|
+
test('syncMarkdownUpdate updates item and cache', async () => {
|
|
249
|
+
const PieceType = makePieceMock();
|
|
250
|
+
const storage = makeStorage();
|
|
251
|
+
const piece = new PieceType('table', storage);
|
|
405
252
|
const db = mockKysely().db;
|
|
406
|
-
const updated = { frontmatter_json: JSON.stringify(markdown.frontmatter) };
|
|
407
|
-
const pieceTest = new PieceTest();
|
|
408
|
-
mocks.makeUpdatable.mockReturnValueOnce(updated);
|
|
409
|
-
await pieceTest.syncMarkdownUpdate(db, markdown, pieceData);
|
|
410
|
-
});
|
|
411
|
-
test('toMarkdown', () => {
|
|
412
|
-
const pieceMarkdown = makeMarkdownSample();
|
|
413
|
-
const pieceSample = makePieceItemSelectable();
|
|
414
|
-
const PieceTest = makePieceMock();
|
|
415
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(pieceMarkdown);
|
|
416
|
-
mocks.getPieceSchemaFields.mockReturnValueOnce([
|
|
417
|
-
{ name: 'title', type: 'string', format: 'asset' },
|
|
418
|
-
]);
|
|
419
|
-
mocks.databaseValueToFrontmatterValue.mockReturnValueOnce(pieceMarkdown.frontmatter.title);
|
|
420
|
-
const markdown = new PieceTest().toMarkdown(pieceSample);
|
|
421
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(pieceMarkdown.filePath, pieceMarkdown.piece, pieceMarkdown.note, pieceMarkdown.frontmatter);
|
|
422
|
-
expect(markdown).toEqual(pieceMarkdown);
|
|
423
|
-
});
|
|
424
|
-
test('toMarkdown with arrays', () => {
|
|
425
|
-
const pieceMarkdown = makeMarkdownSample();
|
|
426
|
-
const pieceSample = makePieceItemSelectable();
|
|
427
|
-
const PieceTest = makePieceMock();
|
|
428
|
-
const title = ['a', 'b'];
|
|
429
|
-
pieceSample.frontmatter_json = JSON.stringify({ title });
|
|
430
|
-
pieceMarkdown.frontmatter = JSON.parse(pieceSample.frontmatter_json);
|
|
431
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(pieceMarkdown);
|
|
432
|
-
mocks.getPieceSchemaFields.mockReturnValueOnce([
|
|
433
|
-
{ name: 'title', type: 'array', items: { type: 'string' } },
|
|
434
|
-
]);
|
|
435
|
-
mocks.databaseValueToFrontmatterValue.mockReturnValueOnce(title);
|
|
436
|
-
const markdown = new PieceTest().toMarkdown(pieceSample);
|
|
437
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(pieceMarkdown.filePath, pieceMarkdown.piece, pieceMarkdown.note, pieceMarkdown.frontmatter);
|
|
438
|
-
expect(markdown).toEqual(pieceMarkdown);
|
|
439
|
-
});
|
|
440
|
-
test('sync', async () => {
|
|
441
|
-
const dbMocks = mockKysely();
|
|
442
|
-
const slugs = ['a', 'b', 'c'];
|
|
443
253
|
const markdown = makeMarkdownSample();
|
|
444
|
-
const
|
|
445
|
-
|
|
446
|
-
|
|
254
|
+
const data = makePieceItemSelectable();
|
|
255
|
+
mocks.item.makePieceItemUpdatable.mockReturnValue({});
|
|
256
|
+
mocks.pieceUtils.calculateHashFromFile.mockResolvedValue('new-hash');
|
|
257
|
+
vi.spyOn(storage, 'createReadStream').mockReturnValue({});
|
|
258
|
+
await piece.syncMarkdownUpdate(db, markdown, data);
|
|
259
|
+
expect(mocks.items.updateItem).toHaveBeenCalled();
|
|
260
|
+
expect(mocks.cache.updateCache).toHaveBeenCalledWith(db, data.file_path, 'new-hash');
|
|
261
|
+
});
|
|
262
|
+
test('sync adds new pieces to DB', async () => {
|
|
263
|
+
const PieceType = makePieceMock();
|
|
264
|
+
const storage = makeStorage();
|
|
265
|
+
const piece = new PieceType('table', storage);
|
|
266
|
+
const db = mockKysely().db;
|
|
267
|
+
const markdown = makeMarkdownSample({ filePath: 'new.md' });
|
|
447
268
|
mocks.cpus.mockReturnValue([{}]);
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
file_path: '',
|
|
456
|
-
date_added: 0,
|
|
457
|
-
date_updated: null,
|
|
458
|
-
});
|
|
459
|
-
spies.syncMarkdownAdd = vi.spyOn(pieceTest, 'syncMarkdownAdd').mockResolvedValue();
|
|
460
|
-
spies.syncMarkdownUpdate = vi.spyOn(pieceTest, 'syncMarkdownUpdate').mockResolvedValue();
|
|
461
|
-
spies.get = vi.spyOn(pieceTest, 'get').mockResolvedValue(markdown);
|
|
462
|
-
spies.selectItem = mocks.selectItem
|
|
463
|
-
.mockResolvedValueOnce({})
|
|
464
|
-
.mockResolvedValueOnce({})
|
|
465
|
-
.mockResolvedValueOnce(undefined);
|
|
466
|
-
const gen = await pieceTest.sync(dbMocks.db, slugs);
|
|
467
|
-
const added = [];
|
|
468
|
-
const skipped = [];
|
|
469
|
-
const updated = [];
|
|
470
|
-
for await (const file of gen) {
|
|
471
|
-
if (!file.error) {
|
|
472
|
-
if (file.action === 'added') {
|
|
473
|
-
added.push(file);
|
|
474
|
-
}
|
|
475
|
-
else if (file.action === 'skipped') {
|
|
476
|
-
skipped.push(file);
|
|
477
|
-
}
|
|
478
|
-
else if (file.action === 'updated') {
|
|
479
|
-
updated.push(file);
|
|
480
|
-
}
|
|
269
|
+
vi.spyOn(piece, 'get').mockResolvedValue(markdown);
|
|
270
|
+
mocks.items.selectItem.mockResolvedValue(undefined);
|
|
271
|
+
const syncAddSpy = vi.spyOn(piece, 'syncMarkdownAdd').mockResolvedValue(undefined);
|
|
272
|
+
const stream = await piece.sync(db, ['new.md']);
|
|
273
|
+
for await (const result of stream) {
|
|
274
|
+
if (!result.error) {
|
|
275
|
+
expect(result.action).toBe('added');
|
|
481
276
|
}
|
|
482
277
|
}
|
|
483
|
-
expect(
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
const
|
|
491
|
-
const slugs = ['a', 'b', 'c'];
|
|
492
|
-
const markdown = makeMarkdownSample();
|
|
493
|
-
const storage = makeStorage('root');
|
|
494
|
-
const PieceTest = makePieceMock();
|
|
495
|
-
const pieceTest = new PieceTest('books', storage);
|
|
278
|
+
expect(syncAddSpy).toHaveBeenCalled();
|
|
279
|
+
});
|
|
280
|
+
test('sync updates existing pieces if hash changed', async () => {
|
|
281
|
+
const PieceType = makePieceMock();
|
|
282
|
+
const storage = makeStorage();
|
|
283
|
+
const piece = new PieceType('table', storage);
|
|
284
|
+
const db = mockKysely().db;
|
|
285
|
+
const markdown = makeMarkdownSample({ filePath: 'u.md' });
|
|
496
286
|
mocks.cpus.mockReturnValue([{}]);
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
});
|
|
508
|
-
spies.syncMarkdownAdd = vi.spyOn(pieceTest, 'syncMarkdownAdd').mockResolvedValue();
|
|
509
|
-
spies.syncMarkdownUpdate = vi.spyOn(pieceTest, 'syncMarkdownUpdate').mockResolvedValue();
|
|
510
|
-
spies.get = vi.spyOn(pieceTest, 'get').mockResolvedValue(markdown);
|
|
511
|
-
spies.selectItem = mocks.selectItem
|
|
512
|
-
.mockResolvedValueOnce({})
|
|
513
|
-
.mockResolvedValueOnce({})
|
|
514
|
-
.mockResolvedValueOnce(undefined);
|
|
515
|
-
const gen = await pieceTest.sync(dbMocks.db, slugs, { dryRun: true });
|
|
516
|
-
const added = [];
|
|
517
|
-
const skipped = [];
|
|
518
|
-
const updated = [];
|
|
519
|
-
for await (const file of gen) {
|
|
520
|
-
if (!file.error) {
|
|
521
|
-
if (file.action === 'added') {
|
|
522
|
-
added.push(file);
|
|
523
|
-
}
|
|
524
|
-
else if (file.action === 'skipped') {
|
|
525
|
-
skipped.push(file);
|
|
526
|
-
}
|
|
527
|
-
else if (file.action === 'updated') {
|
|
528
|
-
updated.push(file);
|
|
529
|
-
}
|
|
287
|
+
vi.spyOn(piece, 'get').mockResolvedValue(markdown);
|
|
288
|
+
mocks.items.selectItem.mockResolvedValue(makePieceItemSelectable({ id: '1' }));
|
|
289
|
+
mocks.pieceUtils.calculateHashFromFile.mockResolvedValue('new-hash');
|
|
290
|
+
mocks.cache.getCache.mockResolvedValue(makeCache({ content_hash: 'old-hash' }));
|
|
291
|
+
vi.spyOn(storage, 'createReadStream').mockReturnValue({});
|
|
292
|
+
const syncUpdateSpy = vi.spyOn(piece, 'syncMarkdownUpdate').mockResolvedValue(undefined);
|
|
293
|
+
const stream = await piece.sync(db, ['u.md']);
|
|
294
|
+
for await (const result of stream) {
|
|
295
|
+
if (!result.error) {
|
|
296
|
+
expect(result.action).toBe('updated');
|
|
530
297
|
}
|
|
531
298
|
}
|
|
532
|
-
expect(
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
const
|
|
540
|
-
const slugs = ['a', 'b', 'c'];
|
|
541
|
-
const markdown = makeMarkdownSample();
|
|
542
|
-
const storage = makeStorage('root');
|
|
543
|
-
const PieceTest = makePieceMock();
|
|
544
|
-
const pieceTest = new PieceTest('books', storage);
|
|
299
|
+
expect(syncUpdateSpy).toHaveBeenCalled();
|
|
300
|
+
});
|
|
301
|
+
test('sync skips unchanged pieces', async () => {
|
|
302
|
+
const PieceType = makePieceMock();
|
|
303
|
+
const storage = makeStorage();
|
|
304
|
+
const piece = new PieceType('table', storage);
|
|
305
|
+
const db = mockKysely().db;
|
|
306
|
+
const markdown = makeMarkdownSample({ filePath: 's.md' });
|
|
545
307
|
mocks.cpus.mockReturnValue([{}]);
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
date_updated: null,
|
|
556
|
-
});
|
|
557
|
-
spies.syncMarkdownAdd = vi.spyOn(pieceTest, 'syncMarkdownAdd').mockResolvedValue();
|
|
558
|
-
spies.syncMarkdownUpdate = vi.spyOn(pieceTest, 'syncMarkdownUpdate').mockResolvedValue();
|
|
559
|
-
spies.get = vi.spyOn(pieceTest, 'get').mockResolvedValue(markdown);
|
|
560
|
-
spies.selectItem = mocks.selectItem
|
|
561
|
-
.mockResolvedValueOnce({})
|
|
562
|
-
.mockResolvedValueOnce({})
|
|
563
|
-
.mockResolvedValueOnce(undefined);
|
|
564
|
-
const gen = await pieceTest.sync(dbMocks.db, slugs, { force: true });
|
|
565
|
-
const added = [];
|
|
566
|
-
const updated = [];
|
|
567
|
-
for await (const file of gen) {
|
|
568
|
-
if (!file.error) {
|
|
569
|
-
if (file.action === 'added') {
|
|
570
|
-
added.push(file);
|
|
571
|
-
}
|
|
572
|
-
else if (file.action === 'updated') {
|
|
573
|
-
updated.push(file);
|
|
574
|
-
}
|
|
308
|
+
vi.spyOn(piece, 'get').mockResolvedValue(markdown);
|
|
309
|
+
mocks.items.selectItem.mockResolvedValue(makePieceItemSelectable({ id: '1' }));
|
|
310
|
+
mocks.pieceUtils.calculateHashFromFile.mockResolvedValue('same-hash');
|
|
311
|
+
mocks.cache.getCache.mockResolvedValue(makeCache({ content_hash: 'same-hash' }));
|
|
312
|
+
vi.spyOn(storage, 'createReadStream').mockReturnValue({});
|
|
313
|
+
const stream = await piece.sync(db, ['s.md']);
|
|
314
|
+
for await (const result of stream) {
|
|
315
|
+
if (!result.error) {
|
|
316
|
+
expect(result.action).toBe('skipped');
|
|
575
317
|
}
|
|
576
318
|
}
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
const
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
const
|
|
587
|
-
const
|
|
588
|
-
|
|
319
|
+
});
|
|
320
|
+
test('sync handles dryRun', async () => {
|
|
321
|
+
const PieceType = makePieceMock();
|
|
322
|
+
const storage = makeStorage();
|
|
323
|
+
const piece = new PieceType('table', storage);
|
|
324
|
+
const db = mockKysely().db;
|
|
325
|
+
const markdown = makeMarkdownSample({ filePath: 'new.md' });
|
|
326
|
+
vi.spyOn(piece, 'get').mockResolvedValue(markdown);
|
|
327
|
+
mocks.items.selectItem.mockResolvedValue(undefined);
|
|
328
|
+
const syncAddSpy = vi.spyOn(piece, 'syncMarkdownAdd').mockResolvedValue(undefined);
|
|
329
|
+
const stream = await piece.sync(db, ['new.md'], { dryRun: true });
|
|
330
|
+
for await (const result of stream) {
|
|
331
|
+
if (!result.error) {
|
|
332
|
+
expect(result.action).toBe('added');
|
|
333
|
+
}
|
|
334
|
+
}
|
|
335
|
+
expect(syncAddSpy).not.toHaveBeenCalled();
|
|
336
|
+
});
|
|
337
|
+
test('sync handles dryRun update', async () => {
|
|
338
|
+
const PieceType = makePieceMock();
|
|
339
|
+
const storage = makeStorage();
|
|
340
|
+
const piece = new PieceType('table', storage);
|
|
341
|
+
const db = mockKysely().db;
|
|
342
|
+
const markdown = makeMarkdownSample({ filePath: 'u.md' });
|
|
343
|
+
vi.spyOn(piece, 'get').mockResolvedValue(markdown);
|
|
344
|
+
mocks.items.selectItem.mockResolvedValue(makePieceItemSelectable({ id: '1' }));
|
|
345
|
+
mocks.pieceUtils.calculateHashFromFile.mockResolvedValue('new-hash');
|
|
346
|
+
mocks.cache.getCache.mockResolvedValue(makeCache({ content_hash: 'old-hash' }));
|
|
347
|
+
vi.spyOn(storage, 'createReadStream').mockReturnValue({});
|
|
348
|
+
const syncUpdateSpy = vi.spyOn(piece, 'syncMarkdownUpdate').mockResolvedValue(undefined);
|
|
349
|
+
const stream = await piece.sync(db, ['u.md'], { dryRun: true });
|
|
350
|
+
for await (const result of stream) {
|
|
351
|
+
if (!result.error) {
|
|
352
|
+
expect(result.action).toBe('updated');
|
|
353
|
+
}
|
|
354
|
+
}
|
|
355
|
+
expect(syncUpdateSpy).not.toHaveBeenCalled();
|
|
356
|
+
});
|
|
357
|
+
test('sync handles force update even if hash same', async () => {
|
|
358
|
+
const PieceType = makePieceMock();
|
|
359
|
+
const storage = makeStorage();
|
|
360
|
+
const piece = new PieceType('table', storage);
|
|
361
|
+
const db = mockKysely().db;
|
|
362
|
+
const markdown = makeMarkdownSample({ filePath: 'u.md' });
|
|
363
|
+
vi.spyOn(piece, 'get').mockResolvedValue(markdown);
|
|
364
|
+
mocks.items.selectItem.mockResolvedValue(makePieceItemSelectable({ id: '1' }));
|
|
365
|
+
mocks.pieceUtils.calculateHashFromFile.mockResolvedValue('same-hash');
|
|
366
|
+
mocks.cache.getCache.mockResolvedValue(makeCache({ content_hash: 'same-hash' }));
|
|
367
|
+
vi.spyOn(storage, 'createReadStream').mockReturnValue({});
|
|
368
|
+
const syncUpdateSpy = vi.spyOn(piece, 'syncMarkdownUpdate').mockResolvedValue(undefined);
|
|
369
|
+
const stream = await piece.sync(db, ['u.md'], { force: true });
|
|
370
|
+
for await (const result of stream) {
|
|
371
|
+
if (!result.error) {
|
|
372
|
+
expect(result.action).toBe('updated');
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
expect(syncUpdateSpy).toHaveBeenCalled();
|
|
376
|
+
});
|
|
377
|
+
test('sync handles errors', async () => {
|
|
378
|
+
const PieceType = makePieceMock();
|
|
379
|
+
const piece = new PieceType('table');
|
|
380
|
+
const db = mockKysely().db;
|
|
589
381
|
mocks.cpus.mockReturnValue([{}]);
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
382
|
+
vi.spyOn(piece, 'get').mockRejectedValue(new Error('oof'));
|
|
383
|
+
const stream = await piece.sync(db, ['e.md']);
|
|
384
|
+
for await (const result of stream) {
|
|
385
|
+
expect(result.error).toBe(true);
|
|
386
|
+
}
|
|
387
|
+
});
|
|
388
|
+
test('toMarkdown restores frontmatter from DB JSON', () => {
|
|
389
|
+
const PieceType = makePieceMock();
|
|
390
|
+
const piece = new PieceType('table');
|
|
391
|
+
const dbPiece = makePieceItemSelectable({
|
|
392
|
+
frontmatter_json: JSON.stringify({ title: 'db-title', keywords: ['a', 'b'] })
|
|
393
|
+
});
|
|
394
|
+
const result = piece.toMarkdown(dbPiece);
|
|
395
|
+
expect(result.frontmatter.title).toBe('db-title');
|
|
396
|
+
expect(result.frontmatter.keywords).toEqual(['a', 'b']);
|
|
397
|
+
});
|
|
398
|
+
test('setFields updates multiple fields', async () => {
|
|
399
|
+
const PieceType = makePieceMock();
|
|
400
|
+
const schema = makeSchema({
|
|
401
|
+
title: { type: 'string' },
|
|
402
|
+
subtitle: { type: 'string', nullable: true }
|
|
598
403
|
});
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
404
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
405
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 'old', subtitle: 'old' } });
|
|
406
|
+
mocks.pieceUtils.makePieceValue.mockImplementation(async (_, v) => v);
|
|
407
|
+
const updated = await piece.setFields(markdown, { title: 'new', subtitle: 'new' });
|
|
408
|
+
expect(updated.frontmatter.title).toBe('new');
|
|
409
|
+
expect(updated.frontmatter.subtitle).toBe('new');
|
|
410
|
+
});
|
|
411
|
+
test('setField with nested path', async () => {
|
|
412
|
+
const PieceType = makePieceMock();
|
|
413
|
+
const schema = makeSchema({
|
|
414
|
+
meta: {
|
|
415
|
+
type: 'object',
|
|
416
|
+
properties: { author: { type: 'string' } }
|
|
608
417
|
}
|
|
609
|
-
}
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
const
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
const
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
418
|
+
});
|
|
419
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
420
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't' } });
|
|
421
|
+
mocks.pieceUtils.makePieceValue.mockImplementation(async (_, v) => v);
|
|
422
|
+
const updated = await piece.setField(markdown, 'meta.author', 'Bob');
|
|
423
|
+
const fm = updated.frontmatter;
|
|
424
|
+
expect(fm.meta.author).toBe('Bob');
|
|
425
|
+
});
|
|
426
|
+
test('setField appends to nested array', async () => {
|
|
427
|
+
const PieceType = makePieceMock();
|
|
428
|
+
const schema = makeSchema({
|
|
429
|
+
meta: {
|
|
430
|
+
type: 'object',
|
|
431
|
+
properties: { tags: { type: 'array', items: { type: 'string' } } }
|
|
432
|
+
}
|
|
433
|
+
});
|
|
434
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
435
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 'title', meta: { tags: ['a'] } } });
|
|
436
|
+
mocks.pieceUtils.makePieceValue.mockImplementation(async (_, v) => v);
|
|
437
|
+
const updated = await piece.setField(markdown, 'meta.tags', 'b');
|
|
438
|
+
const fm = updated.frontmatter;
|
|
439
|
+
expect(fm.meta.tags).toEqual(['a', 'b']);
|
|
440
|
+
});
|
|
441
|
+
test('setField handles an array of values', async () => {
|
|
442
|
+
const PieceType = makePieceMock();
|
|
443
|
+
const schema = makeSchema({
|
|
444
|
+
tags: { type: 'array', items: { type: 'string' } }
|
|
445
|
+
});
|
|
446
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
447
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', tags: ['a'] } });
|
|
448
|
+
mocks.pieceUtils.makePieceValue.mockImplementation(async (_, v) => v);
|
|
449
|
+
const updated = await piece.setField(markdown, 'tags', ['b', 'c']);
|
|
450
|
+
const fm = updated.frontmatter;
|
|
451
|
+
expect(fm.tags).toEqual(['a', 'b', 'c']);
|
|
452
|
+
});
|
|
453
|
+
test('setField attaches assets even in nested paths', async () => {
|
|
454
|
+
const PieceType = makePieceMock();
|
|
455
|
+
const schema = makeSchema({
|
|
456
|
+
meta: {
|
|
457
|
+
type: 'object',
|
|
458
|
+
properties: { cover: { type: 'string', format: 'asset' } }
|
|
459
|
+
}
|
|
460
|
+
});
|
|
461
|
+
const storage = makeStorage();
|
|
462
|
+
const piece = new PieceType('table', storage, schema);
|
|
622
463
|
const markdown = makeMarkdownSample();
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
const
|
|
626
|
-
const
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
});
|
|
634
|
-
test('setField', async () => {
|
|
464
|
+
mocks.pieceUtils.makePieceValue.mockResolvedValue(new PassThrough());
|
|
465
|
+
mocks.pieceUtils.makePieceAttachment.mockResolvedValue('assets/cover.jpg');
|
|
466
|
+
const updated = await piece.setField(markdown, 'meta.cover', 'upload-me');
|
|
467
|
+
const fm = updated.frontmatter;
|
|
468
|
+
expect(fm.meta.cover).toBe('assets/cover.jpg');
|
|
469
|
+
expect(mocks.pieceUtils.makePieceAttachment).toHaveBeenCalled();
|
|
470
|
+
});
|
|
471
|
+
test('setField handles set error', async () => {
|
|
472
|
+
const PieceType = makePieceMock();
|
|
473
|
+
const piece = new PieceType('table');
|
|
635
474
|
const markdown = makeMarkdownSample();
|
|
636
|
-
|
|
637
|
-
const
|
|
638
|
-
|
|
639
|
-
const PieceTest = makePieceMock();
|
|
640
|
-
const pieceTest = new PieceTest();
|
|
641
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
642
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
643
|
-
mocks.makePieceValue.mockImplementation(async (_, value) => value);
|
|
644
|
-
await pieceTest.setField(markdown, field, value);
|
|
645
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, { ...markdown.frontmatter, [field]: value });
|
|
475
|
+
mocks.pieceUtils.makePieceValue.mockRejectedValue(new Error('bad'));
|
|
476
|
+
const result = await piece.setField(markdown, 'title', 'new');
|
|
477
|
+
expect(result).toBe(markdown);
|
|
646
478
|
});
|
|
647
479
|
test('setField throws on bad field', async () => {
|
|
480
|
+
const PieceType = makePieceMock();
|
|
481
|
+
const piece = new PieceType('table');
|
|
648
482
|
const markdown = makeMarkdownSample();
|
|
649
|
-
const
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
const
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
const markdown = makeMarkdownSample({ frontmatter: { tags } });
|
|
661
|
-
const field = 'tags';
|
|
662
|
-
const value = 'another-tag';
|
|
663
|
-
const fields = [
|
|
664
|
-
{ name: field, type: 'array', items: { type: 'string' } },
|
|
665
|
-
];
|
|
666
|
-
const PieceTest = makePieceMock();
|
|
667
|
-
const pieceTest = new PieceTest();
|
|
668
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
669
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
670
|
-
mocks.makePieceValue.mockImplementation(async (_, value) => value);
|
|
671
|
-
await pieceTest.setField(markdown, field, value);
|
|
672
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, { ...markdown.frontmatter, [field]: [value] });
|
|
673
|
-
});
|
|
674
|
-
test('setField resets array types with another array', async () => {
|
|
675
|
-
const tags = ['tag1', 'tag2'];
|
|
676
|
-
const markdown = makeMarkdownSample({ frontmatter: { tags } });
|
|
677
|
-
const field = 'tags';
|
|
678
|
-
const value = 'another-tag';
|
|
679
|
-
const fields = [
|
|
680
|
-
{ name: field, type: 'array', items: { type: 'string' } },
|
|
681
|
-
];
|
|
682
|
-
const PieceTest = makePieceMock();
|
|
683
|
-
const pieceTest = new PieceTest();
|
|
684
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
685
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
686
|
-
mocks.makePieceValue.mockImplementation(async (_, value) => value);
|
|
687
|
-
await pieceTest.setField(markdown, field, [...tags, value]);
|
|
688
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, { ...markdown.frontmatter, [field]: [...tags, value] });
|
|
689
|
-
});
|
|
690
|
-
test('setField with attachment', async () => {
|
|
691
|
-
const mockReadable = new PassThrough();
|
|
692
|
-
const markdown = makeMarkdownSample();
|
|
693
|
-
const field = 'cover';
|
|
694
|
-
const value = 'file';
|
|
695
|
-
const finalValue = 'path/to/file.jpg';
|
|
696
|
-
const fields = [
|
|
697
|
-
{ name: field, type: 'string', format: 'asset' },
|
|
698
|
-
];
|
|
699
|
-
const PieceTest = makePieceMock();
|
|
700
|
-
const storage = makeStorage('root');
|
|
701
|
-
const pieceTest = new PieceTest('books', storage);
|
|
702
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
703
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
704
|
-
mocks.makePieceValue.mockImplementationOnce(async () => mockReadable);
|
|
705
|
-
mocks.makePieceAttachment.mockResolvedValueOnce(finalValue);
|
|
706
|
-
await pieceTest.setField(markdown, field, value);
|
|
707
|
-
expect(mocks.makePieceAttachment).toHaveBeenCalledOnce();
|
|
708
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, {
|
|
709
|
-
...markdown.frontmatter,
|
|
710
|
-
[field]: finalValue,
|
|
483
|
+
const setting = piece.setField(markdown, 'title2', 'new');
|
|
484
|
+
expect(setting).rejects.toThrow();
|
|
485
|
+
});
|
|
486
|
+
test('removeField unsets a nested value', async () => {
|
|
487
|
+
const PieceType = makePieceMock();
|
|
488
|
+
const schema = makeSchema({
|
|
489
|
+
meta: {
|
|
490
|
+
type: 'object',
|
|
491
|
+
nullable: true,
|
|
492
|
+
properties: { author: { type: 'string', nullable: true } }
|
|
493
|
+
}
|
|
711
494
|
});
|
|
495
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
496
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', meta: { author: 'Alice' } } });
|
|
497
|
+
const updated = await piece.removeField(markdown, 'meta.author');
|
|
498
|
+
const fm = updated.frontmatter;
|
|
499
|
+
expect(fm.meta.author).toBeUndefined();
|
|
712
500
|
});
|
|
713
|
-
test('
|
|
714
|
-
const
|
|
715
|
-
const
|
|
716
|
-
|
|
717
|
-
const value = 'file';
|
|
718
|
-
const fields = [
|
|
719
|
-
{ name: field, type: 'string', format: 'asset' },
|
|
720
|
-
];
|
|
721
|
-
const PieceTest = makePieceMock();
|
|
722
|
-
const storage = makeStorage('root');
|
|
723
|
-
const pieceTest = new PieceTest('books', storage);
|
|
724
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
725
|
-
mocks.makePieceValue.mockImplementationOnce(async () => mockReadable);
|
|
726
|
-
mocks.makePieceAttachment.mockRejectedValueOnce(new Error('oof'));
|
|
727
|
-
const newMarkdown = await pieceTest.setField(markdown, field, value);
|
|
728
|
-
expect(mocks.makePieceAttachment).toHaveBeenCalledOnce();
|
|
729
|
-
// Assert that the original markdown is returned and makePieceMarkdown is NOT called to create a new one.
|
|
730
|
-
expect(newMarkdown).toEqual(markdown);
|
|
731
|
-
expect(mocks.makePieceMarkdown).not.toHaveBeenCalled();
|
|
732
|
-
});
|
|
733
|
-
test('removeFields', async () => {
|
|
734
|
-
const markdown = makeMarkdownSample({ frontmatter: { title: 'title', subtitle: 'sub' } });
|
|
735
|
-
const field = 'subtitle';
|
|
736
|
-
const fields = [
|
|
737
|
-
{ name: field, type: 'string', nullable: true },
|
|
738
|
-
];
|
|
739
|
-
const PieceTest = makePieceMock();
|
|
740
|
-
const pieceTest = new PieceTest();
|
|
741
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
742
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
743
|
-
mocks.makePieceValue.mockImplementationOnce(async (_, value) => value);
|
|
744
|
-
await pieceTest.removeFields(markdown, [field]);
|
|
745
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, {
|
|
746
|
-
...markdown.frontmatter,
|
|
747
|
-
[field]: undefined,
|
|
501
|
+
test('removeField removes specific array index', async () => {
|
|
502
|
+
const PieceType = makePieceMock();
|
|
503
|
+
const schema = makeSchema({
|
|
504
|
+
tags: { type: 'array', nullable: true, items: { type: 'string', nullable: true } }
|
|
748
505
|
});
|
|
506
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
507
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', tags: ['a', 'b', 'c'] } });
|
|
508
|
+
const updated = await piece.removeField(markdown, 'tags.1');
|
|
509
|
+
const fm = updated.frontmatter;
|
|
510
|
+
expect(fm.tags).toEqual(['a', 'c']);
|
|
749
511
|
});
|
|
750
|
-
test('removeField', async () => {
|
|
751
|
-
const
|
|
752
|
-
const
|
|
753
|
-
|
|
754
|
-
{ name: field, type: 'string', nullable: true },
|
|
755
|
-
];
|
|
756
|
-
const PieceTest = makePieceMock();
|
|
757
|
-
const pieceTest = new PieceTest();
|
|
758
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
759
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
760
|
-
mocks.makePieceValue.mockImplementationOnce(async (_, value) => value);
|
|
761
|
-
await pieceTest.removeField(markdown, field);
|
|
762
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, {
|
|
763
|
-
...markdown.frontmatter,
|
|
764
|
-
[field]: undefined,
|
|
512
|
+
test('removeField removes by value from array', async () => {
|
|
513
|
+
const PieceType = makePieceMock();
|
|
514
|
+
const schema = makeSchema({
|
|
515
|
+
tags: { type: 'array', nullable: true, items: { type: 'string', nullable: true } }
|
|
765
516
|
});
|
|
517
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
518
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', tags: ['a', 'b'] } });
|
|
519
|
+
mocks.pieceUtils.makePieceValue.mockImplementation(async (_, v) => v);
|
|
520
|
+
const updated = await piece.removeField(markdown, 'tags', 'a');
|
|
521
|
+
const fm = updated.frontmatter;
|
|
522
|
+
expect(fm.tags).toEqual(['b']);
|
|
523
|
+
});
|
|
524
|
+
test('removeField returns markdown if array value missing', async () => {
|
|
525
|
+
const PieceType = makePieceMock();
|
|
526
|
+
const schema = makeSchema({
|
|
527
|
+
tags: { type: 'array', nullable: true, items: { type: 'string', nullable: true } }
|
|
528
|
+
});
|
|
529
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
530
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', tags: ['a'] } });
|
|
531
|
+
mocks.pieceUtils.makePieceValue.mockResolvedValue('b');
|
|
532
|
+
const result = await piece.removeField(markdown, 'tags', 'b');
|
|
533
|
+
expect(result).toStrictEqual(markdown);
|
|
766
534
|
});
|
|
767
|
-
test('removeField
|
|
768
|
-
const
|
|
769
|
-
const
|
|
770
|
-
const
|
|
771
|
-
const
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
535
|
+
test('removeField handles scalars by value', async () => {
|
|
536
|
+
const PieceType = makePieceMock();
|
|
537
|
+
const schema = makeSchema({ subtitle: { type: 'string', nullable: true } });
|
|
538
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
539
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', subtitle: 's' } });
|
|
540
|
+
mocks.pieceUtils.makePieceValue.mockResolvedValue('s');
|
|
541
|
+
const updated = await piece.removeField(markdown, 'subtitle', 's');
|
|
542
|
+
expect(updated.frontmatter.subtitle).toBeUndefined();
|
|
543
|
+
});
|
|
544
|
+
test('removeField skips removal if scalar value mismatch', async () => {
|
|
545
|
+
const PieceType = makePieceMock();
|
|
546
|
+
const schema = makeSchema({ subtitle: { type: 'string', nullable: true } });
|
|
547
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
548
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', subtitle: 's' } });
|
|
549
|
+
// Setup markdown so current value is 's'
|
|
550
|
+
// Our mock return value will be 'mismatch'
|
|
551
|
+
mocks.pieceUtils.makePieceValue.mockResolvedValue('mismatch');
|
|
552
|
+
const result = await piece.removeField(markdown, 'subtitle', 'mismatch');
|
|
553
|
+
expect(result).toStrictEqual(markdown);
|
|
776
554
|
});
|
|
777
555
|
test('removeField throws on required field', async () => {
|
|
778
|
-
const
|
|
779
|
-
const
|
|
780
|
-
|
|
781
|
-
const PieceTest = makePieceMock();
|
|
782
|
-
const pieceTest = new PieceTest();
|
|
783
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
784
|
-
const updating = pieceTest.removeField(markdown, field);
|
|
785
|
-
await expect(updating).rejects.toThrowError();
|
|
786
|
-
});
|
|
787
|
-
test('removeField with desired value', async () => {
|
|
788
|
-
const value = 'sub';
|
|
789
|
-
const markdown = makeMarkdownSample({ frontmatter: { title: 'title', subtitle: value } });
|
|
790
|
-
const field = 'subtitle';
|
|
791
|
-
const fields = [
|
|
792
|
-
{ name: field, type: 'string', nullable: true },
|
|
793
|
-
];
|
|
794
|
-
const PieceTest = makePieceMock();
|
|
795
|
-
const pieceTest = new PieceTest();
|
|
796
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
797
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
798
|
-
mocks.makePieceValue.mockImplementationOnce(async (_, value) => value);
|
|
799
|
-
await pieceTest.removeField(markdown, field, value);
|
|
800
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, {
|
|
801
|
-
...markdown.frontmatter,
|
|
802
|
-
[field]: undefined,
|
|
556
|
+
const PieceType = makePieceMock();
|
|
557
|
+
const schema = makeSchema({
|
|
558
|
+
title: { type: 'string', nullable: false }
|
|
803
559
|
});
|
|
560
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
561
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't' } });
|
|
562
|
+
await expect(piece.removeField(markdown, 'title')).rejects.toThrow('is a required field');
|
|
804
563
|
});
|
|
805
|
-
test('removeField
|
|
806
|
-
const
|
|
807
|
-
const
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
});
|
|
820
|
-
test('removeField one value from array field', async () => {
|
|
821
|
-
const values = ['one', 'two', 'three'];
|
|
822
|
-
const markdown = makeMarkdownSample({ frontmatter: { title: 'title', subtitle: values } });
|
|
823
|
-
const field = 'subtitle';
|
|
824
|
-
const fields = [
|
|
825
|
-
{ name: field, type: 'array', nullable: true, items: { type: 'string' } },
|
|
826
|
-
];
|
|
827
|
-
const PieceTest = makePieceMock();
|
|
828
|
-
const pieceTest = new PieceTest();
|
|
829
|
-
spies.pieceFields = vi.spyOn(pieceTest, 'fields', 'get').mockReturnValueOnce(fields);
|
|
830
|
-
mocks.makePieceMarkdown.mockReturnValueOnce(markdown);
|
|
831
|
-
mocks.makePieceValue.mockImplementationOnce(async (_, value) => value);
|
|
832
|
-
await pieceTest.removeField(markdown, field, 'two');
|
|
833
|
-
expect(mocks.makePieceMarkdown).toHaveBeenCalledWith(markdown.filePath, markdown.piece, markdown.note, {
|
|
834
|
-
...markdown.frontmatter,
|
|
835
|
-
[field]: ['one', 'three'],
|
|
564
|
+
test('removeField throws on bad field', async () => {
|
|
565
|
+
const PieceType = makePieceMock();
|
|
566
|
+
const schema = makeSchema({
|
|
567
|
+
title: { type: 'string', nullable: false }
|
|
568
|
+
});
|
|
569
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
570
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't' } });
|
|
571
|
+
await expect(piece.removeField(markdown, 'title2')).rejects.toThrow();
|
|
572
|
+
});
|
|
573
|
+
test('removeFields updates multiple fields', async () => {
|
|
574
|
+
const PieceType = makePieceMock();
|
|
575
|
+
const schema = makeSchema({
|
|
576
|
+
s1: { type: 'string', nullable: true },
|
|
577
|
+
s2: { type: 'string', nullable: true }
|
|
836
578
|
});
|
|
579
|
+
const piece = new PieceType('table', makeStorage(), schema);
|
|
580
|
+
const markdown = makeMarkdownSample({ frontmatter: { title: 't', s1: 'v', s2: 'v' } });
|
|
581
|
+
const updated = await piece.removeFields(markdown, ['s1', 's2']);
|
|
582
|
+
const fm = updated.frontmatter;
|
|
583
|
+
expect(fm.s1).toBeUndefined();
|
|
584
|
+
expect(fm.s2).toBeUndefined();
|
|
837
585
|
});
|
|
838
586
|
});
|
|
839
587
|
//# sourceMappingURL=Piece.test.js.map
|