@helloao/cli 0.0.5 → 0.0.6

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
package/db.js CHANGED
@@ -48,9 +48,9 @@ const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
  const fs_extra_1 = require("fs-extra");
  const node_crypto_1 = require("node:crypto");
  const dataset_1 = require("@helloao/tools/generation/dataset");
+ const api_1 = require("@helloao/tools/generation/api");
  const files_1 = require("./files");
  const hash_js_1 = require("hash.js");
- const api_1 = require("@helloao/tools/generation/api");
  const cliPath = require.resolve('./index');
  const migrationsPath = path_1.default.resolve((0, path_1.dirname)(cliPath), 'migrations');
  /**
@@ -101,7 +101,9 @@ async function importTranslationFileBatch(db, files, parser, overwrite) {
  if (overwrite) {
  console.log('Overwriting existing translations.');
  }
- const changedFiles = overwrite ? files : getChangedOrNewInputFiles(db, files);
+ const changedFiles = overwrite
+ ? files
+ : getChangedOrNewInputFiles(db, files);
  console.log('Processing', changedFiles.length, 'changed files');
  console.log('Skipping', files.length - changedFiles.length, 'unchanged files');
  const output = (0, dataset_1.generateDataset)(changedFiles, parser);
@@ -118,7 +120,7 @@ async function importTranslationFileBatch(db, files, parser, overwrite) {
  */
  function getChangedOrNewInputFiles(db, files) {
  const fileExists = db.prepare('SELECT COUNT(*) as c FROM InputFile WHERE translationId = @translationId AND name = @name AND sha256 = @sha256;');
- return files.filter(f => {
+ return files.filter((f) => {
  const count = fileExists.get({
  translationId: f.metadata.translation.id,
  name: path_1.default.basename(f.name),
@@ -128,22 +130,22 @@ function getChangedOrNewInputFiles(db, files) {
  });
  }
  function insertFileMetadata(db, files) {
- const fileUpsert = db.prepare(`INSERT INTO InputFile(
- translationId,
- name,
- format,
- sha256,
- sizeInBytes
- ) VALUES (
- @translationId,
- @name,
- @format,
- @sha256,
- @sizeInBytes
- ) ON CONFLICT(translationId, name) DO
- UPDATE SET
- format=excluded.format,
- sha256=excluded.sha256,
+ const fileUpsert = db.prepare(`INSERT INTO InputFile(
+ translationId,
+ name,
+ format,
+ sha256,
+ sizeInBytes
+ ) VALUES (
+ @translationId,
+ @name,
+ @format,
+ @sha256,
+ @sizeInBytes
+ ) ON CONFLICT(translationId, name) DO
+ UPDATE SET
+ format=excluded.format,
+ sha256=excluded.sha256,
  sizeInBytes=excluded.sizeInBytes;`);
  const insertManyFiles = db.transaction((files) => {
  for (let file of files) {
@@ -159,32 +161,32 @@ function insertFileMetadata(db, files) {
  insertManyFiles(files);
  }
  function insertTranslations(db, translations) {
- const translationUpsert = db.prepare(`INSERT INTO Translation(
- id,
- name,
- language,
- shortName,
- textDirection,
- licenseUrl,
- website,
- englishName
- ) VALUES (
- @id,
- @name,
- @language,
- @shortName,
- @textDirection,
- @licenseUrl,
- @website,
- @englishName
- ) ON CONFLICT(id) DO
- UPDATE SET
- name=excluded.name,
- language=excluded.language,
- shortName=excluded.shortName,
- textDirection=excluded.textDirection,
- licenseUrl=excluded.licenseUrl,
- website=excluded.website,
+ const translationUpsert = db.prepare(`INSERT INTO Translation(
+ id,
+ name,
+ language,
+ shortName,
+ textDirection,
+ licenseUrl,
+ website,
+ englishName
+ ) VALUES (
+ @id,
+ @name,
+ @language,
+ @shortName,
+ @textDirection,
+ @licenseUrl,
+ @website,
+ @englishName
+ ) ON CONFLICT(id) DO
+ UPDATE SET
+ name=excluded.name,
+ language=excluded.language,
+ shortName=excluded.shortName,
+ textDirection=excluded.textDirection,
+ licenseUrl=excluded.licenseUrl,
+ website=excluded.website,
  englishName=excluded.englishName;`);
  const insertManyTranslations = db.transaction((translations) => {
  for (let translation of translations) {
@@ -206,27 +208,27 @@ function insertTranslations(db, translations) {
  }
  }
  function insertTranslationBooks(db, translation, translationBooks) {
- const bookUpsert = db.prepare(`INSERT INTO Book(
- id,
- translationId,
- title,
- name,
- commonName,
- numberOfChapters,
- \`order\`
- ) VALUES (
- @id,
- @translationId,
- @title,
- @name,
- @commonName,
- @numberOfChapters,
- @bookOrder
- ) ON CONFLICT(id,translationId) DO
- UPDATE SET
- title=excluded.title,
- name=excluded.name,
- commonName=excluded.commonName,
+ const bookUpsert = db.prepare(`INSERT INTO Book(
+ id,
+ translationId,
+ title,
+ name,
+ commonName,
+ numberOfChapters,
+ \`order\`
+ ) VALUES (
+ @id,
+ @translationId,
+ @title,
+ @name,
+ @commonName,
+ @numberOfChapters,
+ @bookOrder
+ ) ON CONFLICT(id,translationId) DO
+ UPDATE SET
+ title=excluded.title,
+ name=excluded.name,
+ commonName=excluded.commonName,
  numberOfChapters=excluded.numberOfChapters;`);
  const insertMany = db.transaction((books) => {
  for (let book of books) {
@@ -240,7 +242,7 @@ function insertTranslationBooks(db, translation, translationBooks) {
  name: book.name,
  commonName: book.commonName,
  numberOfChapters: book.chapters.length,
- bookOrder: book.order ?? 9999
+ bookOrder: book.order ?? 9999,
  });
  }
  });
@@ -250,69 +252,69 @@ function insertTranslationBooks(db, translation, translationBooks) {
  }
  }
  function insertTranslationContent(db, translation, book, chapters) {
- const chapterUpsert = db.prepare(`INSERT INTO Chapter(
- translationId,
- bookId,
- number,
- json
- ) VALUES (
- @translationId,
- @bookId,
- @number,
- @json
- ) ON CONFLICT(translationId,bookId,number) DO
- UPDATE SET
+ const chapterUpsert = db.prepare(`INSERT INTO Chapter(
+ translationId,
+ bookId,
+ number,
+ json
+ ) VALUES (
+ @translationId,
+ @bookId,
+ @number,
+ @json
+ ) ON CONFLICT(translationId,bookId,number) DO
+ UPDATE SET
  json=excluded.json;`);
- const verseUpsert = db.prepare(`INSERT INTO ChapterVerse(
- translationId,
- bookId,
- chapterNumber,
- number,
- text,
- contentJson
- ) VALUES (
- @translationId,
- @bookId,
- @chapterNumber,
- @number,
- @text,
- @contentJson
- ) ON CONFLICT(translationId,bookId,chapterNumber,number) DO
- UPDATE SET
- text=excluded.text,
+ const verseUpsert = db.prepare(`INSERT INTO ChapterVerse(
+ translationId,
+ bookId,
+ chapterNumber,
+ number,
+ text,
+ contentJson
+ ) VALUES (
+ @translationId,
+ @bookId,
+ @chapterNumber,
+ @number,
+ @text,
+ @contentJson
+ ) ON CONFLICT(translationId,bookId,chapterNumber,number) DO
+ UPDATE SET
+ text=excluded.text,
  contentJson=excluded.contentJson;`);
- const footnoteUpsert = db.prepare(`INSERT INTO ChapterFootnote(
- translationId,
- bookId,
- chapterNumber,
- id,
- verseNumber,
- text
- ) VALUES (
- @translationId,
- @bookId,
- @chapterNumber,
- @id,
- @verseNumber,
- @text
- ) ON CONFLICT(translationId,bookId,chapterNumber,id) DO
- UPDATE SET
- verseNumber=excluded.verseNumber,
+ const footnoteUpsert = db.prepare(`INSERT INTO ChapterFootnote(
+ translationId,
+ bookId,
+ chapterNumber,
+ id,
+ verseNumber,
+ text
+ ) VALUES (
+ @translationId,
+ @bookId,
+ @chapterNumber,
+ @id,
+ @verseNumber,
+ @text
+ ) ON CONFLICT(translationId,bookId,chapterNumber,id) DO
+ UPDATE SET
+ verseNumber=excluded.verseNumber,
  text=excluded.text;`);
- const chapterAudioUpsert = db.prepare(`INSERT INTO ChapterAudioUrl(
- translationId,
- bookId,
- number,
- reader,
- url
- ) VALUES (
- @translationId,
- @bookId,
- @number,
- @reader,
- @url
- ) ON CONFLICT(translationId,bookId,number,reader) DO
- UPDATE SET
+ const chapterAudioUpsert = db.prepare(`INSERT INTO ChapterAudioUrl(
+ translationId,
+ bookId,
+ number,
+ reader,
+ url
+ ) VALUES (
+ @translationId,
+ @bookId,
+ @number,
+ @reader,
+ @url
+ ) ON CONFLICT(translationId,bookId,number,reader) DO
+ UPDATE SET
  url=excluded.url;`);
  const insertChaptersAndVerses = db.transaction(() => {
  for (let chapter of chapters) {
@@ -427,7 +429,7 @@ function updateTranslationHashes(db, translations) {
  for (let book of books) {
  const chapters = getChapters.all({
  translationId: translation.id,
- bookId: book.id
+ bookId: book.id,
  });
  const bookSha = (0, hash_js_1.sha256)()
  .update(book.translationId)
@@ -453,7 +455,7 @@ function updateTranslationHashes(db, translations) {
  sha256: chapter.sha256,
  translationId: chapter.translationId,
  bookId: chapter.bookId,
- chapterNumber: chapter.number
+ chapterNumber: chapter.number,
  });
  }
  });
@@ -467,7 +469,7 @@ function updateTranslationHashes(db, translations) {
  updateBookHash.run({
  sha256: book.sha256,
  translationId: book.translationId,
- bookId: book.id
+ bookId: book.id,
  });
  }
  });
@@ -479,7 +481,7 @@ function updateTranslationHashes(db, translations) {
  for (let translation of translations) {
  updateTranslationHash.run({
  sha256: translation.sha256,
- translationId: translation.id
+ translationId: translation.id,
  });
  }
  });
@@ -502,8 +504,8 @@ function getPrismaDbFromDir(dir) {
  datasources: {
  db: {
  url: `file:${dbPath}`,
- }
- }
+ },
+ },
  });
  return prisma;
  }
@@ -514,21 +516,23 @@ async function getDbFromDir(dir) {
  }
  async function getDb(dbPath) {
  const db = new better_sqlite3_1.default(dbPath, {});
- db.exec(`CREATE TABLE IF NOT EXISTS "_prisma_migrations" (
- "id" TEXT PRIMARY KEY NOT NULL,
- "checksum" TEXT NOT NULL,
- "finished_at" DATETIME,
- "migration_name" TEXT NOT NULL,
- "logs" TEXT,
- "rolled_back_at" DATETIME,
- "started_at" DATETIME NOT NULL DEFAULT current_timestamp,
- "applied_steps_count" INTEGER UNSIGNED NOT NULL DEFAULT 0
+ db.exec(`CREATE TABLE IF NOT EXISTS "_prisma_migrations" (
+ "id" TEXT PRIMARY KEY NOT NULL,
+ "checksum" TEXT NOT NULL,
+ "finished_at" DATETIME,
+ "migration_name" TEXT NOT NULL,
+ "logs" TEXT,
+ "rolled_back_at" DATETIME,
+ "started_at" DATETIME NOT NULL DEFAULT current_timestamp,
+ "applied_steps_count" INTEGER UNSIGNED NOT NULL DEFAULT 0
  );`);
  const migrations = await (0, fs_extra_1.readdir)(migrationsPath);
- const appliedMigrations = db.prepare('SELECT * FROM _prisma_migrations;').all();
+ const appliedMigrations = db
+ .prepare('SELECT * FROM _prisma_migrations;')
+ .all();
  let missingMigrations = [];
  for (let migration of migrations) {
- if (appliedMigrations.some(m => m.migration_name === migration)) {
+ if (appliedMigrations.some((m) => m.migration_name === migration)) {
  continue;
  }
  if (path_1.default.extname(migration) !== '') {
@@ -570,7 +574,7 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
  query.where = {
  id: {
  in: translationsToLoad,
- }
+ },
  };
  }
  const translations = await db.translation.findMany(query);
@@ -578,7 +582,7 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
  break;
  }
  const dataset = {
- translations: []
+ translations: [],
  };
  for (let translation of translations) {
  const datasetTranslation = {
@@ -609,22 +613,19 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
  const audioLinks = await db.chapterAudioUrl.findMany({
  where: {
  translationId: translation.id,
- bookId: book.id
+ bookId: book.id,
  },
- orderBy: [
- { number: 'asc' },
- { reader: 'asc' }
- ]
+ orderBy: [{ number: 'asc' }, { reader: 'asc' }],
  });
- const bookChapters = chapters.map(chapter => {
+ const bookChapters = chapters.map((chapter) => {
  return {
  chapter: JSON.parse(chapter.json),
  thisChapterAudioLinks: audioLinks
- .filter(link => link.number === chapter.number)
+ .filter((link) => link.number === chapter.number)
  .reduce((acc, link) => {
  acc[link.reader] = link.url;
  return acc;
- }, {})
+ }, {}),
  };
  });
  const datasetBook = {
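
The db.js hunks above all follow the same pattern: a prepared INSERT ... ON CONFLICT ... DO UPDATE statement that is run for each row inside a better-sqlite3 transaction. A minimal sketch of that pattern is shown below; the Record table, its columns, and the records.sqlite path are illustrative examples only, not part of @helloao/cli.

    // Sketch of the prepared-upsert-in-a-transaction pattern used in db.js.
    import Database from 'better-sqlite3';

    const db = new Database('records.sqlite');
    db.exec(`CREATE TABLE IF NOT EXISTS Record (
        id TEXT PRIMARY KEY,
        name TEXT NOT NULL,
        sha256 TEXT NOT NULL
    );`);

    // Prepare a single upsert statement and reuse it for every row.
    const upsert = db.prepare(`INSERT INTO Record(id, name, sha256)
        VALUES (@id, @name, @sha256)
        ON CONFLICT(id) DO UPDATE SET
            name=excluded.name,
            sha256=excluded.sha256;`);

    // db.transaction() wraps the loop so all rows commit (or roll back) together.
    const upsertMany = db.transaction(
        (rows: { id: string; name: string; sha256: string }[]) => {
            for (const row of rows) {
                upsert.run(row);
            }
        }
    );

    upsertMany([{ id: 'a', name: 'Genesis', sha256: 'abc123' }]);
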
package/files.d.ts CHANGED
@@ -1,5 +1,5 @@
- import { InputFile, OutputFile } from "@helloao/tools/generation/common-types";
- import { Readable } from "stream";
+ import { InputFile, OutputFile } from '@helloao/tools/generation/common-types';
+ import { Readable } from 'stream';
  /**
  * Defines an interface that contains information about a serialized file.
  */
@@ -110,4 +110,9 @@ export declare class ZipUploader implements Uploader {
  upload(file: SerializedFile, _overwrite: boolean): Promise<boolean>;
  dispose(): Promise<void>;
  }
+ /**
+ * Calculates the SHa256 hash of the given input files.
+ * @param files The files to hash.
+ */
+ export declare function hashInputFiles(files: InputFile[]): string;
  //# sourceMappingURL=files.d.ts.map
package/files.js CHANGED
@@ -28,6 +28,7 @@ exports.serializeOutputFiles = serializeOutputFiles;
  exports.serializeFile = serializeFile;
  exports.loadTranslationsFiles = loadTranslationsFiles;
  exports.loadTranslationFiles = loadTranslationFiles;
+ exports.hashInputFiles = hashInputFiles;
  const promises_1 = require("fs/promises");
  const path_1 = require("path");
  const path = __importStar(require("path"));
@@ -112,7 +113,7 @@ async function serializeFile(path, content, options) {
  if (fileContent instanceof ReadableStream) {
  json = '';
  for await (const chunk of stream_1.Readable.fromWeb(fileContent, {
- encoding: 'utf-8'
+ encoding: 'utf-8',
  })) {
  json += chunk;
  }
@@ -123,7 +124,7 @@ async function serializeFile(path, content, options) {
  return {
  path,
  content: json,
- sha256: () => (0, base64_js_1.fromByteArray)(new Uint8Array((0, hash_js_1.sha256)().update(json).digest()))
+ sha256: () => (0, base64_js_1.fromByteArray)(new Uint8Array((0, hash_js_1.sha256)().update(json).digest())),
  };
  }
  else if (ext === '.mp3') {
@@ -169,12 +170,14 @@ async function loadTranslationFiles(translation) {
  return [];
  }
  let files = await (0, promises_1.readdir)(translation);
- let usfmFiles = files.filter(f => (0, path_1.extname)(f) === '.usfm' || (0, path_1.extname)(f) === '.usx' || (0, path_1.extname)(f) === '.json');
+ let usfmFiles = files.filter((f) => (0, path_1.extname)(f) === '.usfm' ||
+ (0, path_1.extname)(f) === '.usx' ||
+ (0, path_1.extname)(f) === '.json');
  if (usfmFiles.length <= 0) {
  translation = path.resolve(translation, 'usfm');
  if ((0, fs_extra_1.existsSync)(translation)) {
  files = await (0, promises_1.readdir)(translation);
- usfmFiles = files.filter(f => (0, path_1.extname)(f) === '.usfm');
+ usfmFiles = files.filter((f) => (0, path_1.extname)(f) === '.usfm');
  }
  }
  if (usfmFiles.length <= 0) {
@@ -188,7 +191,7 @@ async function loadTranslationFiles(translation) {
  }
  const filePath = path.resolve(translation, file);
  promises.push(loadFile(filePath, {
- translation: metadata
+ translation: metadata,
  }));
  }
  return await Promise.all(promises);
@@ -216,13 +219,15 @@ async function loadTranslationMetadata(translation) {
  licenseUrl: metadata.copyright.attribution_url,
  website: metadata.copyright.attribution_url,
  shortName: metadata.name.abbrev,
- direction: metadata.direction
+ direction: metadata.direction,
  };
  }
  else {
  const metadataJson = path.resolve(translation, 'metadata.json');
  if ((0, fs_extra_1.existsSync)(metadataJson)) {
- const data = await (0, promises_1.readFile)(metadataJson, { encoding: 'utf-8' });
+ const data = await (0, promises_1.readFile)(metadataJson, {
+ encoding: 'utf-8',
+ });
  return JSON.parse(data);
  }
  }
@@ -238,7 +243,7 @@ async function loadTranslationMetadata(translation) {
  async function loadFile(file, metadata) {
  const extension = path.extname(file);
  const content = await (0, promises_1.readFile)(file, {
- encoding: 'utf-8'
+ encoding: 'utf-8',
  });
  const hash = (0, hash_js_1.sha256)()
  .update(content)
@@ -339,3 +344,19 @@ function makeRelativePath(path) {
  }
  return path;
  }
+ /**
+ * Calculates the SHa256 hash of the given input files.
+ * @param files The files to hash.
+ */
+ function hashInputFiles(files) {
+ let sha = (0, hash_js_1.sha256)();
+ for (let file of files) {
+ if (file.sha256) {
+ sha.update(file.sha256);
+ }
+ else {
+ sha.update(file.content);
+ }
+ }
+ return sha.digest('hex');
+ }
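
The main functional addition in 0.0.6 is the hashInputFiles export shown above: it folds each input file's sha256 (or, when absent, its raw content) into a single SHA-256 hex digest. A minimal usage sketch follows, assuming the compiled files module is reachable as '@helloao/cli/files' (the exact import path and the translation directory are assumptions) and reusing the exported loadTranslationFiles helper:

    // Sketch only: the '@helloao/cli/files' specifier and './bible/BSB' directory
    // are assumptions for illustration; adjust them to match your setup.
    import { loadTranslationFiles, hashInputFiles } from '@helloao/cli/files';

    async function printTranslationHash(dir: string): Promise<void> {
        // Load the USFM/USX/JSON input files for one translation directory...
        const files = await loadTranslationFiles(dir);
        // ...and combine their per-file hashes into one hex digest, e.g. to
        // detect whether anything in the directory changed since the last import.
        console.log(hashInputFiles(files));
    }

    printTranslationHash('./bible/BSB').catch(console.error);
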
@@ -1,26 +1,26 @@
- /*
- Warnings:
-
- - Added the required column `order` to the `Book` table without a default value. This is not possible if the table is not empty.
-
- */
- -- RedefineTables
- PRAGMA defer_foreign_keys=ON;
- PRAGMA foreign_keys=OFF;
- CREATE TABLE "new_Book" (
- "id" TEXT NOT NULL,
- "translationId" TEXT NOT NULL,
- "name" TEXT NOT NULL,
- "commonName" TEXT NOT NULL,
- "title" TEXT,
- "order" INTEGER NOT NULL,
- "numberOfChapters" INTEGER NOT NULL,
-
- PRIMARY KEY ("translationId", "id"),
- CONSTRAINT "Book_translationId_fkey" FOREIGN KEY ("translationId") REFERENCES "Translation" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
- );
- INSERT INTO "new_Book" ("commonName", "id", "name", "numberOfChapters", "title", "translationId") SELECT "commonName", "id", "name", "numberOfChapters", "title", "translationId" FROM "Book";
- DROP TABLE "Book";
- ALTER TABLE "new_Book" RENAME TO "Book";
- PRAGMA foreign_keys=ON;
- PRAGMA defer_foreign_keys=OFF;
+ /*
+ Warnings:
+
+ - Added the required column `order` to the `Book` table without a default value. This is not possible if the table is not empty.
+
+ */
+ -- RedefineTables
+ PRAGMA defer_foreign_keys=ON;
+ PRAGMA foreign_keys=OFF;
+ CREATE TABLE "new_Book" (
+ "id" TEXT NOT NULL,
+ "translationId" TEXT NOT NULL,
+ "name" TEXT NOT NULL,
+ "commonName" TEXT NOT NULL,
+ "title" TEXT,
+ "order" INTEGER NOT NULL,
+ "numberOfChapters" INTEGER NOT NULL,
+
+ PRIMARY KEY ("translationId", "id"),
+ CONSTRAINT "Book_translationId_fkey" FOREIGN KEY ("translationId") REFERENCES "Translation" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+ );
+ INSERT INTO "new_Book" ("commonName", "id", "name", "numberOfChapters", "title", "translationId") SELECT "commonName", "id", "name", "numberOfChapters", "title", "translationId" FROM "Book";
+ DROP TABLE "Book";
+ ALTER TABLE "new_Book" RENAME TO "Book";
+ PRAGMA foreign_keys=ON;
+ PRAGMA defer_foreign_keys=OFF;