shamela 1.0.6 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -1,102 +1,342 @@
- export type GetMasterMetadataResponsePayload = {
- url: string;
- version: number;
- };
- export interface OutputOptions {
- path: string;
- }
- export type DownloadMasterOptions = {
- masterMetadata?: GetMasterMetadataResponsePayload;
- outputFile: OutputOptions;
- };
- export type GetBookMetadataOptions = {
- majorVersion: number;
- minorVersion: number;
- };
- export type GetBookMetadataResponsePayload = {
- majorRelease: number;
- majorReleaseUrl: string;
- minorRelease?: number;
- minorReleaseUrl?: string;
- };
- export type DownloadBookOptions = {
- bookMetadata?: GetBookMetadataResponsePayload;
- outputFile: OutputOptions;
- };
- export type Author = {
+ /**
+ * Represents an author entity.
+ */
+ type Author = {
+ /** Optional biography of the author */
  biography?: string;
+ /** Death year of the author */
  death?: number;
+ /** Unique identifier for the author */
  id: number;
+ /** Name of the author */
  name: string;
  };
- type PDFFile = {
- file: string;
- id?: string;
- };
- export type PDFLinks = {
- alias?: number;
- cover?: number;
- cover_alias?: number;
- files?: PDFFile[];
- root?: string;
- size?: number;
- };
- export type Metadata = {
- coauthor?: number[];
- date: string;
- group?: number;
- hide_diacritic?: boolean;
- min_ver?: number;
- prefix?: string;
- shorts: Record<string, string>;
- sub_books: number[];
- suffix?: string;
- };
- export type Book = {
+ /**
+ * Represents a book entity.
+ */
+ type Book = {
+ /** Author ID(s) associated with the book */
  author: number | number[];
+ /** Bibliography information */
  bibliography: string;
+ /** Category ID the book belongs to */
  category: number;
+ /** Publication date */
  date?: number;
+ /** Optional hint or description */
  hint?: string;
+ /** Unique identifier for the book */
  id: number;
+ /** Major version number */
  major: number;
+ /** Metadata associated with the book */
  metadata: Metadata;
+ /** Optional minor version number */
  minor?: number;
+ /** Name/title of the book */
  name: string;
+ /** Optional PDF download links */
  pdfLinks?: PDFLinks;
+ /** Whether the book is printed (1) or not (0) */
  printed: number;
+ /** Type classification of the book */
  type: number;
  };
- export type Category = {
+ /**
+ * Represents book content data.
+ */
+ type BookData = {
+ /** Array of pages in the book */
+ pages: Page[];
+ /** Optional array of titles/chapters */
+ titles?: Title[];
+ };
+ /**
+ * Represents a category entity.
+ */
+ type Category = {
+ /** Unique identifier for the category */
  id: number;
+ /** Name of the category */
  name: string;
  };
- export type MasterData = {
+ /**
+ * Options for downloading a book.
+ */
+ type DownloadBookOptions = {
+ /** Optional book metadata */
+ bookMetadata?: GetBookMetadataResponsePayload;
+ /** Output file configuration */
+ outputFile: OutputOptions;
+ };
+ /**
+ * Options for downloading master data.
+ */
+ type DownloadMasterOptions = {
+ /** Optional master metadata */
+ masterMetadata?: GetMasterMetadataResponsePayload;
+ /** Output file configuration */
+ outputFile: OutputOptions;
+ };
+ /**
+ * Options for getting book metadata.
+ */
+ type GetBookMetadataOptions = {
+ /** Major version number */
+ majorVersion: number;
+ /** Minor version number */
+ minorVersion: number;
+ };
+ /**
+ * Response payload for book metadata requests.
+ */
+ type GetBookMetadataResponsePayload = {
+ /** Major release version */
+ majorRelease: number;
+ /** URL for major release download */
+ majorReleaseUrl: string;
+ /** Optional minor release version */
+ minorRelease?: number;
+ /** Optional URL for minor release download */
+ minorReleaseUrl?: string;
+ };
+ /**
+ * Response payload for master metadata requests.
+ */
+ type GetMasterMetadataResponsePayload = {
+ /** Download URL */
+ url: string;
+ /** Version number */
+ version: number;
+ };
+ /**
+ * Master data structure containing all core entities.
+ */
+ type MasterData = {
+ /** Array of all authors */
  authors: Author[];
+ /** Array of all books */
  books: Book[];
+ /** Array of all categories */
  categories: Category[];
  };
- export type Page = {
+ /**
+ * Metadata structure for books.
+ */
+ type Metadata = {
+ /** Optional co-author IDs */
+ coauthor?: number[];
+ /** Date information */
+ date: string;
+ /** Optional group identifier */
+ group?: number;
+ /** Whether to hide diacritics */
+ hide_diacritic?: boolean;
+ /** Minimum version requirement */
+ min_ver?: number;
+ /** Optional prefix text */
+ prefix?: string;
+ /** Short codes mapping */
+ shorts: Record<string, string>;
+ /** Sub-book IDs */
+ sub_books: number[];
+ /** Optional suffix text */
+ suffix?: string;
+ };
+ /**
+ * Output file options.
+ */
+ interface OutputOptions {
+ /** Output file path */
+ path: string;
+ }
+ /**
+ * Represents a page in a book.
+ */
+ type Page = {
+ /** Content of the page */
  content: string;
+ /** Unique identifier for the page */
  id: number;
+ /** Optional page number */
  number?: number;
+ /** Optional page reference */
  page?: number;
+ /** Optional part number */
  part?: number;
  };
- export type Title = {
+ /**
+ * PDF links structure for books.
+ */
+ type PDFLinks = {
+ /** Optional alias ID */
+ alias?: number;
+ /** Optional cover ID */
+ cover?: number;
+ /** Optional cover alias ID */
+ cover_alias?: number;
+ /** Optional array of PDF files */
+ files?: PDFFile[];
+ /** Optional root path */
+ root?: string;
+ /** Optional file size */
+ size?: number;
+ };
+ /**
+ * Represents a title or chapter heading.
+ */
+ type Title = {
+ /** Content of the title */
  content: string;
+ /** Unique identifier for the title */
  id: number;
+ /** Page number where title appears */
  page: number;
+ /** Optional parent title ID for hierarchical structure */
  parent?: number;
  };
- export type BookData = {
- pages: Page[];
- titles?: Title[];
+ type PDFFile = {
+ file: string;
+ id?: string;
  };
- export const getBookMetadata: (id: number, options?: GetBookMetadataOptions) => Promise<GetBookMetadataResponsePayload>;
- export const downloadBook: (id: number, options: DownloadBookOptions) => Promise<string>;
- export const getMasterMetadata: (version?: number) => Promise<GetMasterMetadataResponsePayload>;
- export const downloadMasterDatabase: (options: DownloadMasterOptions) => Promise<string>;
- export const getBook: (id: number) => Promise<BookData>;
 
- //# sourceMappingURL=index.d.ts.map
+ /**
+ * Retrieves metadata for a specific book from the Shamela API.
+ *
+ * This function fetches book release information including major and minor release
+ * URLs and version numbers from the Shamela web service.
+ *
+ * @param id - The unique identifier of the book to fetch metadata for
+ * @param options - Optional parameters for specifying major and minor versions
+ * @returns A promise that resolves to book metadata including release URLs and versions
+ *
+ * @throws {Error} When environment variables are not set or API request fails
+ *
+ * @example
+ * ```typescript
+ * const metadata = await getBookMetadata(123, { majorVersion: 1, minorVersion: 2 });
+ * console.log(metadata.majorReleaseUrl); // Download URL for the book
+ * ```
+ */
+ declare const getBookMetadata: (id: number, options?: GetBookMetadataOptions) => Promise<GetBookMetadataResponsePayload>;
+ /**
+ * Downloads and processes a book from the Shamela database.
+ *
+ * This function downloads the book's database files, applies patches if available,
+ * creates the necessary database tables, and exports the data to the specified format.
+ * The output can be either a JSON file or a SQLite database file.
+ *
+ * @param id - The unique identifier of the book to download
+ * @param options - Configuration options including output file path and optional book metadata
+ * @returns A promise that resolves to the path of the created output file
+ *
+ * @throws {Error} When download fails, database operations fail, or file operations fail
+ *
+ * @example
+ * ```typescript
+ * // Download as JSON
+ * const jsonPath = await downloadBook(123, {
+ * outputFile: { path: './book.json' }
+ * });
+ *
+ * // Download as SQLite database
+ * const dbPath = await downloadBook(123, {
+ * outputFile: { path: './book.db' }
+ * });
+ * ```
+ */
+ declare const downloadBook: (id: number, options: DownloadBookOptions) => Promise<string>;
+ /**
+ * Retrieves metadata for the master database from the Shamela API.
+ *
+ * The master database contains information about all books, authors, and categories
+ * in the Shamela library. This function fetches the download URL and version
+ * information for the master database patches.
+ *
+ * @param version - The version number to check for updates (defaults to 0)
+ * @returns A promise that resolves to master database metadata including download URL and version
+ *
+ * @throws {Error} When environment variables are not set or API request fails
+ *
+ * @example
+ * ```typescript
+ * const masterMetadata = await getMasterMetadata(5);
+ * console.log(masterMetadata.url); // URL to download master database patch
+ * console.log(masterMetadata.version); // Latest version number
+ * ```
+ */
+ declare const getMasterMetadata: (version?: number) => Promise<GetMasterMetadataResponsePayload>;
+ /**
+ * Generates the URL for a book's cover image.
+ *
+ * This function constructs the URL to access the cover image for a specific book
+ * using the book's ID and the API endpoint host.
+ *
+ * @param bookId - The unique identifier of the book
+ * @returns The complete URL to the book's cover image
+ *
+ * @example
+ * ```typescript
+ * const coverUrl = getCoverUrl(123);
+ * console.log(coverUrl); // "https://api.shamela.ws/covers/123.jpg"
+ * ```
+ */
+ declare const getCoverUrl: (bookId: number) => string;
+ /**
+ * Downloads and processes the master database from the Shamela service.
+ *
+ * The master database contains comprehensive information about all books, authors,
+ * and categories available in the Shamela library. This function downloads the
+ * database files, creates the necessary tables, and exports the data in the
+ * specified format (JSON or SQLite).
+ *
+ * @param options - Configuration options including output file path and optional master metadata
+ * @returns A promise that resolves to the path of the created output file
+ *
+ * @throws {Error} When download fails, expected tables are missing, database operations fail, or file operations fail
+ *
+ * @example
+ * ```typescript
+ * // Download master database as JSON
+ * const jsonPath = await downloadMasterDatabase({
+ * outputFile: { path: './master.json' }
+ * });
+ *
+ * // Download master database as SQLite
+ * const dbPath = await downloadMasterDatabase({
+ * outputFile: { path: './master.db' }
+ * });
+ * ```
+ */
+ declare const downloadMasterDatabase: (options: DownloadMasterOptions) => Promise<string>;
+ /**
+ * Retrieves complete book data including pages and titles.
+ *
+ * This is a convenience function that downloads a book's data and returns it
+ * as a structured JavaScript object. The function handles the temporary file
+ * creation and cleanup automatically.
+ *
+ * @param id - The unique identifier of the book to retrieve
+ * @returns A promise that resolves to the complete book data including pages and titles
+ *
+ * @throws {Error} When download fails, file operations fail, or JSON parsing fails
+ *
+ * @example
+ * ```typescript
+ * const bookData = await getBook(123);
+ * console.log(bookData.pages.length); // Number of pages in the book
+ * console.log(bookData.titles?.length); // Number of title entries
+ * ```
+ */
+ declare const getBook: (id: number) => Promise<BookData>;
+
+ type LogFunction = (...args: unknown[]) => void;
+ interface Logger {
+ debug: LogFunction;
+ error: LogFunction;
+ info: LogFunction;
+ warn?: LogFunction;
+ }
+ declare const setLogger: (newLogger?: Logger) => void;
+
+ export { type Author, type Book, type BookData, type Category, type DownloadBookOptions, type DownloadMasterOptions, type GetBookMetadataOptions, type GetBookMetadataResponsePayload, type GetMasterMetadataResponsePayload, type MasterData, type Metadata, type OutputOptions, type PDFLinks, type Page, type Title, downloadBook, downloadMasterDatabase, getBook, getBookMetadata, getCoverUrl, getMasterMetadata, setLogger };
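
Taken together, the declarations above form the 1.1.0 public surface: the option and payload types are now documented with JSDoc, `getCoverUrl` and `setLogger` are new exports, and the five original functions keep their previous signatures. A minimal usage sketch, assuming the three environment variables the bundled validator checks (`SHAMELA_API_MASTER_PATCH_ENDPOINT`, `SHAMELA_API_BOOKS_ENDPOINT`, `SHAMELA_API_KEY`) are set, with a placeholder book ID and output paths:

```typescript
import { downloadBook, getBook, getCoverUrl, setLogger } from 'shamela';

// Opt in to logging; the library is silent by default.
setLogger({ debug: console.debug, error: console.error, info: console.info, warn: console.warn });

// The output extension selects the format: .json writes parsed data,
// while .db/.sqlite keeps the assembled SQLite database.
const jsonPath = await downloadBook(123, { outputFile: { path: './book.json' } });
const dbPath = await downloadBook(123, { outputFile: { path: './book.db' } });

// Or skip the intermediate file handling and get parsed data directly.
const { pages, titles } = await getBook(123);
console.log(pages.length, titles?.length, getCoverUrl(123));
```
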
package/dist/index.js ADDED
@@ -0,0 +1,44 @@
+ import{Database as W}from"bun:sqlite";import{promises as u}from"fs";import d from"path";import L from"process";import{URL as q}from"url";var M={debug:()=>{},error:()=>{},info:()=>{},warn:()=>{}},c=M,V=(t=M)=>{if(!t.debug||!t.error||!t.info)throw new Error("Logger must implement debug, error, and info methods");c=t};var Y=t=>`'${t.replace(/'/g,"''")}'`,f=(t,e)=>{let r=a=>/^[A-Za-z_][A-Za-z0-9_]*$/.test(a);if(!r(e))throw new Error(`Invalid database name: ${e}`);return t.query(`SELECT name FROM ${e}.sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'`).all().map(({name:a})=>{let s=r(a)?a:Y(a);return{fields:t.query(`PRAGMA ${e}.table_info(${s})`).all().map(p=>p.name),name:a}})};var N="main",E=(t,e)=>{let r=t.replace(/'/g,"''");if(!/^[a-zA-Z0-9_]+$/.test(e))throw new Error("Invalid database alias");return`ATTACH DATABASE '${r}' AS ${e}`},O=(t,e,r=N)=>`
+ UPDATE ${r}.${e}
+ SET content = ${R("content",r,t)},
+ part = ${R("part",r,t)},
+ page = ${R("page",r,t)},
+ number = ${R("number",r,t)}
+ WHERE EXISTS (
+ SELECT 1
+ FROM ${t}.${e}
+ WHERE ${r}.${e}.id = ${t}.${e}.id
+ );
+ `,w=(t,e,r)=>`
+ (SELECT CASE
+ WHEN ${r}.title.${t} != '#' THEN ${r}.title.${t}
+ ELSE ${e}.title.${t}
+ END
+ FROM ${r}.title
+ WHERE ${e}.title.id = ${r}.title.id)
+ `,k=(t,e,r=N)=>`
+ UPDATE ${r}.${e}
+ SET content = ${w("content",r,t)},
+ page = ${w("page",r,t)},
+ parent = ${w("parent",r,t)}
+ WHERE EXISTS (
+ SELECT 1
+ FROM ${t}.${e}
+ WHERE ${r}.${e}.id = ${t}.${e}.id
+ );
+ `;var m=t=>{if(!/^[a-zA-Z0-9_]+$/.test(t))throw new Error("Invalid database alias");return`DETACH DATABASE ${t}`},R=(t,e,r)=>`
+ (SELECT CASE
+ WHEN ${r}.page.${t} != '#' THEN ${r}.page.${t}
+ ELSE ${e}.page.${t}
+ END
+ FROM ${r}.page
+ WHERE ${e}.page.id = ${r}.page.id)
+ `;var g="patch",l="asl",B=(t,e,r,o,a)=>{let s=[];if(t.find(n=>n.name===r))s.push(`INSERT INTO main.${r}
+ SELECT ${o.join(",")}
+ FROM ${l}.${r}
+ WHERE id NOT IN (
+ SELECT id
+ FROM ${g}.${r}
+ WHERE is_deleted='1'
+ )`),s.push(a);else{let n=`INSERT INTO main.${r} SELECT ${o.join(",")} FROM ${l}.${r}`;e.find(i=>i.name===r)?.fields.includes("is_deleted")&&(n+=" WHERE is_deleted='0'"),s.push(n)}return s},C=(t,e,r)=>{t.run(E(e,l)),t.run(E(r,g));try{let o=f(t,g),a=f(t,l);c.debug({aslTables:a,patchTables:o},"Applying patches for...");let s=B(o,a,"page",["id","content","part","page","number"],O(g,"page")),n=B(o,a,"title",["id","content","page","parent"],k(g,"title")),i=[...s,...n].map(p=>t.prepare(p));t.transaction(()=>{i.forEach(p=>p.run())})()}finally{t.run(m(l)),t.run(m(g))}},x=(t,e)=>{t.run(E(e,l));let r=f(t,l);c.debug({tables:r},"copyTableData...");let o=t.prepare(`INSERT INTO main.title SELECT id,content,page,parent FROM ${l}.title`),a=t.prepare(`INSERT INTO main.page SELECT id,content,part,page,number FROM ${l}.page`);t.transaction(()=>{o.run(),a.run()})(),t.run(m(l))},F=t=>{t.run("CREATE TABLE page (id INTEGER PRIMARY KEY, content TEXT, part INTEGER, page INTEGER, number INTEGER)"),t.run("CREATE TABLE title (id INTEGER PRIMARY KEY, content TEXT, page INTEGER, parent INTEGER)")},z=t=>t.query("SELECT * FROM page").all().map(r=>{let{content:o,id:a,number:s,page:n,part:i}=r;return{content:o,id:a,...n&&{page:n},...s&&{number:s},...i&&{part:i}}}),J=t=>t.query("SELECT * FROM title").all().map(r=>{let o=r;return{content:o.content,id:o.id,page:o.page,...o.parent&&{parent:o.parent}}}),H=t=>({pages:z(t),titles:J(t)});import Z from"path";var h="99999";var U=(t,e)=>{let r=e.reduce((n,i)=>{let{name:p}=Z.parse(i);return{...n,[p]:i}},{});Object.entries(r).forEach(([n,i])=>t.run(E(i,n)));let o=t.prepare(`INSERT INTO authors SELECT id,name,biography,(CASE WHEN death_number = ${h} THEN NULL ELSE death_number END) AS death_number FROM author WHERE is_deleted='0'`),a=t.prepare(`INSERT INTO books SELECT id,name,category,type,(CASE WHEN date = ${h} THEN NULL ELSE date END) AS date,author,printed,major_release,minor_release,bibliography,hint,pdf_links,metadata FROM book WHERE is_deleted='0'`),s=t.prepare("INSERT INTO categories SELECT id,name FROM category WHERE is_deleted='0'");t.transaction(()=>{o.run(),a.run(),s.run()})(),Object.keys(r).forEach(n=>t.run(m(n)))},j=t=>{t.run("CREATE TABLE authors (id INTEGER PRIMARY KEY, name TEXT, biography TEXT, death INTEGER)"),t.run("CREATE TABLE books (id INTEGER PRIMARY KEY, name TEXT, category INTEGER, type INTEGER, date INTEGER, author TEXT, printed INTEGER, major INTEGER, minor INTEGER, bibliography TEXT, hint TEXT, pdf_links TEXT, metadata TEXT)"),t.run("CREATE TABLE categories (id INTEGER PRIMARY KEY, name TEXT)")},Q=t=>t.query("SELECT * FROM authors").all().map(o=>({...o.biography&&{biography:o.biography},...o.death&&{death:o.death},id:o.id,name:o.name})),tt=t=>t.query("SELECT * FROM books").all().map(o=>{let a=o;return{author:rt(a.author),bibliography:a.bibliography,category:a.category,id:a.id,major:a.major,metadata:JSON.parse(a.metadata),name:a.name,printed:a.printed,type:a.type,...a.date&&a.date.toString()!==h&&{date:a.date},...a.hint&&{hint:a.hint},...a.pdf_links&&{pdfLinks:ot(a.pdf_links)},...a.minor&&{minorRelease:a.minor}}}),et=t=>t.query("SELECT * FROM categories").all().map(o=>({id:o.id,name:o.name})),rt=t=>{let e=t.split(",\\s+").map(r=>parseInt(r.trim()));return e.length>1?e:e[0]},ot=t=>{let e=JSON.parse(t);return e.files&&(e.files=e.files.map(r=>{let[o,a]=r.split("|");return{...a&&{id:a},file:o}})),e},G=t=>({authors:Q(t),books:tt(t),categories:et(t)});import{createWriteStream as at,promises as b}from"fs";import nt from"https";import st from"os";import _ 
from"path";import{pipeline as it}from"stream/promises";import pt from"unzipper";var y=async(t="shamela")=>{let e=_.join(st.tmpdir(),t);return b.mkdtemp(e)};async function $(t,e){let r=[];try{let o=await new Promise((a,s)=>{nt.get(t,n=>{n.statusCode!==200?s(new Error(`Failed to download ZIP file: ${n.statusCode} ${n.statusMessage}`)):a(n)}).on("error",n=>{s(new Error(`HTTPS request failed: ${n.message}`))})});return await new Promise((a,s)=>{let n=pt.Parse(),i=[];n.on("entry",p=>{let S=(async()=>{let T=_.join(e,p.path);if(p.type==="Directory")await b.mkdir(T,{recursive:!0}),p.autodrain();else{let X=_.dirname(T);await b.mkdir(X,{recursive:!0});let K=at(T);await it(p,K),r.push(T)}})();i.push(S)}),n.on("finish",async()=>{try{await Promise.all(i),a()}catch(p){s(p)}}),n.on("error",p=>{s(new Error(`Error during extraction: ${p.message}`))}),o.pipe(n)}),r}catch(o){throw new Error(`Error processing URL: ${o.message}`)}}import{Buffer as ct}from"buffer";import lt from"https";import Et from"process";import{URL as mt,URLSearchParams as gt}from"url";var D=(t,e,r=!0)=>{let o=new mt(t);{let a=new gt;Object.entries(e).forEach(([s,n])=>{a.append(s,n.toString())}),r&&a.append("api_key",Et.env.SHAMELA_API_KEY),o.search=a.toString()}return o},I=t=>new Promise((e,r)=>{lt.get(t,o=>{let a=o.headers["content-type"]||"",s=[];o.on("data",n=>{s.push(n)}),o.on("end",()=>{let n=ct.concat(s);if(a.includes("application/json"))try{let i=JSON.parse(n.toString("utf-8"));e(i)}catch(i){r(new Error(`Failed to parse JSON: ${i.message}`))}else e(n)})}).on("error",o=>{r(new Error(`Error making request: ${o.message}`))})});import ut from"path";import dt from"process";var Tt=["author.sqlite","book.sqlite","category.sqlite"],A=()=>{let t=["SHAMELA_API_MASTER_PATCH_ENDPOINT","SHAMELA_API_BOOKS_ENDPOINT","SHAMELA_API_KEY"].filter(e=>!dt.env[e]);if(t.length)throw new Error(`${t.join(", ")} environment variables not set`)},v=t=>{let e=new Set(t.map(r=>ut.basename(r).toLowerCase()));return Tt.every(r=>e.has(r.toLowerCase()))};var P=t=>{let e=new q(t);return e.protocol="https",e.toString()},ft=async(t,e)=>{A();let r=D(`${L.env.SHAMELA_API_BOOKS_ENDPOINT}/${t}`,{major_release:(e?.majorVersion||0).toString(),minor_release:(e?.minorVersion||0).toString()});c.info(`Fetching shamela.ws book link: ${r.toString()}`);try{let o=await I(r);return{majorRelease:o.major_release,majorReleaseUrl:P(o.major_release_url),...o.minor_release_url&&{minorReleaseUrl:P(o.minor_release_url)},...o.minor_release_url&&{minorRelease:o.minor_release}}}catch(o){throw new Error(`Error fetching book metadata: ${o.message}`)}},Rt=async(t,e)=>{c.info(`downloadBook ${t} ${JSON.stringify(e)}`);let r=await y("shamela_downloadBook"),o=e?.bookMetadata||await ft(t),[[a],[s]=[]]=await Promise.all([$(o.majorReleaseUrl,r),...o.minorReleaseUrl?[$(o.minorReleaseUrl,r)]:[]]),n=d.join(r,"book.db"),i=new W(n);try{c.info("Creating tables"),await F(i),s?(c.info(`Applying patches from ${s} to ${a}`),await C(i,a,s)):(c.info(`Copying table data from ${a}`),await x(i,a));let{ext:p}=d.parse(e.outputFile.path);if(p===".json"){let S=await H(i);await Bun.file(e.outputFile.path).write(JSON.stringify(S,null,2))}i.close(),(p===".db"||p===".sqlite")&&await u.rename(n,e.outputFile.path),await u.rm(r,{recursive:!0})}finally{i.close()}return e.outputFile.path},ht=async(t=0)=>{A();let e=D(L.env.SHAMELA_API_MASTER_PATCH_ENDPOINT,{version:t.toString()});c.info(`Fetching shamela.ws master database patch link: ${e.toString()}`);try{let r=await I(e);return{url:r.patch_url,version:r.version}}catch(r){throw new 
Error(`Error fetching master patch: ${r.message}`)}},ie=t=>{let{origin:e}=new q(L.env.SHAMELA_API_MASTER_PATCH_ENDPOINT);return`${e}/covers/${t}.jpg`},pe=async t=>{c.info(`downloadMasterDatabase ${JSON.stringify(t)}`);let e=await y("shamela_downloadMaster"),r=t.masterMetadata||await ht(0);c.info(`Downloading master database from: ${JSON.stringify(r)}`);let o=await $(P(r.url),e);if(c.info(`sourceTables downloaded: ${o.toString()}`),!v(o))throw c.error(`Some source tables were not found: ${o.toString()}`),new Error("Expected tables not found!");let a=d.join(e,"master.db"),s=new W(a);try{c.info("Creating tables"),await j(s),c.info("Copying data to master table"),await U(s,o);let{ext:n}=d.parse(t.outputFile.path);if(n===".json"){let i=await G(s);await Bun.file(t.outputFile.path).write(JSON.stringify(i,null,2))}s.close(),(n===".db"||n===".sqlite")&&await u.rename(a,t.outputFile.path),await u.rm(e,{recursive:!0})}finally{s.close()}return t.outputFile.path},ce=async t=>{let e=await y("shamela_getBookData"),r=await Rt(t,{outputFile:{path:d.join(e,`${t}.json`)}}),o=await Bun.file(r).json();return await u.rm(e,{recursive:!0}),o};export{Rt as downloadBook,pe as downloadMasterDatabase,ce as getBook,ft as getBookMetadata,ie as getCoverUrl,ht as getMasterMetadata,V as setLogger};
+ //# sourceMappingURL=index.js.map
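
One runtime note on the new bundle: it imports `Database` from `bun:sqlite` and writes output through `Bun.file()`, so 1.1.0 targets the Bun runtime rather than Node.js. The minified `V` helper above is `setLogger`: the library logs nothing by default, a replacement logger must implement `debug`, `error`, and `info` (`warn` is optional), and calling `setLogger()` with no argument restores the silent default. A short sketch of that contract:

```typescript
import { setLogger } from 'shamela';

// Enable verbose logging for a debugging session.
setLogger({ debug: console.debug, error: console.error, info: console.info });

// ... perform downloads ...

// Restore the built-in no-op logger.
setLogger();

// Passing an incomplete logger throws:
// "Logger must implement debug, error, and info methods"
```
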
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/api.ts","../src/utils/logger.ts","../src/db/common.ts","../src/db/queryBuilder.ts","../src/db/book.ts","../src/db/master.ts","../src/utils/constants.ts","../src/utils/io.ts","../src/utils/network.ts","../src/utils/validation.ts"],"sourcesContent":["import { Database } from 'bun:sqlite';\nimport { promises as fs } from 'node:fs';\nimport path from 'node:path';\nimport process from 'node:process';\nimport { URL } from 'node:url';\n\nimport { applyPatches, copyTableData, createTables as createBookTables, getData as getBookData } from './db/book.js';\nimport {\n copyForeignMasterTableData,\n createTables as createMasterTables,\n getData as getMasterData,\n} from './db/master.js';\nimport {\n BookData,\n DownloadBookOptions,\n DownloadMasterOptions,\n GetBookMetadataOptions,\n GetBookMetadataResponsePayload,\n GetMasterMetadataResponsePayload,\n} from './types.js';\nimport { DEFAULT_MASTER_METADATA_VERSION } from './utils/constants.js';\nimport { createTempDir, unzipFromUrl } from './utils/io.js';\nimport logger from './utils/logger.js';\nimport { buildUrl, httpsGet } from './utils/network.js';\nimport { validateEnvVariables, validateMasterSourceTables } from './utils/validation.js';\n\nconst fixHttpsProtocol = (originalUrl: string) => {\n const url = new URL(originalUrl);\n url.protocol = 'https';\n\n return url.toString();\n};\n\ntype BookUpdatesResponse = {\n major_release: number;\n major_release_url: string;\n minor_release?: number;\n minor_release_url?: string;\n};\n\n/**\n * Retrieves metadata for a specific book from the Shamela API.\n *\n * This function fetches book release information including major and minor release\n * URLs and version numbers from the Shamela web service.\n *\n * @param id - The unique identifier of the book to fetch metadata for\n * @param options - Optional parameters for specifying major and minor versions\n * @returns A promise that resolves to book metadata including release URLs and versions\n *\n * @throws {Error} When environment variables are not set or API request fails\n *\n * @example\n * ```typescript\n * const metadata = await getBookMetadata(123, { majorVersion: 1, minorVersion: 2 });\n * console.log(metadata.majorReleaseUrl); // Download URL for the book\n * ```\n */\nexport const getBookMetadata = async (\n id: number,\n options?: GetBookMetadataOptions,\n): Promise<GetBookMetadataResponsePayload> => {\n validateEnvVariables();\n\n const url = buildUrl(`${process.env.SHAMELA_API_BOOKS_ENDPOINT}/${id}`, {\n major_release: (options?.majorVersion || 0).toString(),\n minor_release: (options?.minorVersion || 0).toString(),\n });\n\n logger.info(`Fetching shamela.ws book link: ${url.toString()}`);\n\n try {\n const response = (await httpsGet(url)) as BookUpdatesResponse;\n return {\n majorRelease: response.major_release,\n majorReleaseUrl: fixHttpsProtocol(response.major_release_url),\n ...(response.minor_release_url && { minorReleaseUrl: fixHttpsProtocol(response.minor_release_url) }),\n ...(response.minor_release_url && { minorRelease: response.minor_release }),\n };\n } catch (error: any) {\n throw new Error(`Error fetching book metadata: ${error.message}`);\n }\n};\n\n/**\n * Downloads and processes a book from the Shamela database.\n *\n * This function downloads the book's database files, applies patches if available,\n * creates the necessary database tables, and exports the data to the specified format.\n * The output can be either a JSON file or a SQLite database file.\n *\n * @param id - The unique 
identifier of the book to download\n * @param options - Configuration options including output file path and optional book metadata\n * @returns A promise that resolves to the path of the created output file\n *\n * @throws {Error} When download fails, database operations fail, or file operations fail\n *\n * @example\n * ```typescript\n * // Download as JSON\n * const jsonPath = await downloadBook(123, {\n * outputFile: { path: './book.json' }\n * });\n *\n * // Download as SQLite database\n * const dbPath = await downloadBook(123, {\n * outputFile: { path: './book.db' }\n * });\n * ```\n */\nexport const downloadBook = async (id: number, options: DownloadBookOptions): Promise<string> => {\n logger.info(`downloadBook ${id} ${JSON.stringify(options)}`);\n\n const outputDir = await createTempDir('shamela_downloadBook');\n\n const bookResponse: GetBookMetadataResponsePayload = options?.bookMetadata || (await getBookMetadata(id));\n const [[bookDatabase], [patchDatabase] = []]: string[][] = await Promise.all([\n unzipFromUrl(bookResponse.majorReleaseUrl, outputDir),\n ...(bookResponse.minorReleaseUrl ? [unzipFromUrl(bookResponse.minorReleaseUrl, outputDir)] : []),\n ]);\n const dbPath = path.join(outputDir, 'book.db');\n\n const client = new Database(dbPath);\n\n try {\n logger.info(`Creating tables`);\n await createBookTables(client);\n\n if (patchDatabase) {\n logger.info(`Applying patches from ${patchDatabase} to ${bookDatabase}`);\n await applyPatches(client, bookDatabase, patchDatabase);\n } else {\n logger.info(`Copying table data from ${bookDatabase}`);\n await copyTableData(client, bookDatabase);\n }\n\n const { ext: extension } = path.parse(options.outputFile.path);\n\n if (extension === '.json') {\n const result = await getBookData(client);\n await Bun.file(options.outputFile.path).write(JSON.stringify(result, null, 2));\n }\n\n client.close();\n\n if (extension === '.db' || extension === '.sqlite') {\n await fs.rename(dbPath, options.outputFile.path);\n }\n\n await fs.rm(outputDir, { recursive: true });\n } finally {\n client.close();\n }\n\n return options.outputFile.path;\n};\n\n/**\n * Retrieves metadata for the master database from the Shamela API.\n *\n * The master database contains information about all books, authors, and categories\n * in the Shamela library. 
This function fetches the download URL and version\n * information for the master database patches.\n *\n * @param version - The version number to check for updates (defaults to 0)\n * @returns A promise that resolves to master database metadata including download URL and version\n *\n * @throws {Error} When environment variables are not set or API request fails\n *\n * @example\n * ```typescript\n * const masterMetadata = await getMasterMetadata(5);\n * console.log(masterMetadata.url); // URL to download master database patch\n * console.log(masterMetadata.version); // Latest version number\n * ```\n */\nexport const getMasterMetadata = async (version: number = 0): Promise<GetMasterMetadataResponsePayload> => {\n validateEnvVariables();\n\n const url = buildUrl(process.env.SHAMELA_API_MASTER_PATCH_ENDPOINT as string, { version: version.toString() });\n\n logger.info(`Fetching shamela.ws master database patch link: ${url.toString()}`);\n\n try {\n const response: Record<string, any> = await httpsGet(url);\n return { url: response.patch_url, version: response.version };\n } catch (error: any) {\n throw new Error(`Error fetching master patch: ${error.message}`);\n }\n};\n\n/**\n * Generates the URL for a book's cover image.\n *\n * This function constructs the URL to access the cover image for a specific book\n * using the book's ID and the API endpoint host.\n *\n * @param bookId - The unique identifier of the book\n * @returns The complete URL to the book's cover image\n *\n * @example\n * ```typescript\n * const coverUrl = getCoverUrl(123);\n * console.log(coverUrl); // \"https://api.shamela.ws/covers/123.jpg\"\n * ```\n */\nexport const getCoverUrl = (bookId: number) => {\n const { origin } = new URL(process.env.SHAMELA_API_MASTER_PATCH_ENDPOINT!);\n return `${origin}/covers/${bookId}.jpg`;\n};\n\n/**\n * Downloads and processes the master database from the Shamela service.\n *\n * The master database contains comprehensive information about all books, authors,\n * and categories available in the Shamela library. 
This function downloads the\n * database files, creates the necessary tables, and exports the data in the\n * specified format (JSON or SQLite).\n *\n * @param options - Configuration options including output file path and optional master metadata\n * @returns A promise that resolves to the path of the created output file\n *\n * @throws {Error} When download fails, expected tables are missing, database operations fail, or file operations fail\n *\n * @example\n * ```typescript\n * // Download master database as JSON\n * const jsonPath = await downloadMasterDatabase({\n * outputFile: { path: './master.json' }\n * });\n *\n * // Download master database as SQLite\n * const dbPath = await downloadMasterDatabase({\n * outputFile: { path: './master.db' }\n * });\n * ```\n */\nexport const downloadMasterDatabase = async (options: DownloadMasterOptions): Promise<string> => {\n logger.info(`downloadMasterDatabase ${JSON.stringify(options)}`);\n\n const outputDir = await createTempDir('shamela_downloadMaster');\n\n const masterResponse: GetMasterMetadataResponsePayload =\n options.masterMetadata || (await getMasterMetadata(DEFAULT_MASTER_METADATA_VERSION));\n\n logger.info(`Downloading master database from: ${JSON.stringify(masterResponse)}`);\n const sourceTables: string[] = await unzipFromUrl(fixHttpsProtocol(masterResponse.url), outputDir);\n\n logger.info(`sourceTables downloaded: ${sourceTables.toString()}`);\n\n if (!validateMasterSourceTables(sourceTables)) {\n logger.error(`Some source tables were not found: ${sourceTables.toString()}`);\n throw new Error('Expected tables not found!');\n }\n\n const dbPath = path.join(outputDir, 'master.db');\n\n const client = new Database(dbPath);\n\n try {\n logger.info(`Creating tables`);\n await createMasterTables(client);\n\n logger.info(`Copying data to master table`);\n await copyForeignMasterTableData(client, sourceTables);\n\n const { ext: extension } = path.parse(options.outputFile.path);\n\n if (extension === '.json') {\n const result = await getMasterData(client);\n await Bun.file(options.outputFile.path).write(JSON.stringify(result, null, 2));\n }\n\n client.close();\n\n if (extension === '.db' || extension === '.sqlite') {\n await fs.rename(dbPath, options.outputFile.path);\n }\n\n await fs.rm(outputDir, { recursive: true });\n } finally {\n client.close();\n }\n\n return options.outputFile.path;\n};\n\n/**\n * Retrieves complete book data including pages and titles.\n *\n * This is a convenience function that downloads a book's data and returns it\n * as a structured JavaScript object. 
The function handles the temporary file\n * creation and cleanup automatically.\n *\n * @param id - The unique identifier of the book to retrieve\n * @returns A promise that resolves to the complete book data including pages and titles\n *\n * @throws {Error} When download fails, file operations fail, or JSON parsing fails\n *\n * @example\n * ```typescript\n * const bookData = await getBook(123);\n * console.log(bookData.pages.length); // Number of pages in the book\n * console.log(bookData.titles?.length); // Number of title entries\n * ```\n */\nexport const getBook = async (id: number): Promise<BookData> => {\n const outputDir = await createTempDir('shamela_getBookData');\n const outputPath = await downloadBook(id, { outputFile: { path: path.join(outputDir, `${id}.json`) } });\n\n const data: BookData = await Bun.file(outputPath).json();\n await fs.rm(outputDir, { recursive: true });\n\n return data;\n};\n","type LogFunction = (...args: unknown[]) => void;\n\ninterface Logger {\n debug: LogFunction;\n error: LogFunction;\n info: LogFunction;\n warn?: LogFunction;\n}\n\nconst SILENT_LOGGER = { debug: () => {}, error: () => {}, info: () => {}, warn: () => {} };\nlet logger: Logger = SILENT_LOGGER;\n\nexport const setLogger = (newLogger: Logger = SILENT_LOGGER) => {\n if (!newLogger.debug || !newLogger.error || !newLogger.info) {\n throw new Error('Logger must implement debug, error, and info methods');\n }\n\n logger = newLogger;\n};\n\nexport { logger as default };\n","import { Database } from 'bun:sqlite';\n\nexport type InternalTable = {\n fields: string[];\n name: string;\n};\n\nconst escapeAsSqlString = (v: string) => `'${v.replace(/'/g, \"''\")}'`; // safe string literal\n\n/**\n * Retrieves information about internal tables in a specified database.\n *\n * This function queries the SQLite master table to get metadata about all\n * tables in the specified database, including table names and field information.\n * It's useful for database introspection and validation operations.\n *\n * @param db - The database client instance to query\n * @param dbName - The name/alias of the database to inspect (e.g., 'main', 'patch')\n * @returns An array of InternalTable objects containing table metadata\n *\n * @throws {Error} When database query fails or table metadata cannot be parsed\n */\n\nexport const getInternalTables = (db: Database, dbName: string): InternalTable[] => {\n // Allow-list/validate schema name to prevent SQL injection (e.g., 'main', 'temp', 'patch')\n const isValidIdent = (v: string) => /^[A-Za-z_][A-Za-z0-9_]*$/.test(v);\n if (!isValidIdent(dbName)) {\n throw new Error(`Invalid database name: ${dbName}`);\n }\n\n // Get only user tables; exclude SQLite internal tables\n const tables = db\n .query(`SELECT name FROM ${dbName}.sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'`)\n .all() as { name: string }[];\n\n return tables.map(({ name }) => {\n const tableExpr = isValidIdent(name) ? 
name : escapeAsSqlString(name);\n // Use PRAGMA table_info to reliably fetch column names\n const cols = db.query(`PRAGMA ${dbName}.table_info(${tableExpr})`).all() as { name: string }[];\n const fields = cols.map((c) => c.name);\n return { fields, name } as InternalTable;\n });\n};\n","const MAIN_DB_ALIAS = 'main';\n\n/**\n * Generates SQL to attach a database file with an alias.\n * @param {string} dbFile - Path to the database file to attach\n * @param {string} alias - Alias name for the attached database\n * @returns {string} SQL ATTACH DATABASE statement\n */\nexport const attachDB = (dbFile: string, alias: string) => {\n // Escape single quotes in the file path\n const escapedPath = dbFile.replace(/'/g, \"''\");\n // Validate alias contains only alphanumeric characters and underscores\n if (!/^[a-zA-Z0-9_]+$/.test(alias)) {\n throw new Error('Invalid database alias');\n }\n return `ATTACH DATABASE '${escapedPath}' AS ${alias}`;\n};\n\n/**\n * Builds a SQL query to patch page data from one database to another.\n * @param {string} patchAlias - Alias of the patch database\n * @param {string} tableName - Name of the table to update\n * @param {string} [aslAlias='main'] - Alias of the main database\n * @returns {string} SQL UPDATE statement for patching page data\n */\nexport const buildPagePatchQuery = (\n patchAlias: string,\n tableName: string,\n aslAlias: string = MAIN_DB_ALIAS,\n): string => `\n UPDATE ${aslAlias}.${tableName}\n SET content = ${updatePageColumn('content', aslAlias, patchAlias)},\n part = ${updatePageColumn('part', aslAlias, patchAlias)},\n page = ${updatePageColumn('page', aslAlias, patchAlias)},\n number = ${updatePageColumn('number', aslAlias, patchAlias)}\n WHERE EXISTS (\n SELECT 1\n FROM ${patchAlias}.${tableName}\n WHERE ${aslAlias}.${tableName}.id = ${patchAlias}.${tableName}.id\n );\n`;\n\nconst updateTitleColumn = (columnName: string, aslAlias: string, patchAlias: string) => `\n (SELECT CASE \n WHEN ${patchAlias}.title.${columnName} != '#' THEN ${patchAlias}.title.${columnName}\n ELSE ${aslAlias}.title.${columnName}\n END \n FROM ${patchAlias}.title\n WHERE ${aslAlias}.title.id = ${patchAlias}.title.id)\n`;\n\n/**\n * Builds a SQL query to patch title data from one database to another.\n * @param {string} patchAlias - Alias of the patch database\n * @param {string} tableName - Name of the table to update\n * @param {string} [aslAlias='main'] - Alias of the main database\n * @returns {string} SQL UPDATE statement for patching title data\n */\nexport const buildTitlePatchQuery = (\n patchAlias: string,\n tableName: string,\n aslAlias: string = MAIN_DB_ALIAS,\n): string => `\n UPDATE ${aslAlias}.${tableName}\n SET content = ${updateTitleColumn('content', aslAlias, patchAlias)},\n page = ${updateTitleColumn('page', aslAlias, patchAlias)},\n parent = ${updateTitleColumn('parent', aslAlias, patchAlias)}\n WHERE EXISTS (\n SELECT 1\n FROM ${patchAlias}.${tableName}\n WHERE ${aslAlias}.${tableName}.id = ${patchAlias}.${tableName}.id\n );\n`;\n\n/**\n * Generates SQL to create a table with specified fields.\n * @param {string} name - Name of the table to create\n * @param {string[]} fields - Array of field definitions\n * @returns {string} SQL CREATE TABLE statement\n */\nexport const createTable = (name: string, fields: string[]) => {\n // Validate table name\n if (!/^[a-zA-Z0-9_]+$/.test(name)) {\n throw new Error('Invalid table name');\n }\n // Basic validation for field definitions\n fields.forEach((field) => {\n if (field.includes(';') || field.includes('--')) 
{\n throw new Error('Invalid field definition');\n }\n });\n return `CREATE TABLE IF NOT EXISTS ${name} (${fields.join(', ')})`;\n};\n\n/**\n * Generates SQL to detach a database by alias.\n * @param {string} alias - Alias of the database to detach\n * @returns {string} SQL DETACH DATABASE statement\n */\nexport const detachDB = (alias: string) => {\n // Validate alias contains only alphanumeric characters and underscores\n if (!/^[a-zA-Z0-9_]+$/.test(alias)) {\n throw new Error('Invalid database alias');\n }\n return `DETACH DATABASE ${alias}`;\n};\n\nconst updatePageColumn = (columnName: string, aslAlias: string, patchAlias: string) => `\n (SELECT CASE \n WHEN ${patchAlias}.page.${columnName} != '#' THEN ${patchAlias}.page.${columnName}\n ELSE ${aslAlias}.page.${columnName}\n END \n FROM ${patchAlias}.page\n WHERE ${aslAlias}.page.id = ${patchAlias}.page.id)\n`;\n\n/**\n * Generates an unsafe SQL INSERT statement with provided field values.\n * @param {string} table - Name of the table to insert into\n * @param {Record<string, any>} fieldToValue - Object mapping field names to values\n * @param {boolean} [isDeleted=false] - Whether to mark the record as deleted\n * @returns {string} SQL INSERT statement (unsafe - does not escape values properly)\n * @warning This function does not properly escape SQL values and should not be used with untrusted input\n */\nexport const insertUnsafely = (table: string, fieldToValue: Record<string, any>, isDeleted = false) => {\n const combinedRecords: Record<string, any> = { ...fieldToValue, is_deleted: isDeleted ? '1' : '0' };\n\n const sortedKeys = Object.keys(combinedRecords).sort();\n\n const sortedValues = sortedKeys.map((key) => combinedRecords[key]);\n\n return `INSERT INTO ${table} (${sortedKeys.toString()}) VALUES (${sortedValues\n .map((val) => {\n if (val === null) {\n return 'NULL';\n }\n\n return typeof val === 'string' ? 
`'${val}'` : val;\n })\n .toString()})`;\n};\n","import { Database } from 'bun:sqlite';\n\nimport { BookData, Page, Title } from '../types';\nimport logger from '../utils/logger';\nimport { getInternalTables, InternalTable } from './common';\nimport { attachDB, buildPagePatchQuery, buildTitlePatchQuery, detachDB } from './queryBuilder';\nimport { PageRow, Tables, TitleRow } from './types';\n\nconst PATCH_DB_ALIAS = 'patch';\nconst ASL_DB_ALIAS = 'asl';\n\nconst buildCopyStatements = (\n patchTables: InternalTable[],\n aslTables: InternalTable[],\n table: Tables,\n fields: string[],\n patchQuery: string,\n): string[] => {\n const statements = [];\n\n if (patchTables.find((t) => t.name === table)) {\n statements.push(\n `INSERT INTO main.${table} \n SELECT ${fields.join(',')} \n FROM ${ASL_DB_ALIAS}.${table} \n WHERE id NOT IN (\n SELECT id \n FROM ${PATCH_DB_ALIAS}.${table} \n WHERE is_deleted='1'\n )`,\n );\n statements.push(patchQuery);\n } else {\n let copyStatement = `INSERT INTO main.${table} SELECT ${fields.join(',')} FROM ${ASL_DB_ALIAS}.${table}`;\n\n if (aslTables.find((t) => t.name === table)?.fields.includes('is_deleted')) {\n copyStatement += ` WHERE is_deleted='0'`;\n }\n\n statements.push(copyStatement);\n }\n\n return statements;\n};\n\n/**\n * Applies patches from a patch database to the main book database.\n *\n * This function handles the process of applying updates and patches to book data\n * by attaching both the original ASL database and patch database, then merging\n * the data while excluding deleted records and applying updates from patches.\n *\n * @param db - The database client instance for the main database\n * @param aslDB - Path to the original ASL database file\n * @param patchDB - Path to the patch database file containing updates\n *\n * @throws {Error} When database operations fail or tables cannot be attached\n *\n * @example\n * ```typescript\n * const client = new Database(dbPath);\n * applyPatches(client, './original.db', './patch.db');\n * ```\n */\nexport const applyPatches = (db: Database, aslDB: string, patchDB: string) => {\n db.run(attachDB(aslDB, ASL_DB_ALIAS));\n db.run(attachDB(patchDB, PATCH_DB_ALIAS));\n\n try {\n const patchTables = getInternalTables(db, PATCH_DB_ALIAS);\n const aslTables = getInternalTables(db, ASL_DB_ALIAS);\n logger.debug({ aslTables, patchTables }, `Applying patches for...`);\n const pageStatements = buildCopyStatements(\n patchTables,\n aslTables,\n Tables.Page,\n ['id', 'content', 'part', 'page', 'number'],\n buildPagePatchQuery(PATCH_DB_ALIAS, Tables.Page),\n );\n const titleStatements = buildCopyStatements(\n patchTables,\n aslTables,\n Tables.Title,\n ['id', 'content', 'page', 'parent'],\n buildTitlePatchQuery(PATCH_DB_ALIAS, Tables.Title),\n );\n // Prepare all statements\n const allStatements = [...pageStatements, ...titleStatements].map((sql) => db.prepare(sql));\n // Execute all in one transaction\n db.transaction(() => {\n allStatements.forEach((stmt) => stmt.run());\n })();\n } finally {\n db.run(detachDB(ASL_DB_ALIAS));\n db.run(detachDB(PATCH_DB_ALIAS));\n }\n};\n\n/**\n * Copies table data from an ASL database to the main database.\n *\n * This function is used when no patches are available and data needs to be\n * copied directly from the original ASL database to the main database.\n * It handles both page and title data.\n *\n * @param db - The database client instance for the main database\n * @param aslDB - Path to the ASL database file to copy data from\n *\n * @throws {Error} When database operations 
fail or the ASL database cannot be attached\n */\nexport const copyTableData = (db: Database, aslDB: string) => {\n db.run(attachDB(aslDB, ASL_DB_ALIAS));\n const tables = getInternalTables(db, ASL_DB_ALIAS);\n\n logger.debug({ tables }, `copyTableData...`);\n\n const titleInsert = db.prepare(\n `INSERT INTO main.${Tables.Title} SELECT id,content,page,parent FROM ${ASL_DB_ALIAS}.${Tables.Title}`,\n );\n const pageInsert = db.prepare(\n `INSERT INTO main.${Tables.Page} SELECT id,content,part,page,number FROM ${ASL_DB_ALIAS}.${Tables.Page}`,\n );\n\n db.transaction(() => {\n titleInsert.run();\n pageInsert.run();\n })();\n\n db.run(detachDB(ASL_DB_ALIAS));\n};\n\n/**\n * Creates the necessary database tables for storing book data.\n *\n * This function sets up the schema for the book database by creating\n * the 'page' and 'title' tables with their respective columns and constraints.\n *\n * @param db - The database client instance where tables should be created\n *\n * @throws {Error} When table creation fails due to database constraints or permissions\n */\nexport const createTables = (db: Database) => {\n db.run(`CREATE TABLE page (id INTEGER PRIMARY KEY, content TEXT, part INTEGER, page INTEGER, number INTEGER)`);\n db.run(`CREATE TABLE title (id INTEGER PRIMARY KEY, content TEXT, page INTEGER, parent INTEGER)`);\n};\n\n/**\n * Retrieves all pages from the book database.\n *\n * This function queries the database for all page records and transforms\n * them into a structured format, filtering out null values and organizing\n * the data according to the Page type interface.\n *\n * @param db - The database client instance to query\n * @returns An array of Page objects\n *\n * @throws {Error} When database query fails or data transformation encounters issues\n */\nexport const getAllPages = (db: Database) => {\n const pages: Page[] = db\n .query(`SELECT * FROM ${Tables.Page}`)\n .all()\n .map((row: any) => {\n const { content, id, number, page, part } = row as PageRow;\n\n return {\n content,\n id,\n ...(page && { page }),\n ...(number && { number }),\n ...(part && { part }),\n };\n });\n\n return pages;\n};\n\n/**\n * Retrieves all titles from the book database.\n *\n * This function queries the database for all title records and transforms\n * them into a structured format. 
Titles represent the hierarchical structure\n * and table of contents for the book.\n *\n * @param db - The database client instance to query\n * @returns An array of Title objects\n *\n * @throws {Error} When database query fails or data transformation encounters issues\n */\nexport const getAllTitles = (db: Database) => {\n const titles: Title[] = db\n .query(`SELECT * FROM ${Tables.Title}`)\n .all()\n .map((row: any) => {\n const r = row as TitleRow;\n\n return {\n content: r.content,\n id: r.id,\n page: r.page,\n ...(r.parent && { parent: r.parent }),\n };\n });\n\n return titles;\n};\n\n/**\n * Retrieves complete book data including both pages and titles.\n *\n * This function combines the results from getAllPages and getAllTitles\n * to provide a complete representation of the book's content and structure.\n * This is typically the final step in processing book data.\n *\n * @param db - The database client instance to query\n *\n * @throws {Error} When database queries fail or data processing encounters issues\n */\nexport const getData = (db: Database): BookData => {\n return { pages: getAllPages(db), titles: getAllTitles(db) };\n};\n","import { Database } from 'bun:sqlite';\nimport path from 'node:path';\n\nimport type { Author, Book, Category, MasterData, PDFLinks } from '../types';\n\nimport { UNKNOWN_VALUE_PLACEHOLDER } from '../utils/constants';\nimport { attachDB, detachDB } from './queryBuilder';\nimport { type BookRow, Tables } from './types';\n\n/**\n * Copies data from foreign master table files into the main master database.\n *\n * This function processes the source table files (author.sqlite, book.sqlite, category.sqlite)\n * by attaching them to the current database connection, then copying their data into\n * the main master database tables. 
It handles data transformation including filtering\n * out deleted records and converting placeholder values.\n *\n * @param db - The database client instance for the master database\n * @param sourceTables - Array of file paths to the source SQLite table files\n *\n * @throws {Error} When source files cannot be attached or data copying operations fail\n */\nexport const copyForeignMasterTableData = (db: Database, sourceTables: string[]) => {\n const aliasToPath: Record<string, string> = sourceTables.reduce((acc, tablePath) => {\n const { name } = path.parse(tablePath);\n return { ...acc, [name]: tablePath };\n }, {});\n\n Object.entries(aliasToPath).forEach(([alias, dbPath]) => db.run(attachDB(dbPath, alias)));\n\n const insertAuthors = db.prepare(\n `INSERT INTO ${Tables.Authors} SELECT id,name,biography,(CASE WHEN death_number = ${UNKNOWN_VALUE_PLACEHOLDER} THEN NULL ELSE death_number END) AS death_number FROM author WHERE is_deleted='0'`,\n );\n const insertBooks = db.prepare(\n `INSERT INTO ${Tables.Books} SELECT id,name,category,type,(CASE WHEN date = ${UNKNOWN_VALUE_PLACEHOLDER} THEN NULL ELSE date END) AS date,author,printed,major_release,minor_release,bibliography,hint,pdf_links,metadata FROM book WHERE is_deleted='0'`,\n );\n const insertCategories = db.prepare(\n `INSERT INTO ${Tables.Categories} SELECT id,name FROM category WHERE is_deleted='0'`,\n );\n\n db.transaction(() => {\n insertAuthors.run();\n insertBooks.run();\n insertCategories.run();\n })();\n\n Object.keys(aliasToPath).forEach((statement) => db.run(detachDB(statement)));\n};\n\n/**\n * Creates the necessary database tables for the master database.\n *\n * This function sets up the schema for the master database by creating\n * tables for authors, books, and categories with their respective columns\n * and data types. 
This is typically the first step in setting up a new\n * master database.\n *\n * @param db - The database client instance where tables should be created\n *\n * @throws {Error} When table creation fails due to database constraints or permissions\n */\nexport const createTables = (db: Database) => {\n db.run('CREATE TABLE authors (id INTEGER PRIMARY KEY, name TEXT, biography TEXT, death INTEGER)');\n db.run(\n 'CREATE TABLE books (id INTEGER PRIMARY KEY, name TEXT, category INTEGER, type INTEGER, date INTEGER, author TEXT, printed INTEGER, major INTEGER, minor INTEGER, bibliography TEXT, hint TEXT, pdf_links TEXT, metadata TEXT)',\n );\n db.run('CREATE TABLE categories (id INTEGER PRIMARY KEY, name TEXT)');\n};\n\nexport const getAllAuthors = (db: Database) => {\n const rows = db.query(`SELECT * FROM ${Tables.Authors}`).all();\n\n const authors: Author[] = rows.map((r: any) => ({\n ...(r.biography && { biography: r.biography }),\n ...(r.death && { death: r.death }),\n id: r.id,\n name: r.name,\n }));\n\n return authors;\n};\n\nexport const getAllBooks = (db: Database) => {\n const rows = db.query(`SELECT * FROM ${Tables.Books}`).all();\n\n const books: Book[] = rows.map((row: any) => {\n const r = row as BookRow;\n\n return {\n author: parseAuthor(r.author),\n bibliography: r.bibliography,\n category: r.category,\n id: r.id,\n major: r.major,\n metadata: JSON.parse(r.metadata),\n name: r.name,\n printed: r.printed,\n type: r.type,\n ...(r.date && r.date.toString() !== UNKNOWN_VALUE_PLACEHOLDER && { date: r.date }),\n ...(r.hint && { hint: r.hint }),\n ...(r.pdf_links && { pdfLinks: parsePdfLinks(r.pdf_links) }),\n ...(r.minor && { minorRelease: r.minor }),\n };\n });\n\n return books;\n};\n\nexport const getAllCategories = (db: Database) => {\n const rows = db.query(`SELECT * FROM ${Tables.Categories}`).all();\n\n const categories: Category[] = rows.map((r: any) => ({\n id: r.id,\n name: r.name,\n }));\n\n return categories;\n};\n\nconst parseAuthor = (value: string) => {\n const result: number[] = value.split(',\\\\s+').map((id) => parseInt(id.trim()));\n return result.length > 1 ? 
result : result[0];\n};\n\nconst parsePdfLinks = (value: string) => {\n const result = JSON.parse(value);\n\n if (result.files) {\n result.files = (result.files as string[]).map((f: string) => {\n const [file, id] = f.split('|');\n return { ...(id && { id }), file };\n });\n }\n\n return result as PDFLinks;\n};\n\nexport const getData = (db: Database) => {\n return { authors: getAllAuthors(db), books: getAllBooks(db), categories: getAllCategories(db) } as MasterData;\n};\n","/**\n * The default version number for master metadata.\n * @constant {number}\n */\nexport const DEFAULT_MASTER_METADATA_VERSION = 0;\n\n/**\n * Placeholder value used to represent unknown or missing data.\n * @constant {string}\n */\nexport const UNKNOWN_VALUE_PLACEHOLDER = '99999';\n","import { createWriteStream, promises as fs } from 'node:fs';\nimport { IncomingMessage } from 'node:http';\nimport https from 'node:https';\nimport os from 'node:os';\nimport path from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport unzipper, { Entry } from 'unzipper';\n\n/**\n * Creates a temporary directory with an optional prefix.\n * @param {string} [prefix='shamela'] - The prefix to use for the temporary directory name\n * @returns {Promise<string>} A promise that resolves to the path of the created temporary directory\n */\nexport const createTempDir = async (prefix = 'shamela') => {\n const tempDirBase = path.join(os.tmpdir(), prefix);\n return fs.mkdtemp(tempDirBase);\n};\n\n/**\n * Checks if a file exists at the given path.\n * @param {string} path - The file path to check\n * @returns {Promise<boolean>} A promise that resolves to true if the file exists, false otherwise\n */\nexport const fileExists = async (filePath: string) => !!(await fs.stat(filePath).catch(() => false));\n\n/**\n * Downloads and extracts a ZIP file from a given URL without loading the entire file into memory.\n * @param {string} url - The URL of the ZIP file to download and extract\n * @param {string} outputDir - The directory where the files should be extracted\n * @returns {Promise<string[]>} A promise that resolves with the list of all extracted file paths\n * @throws {Error} When the download fails, extraction fails, or other network/filesystem errors occur\n */\nexport async function unzipFromUrl(url: string, outputDir: string): Promise<string[]> {\n const extractedFiles: string[] = [];\n\n try {\n // Make HTTPS request and get the response stream\n const response = await new Promise<IncomingMessage>((resolve, reject) => {\n https\n .get(url, (res) => {\n if (res.statusCode !== 200) {\n reject(new Error(`Failed to download ZIP file: ${res.statusCode} ${res.statusMessage}`));\n } else {\n resolve(res);\n }\n })\n .on('error', (err) => {\n reject(new Error(`HTTPS request failed: ${err.message}`));\n });\n });\n\n // Process the ZIP file using unzipper.Extract with proper event handling\n await new Promise<void>((resolve, reject) => {\n const unzipStream = unzipper.Parse();\n const entryPromises: Promise<void>[] = [];\n\n unzipStream.on('entry', (entry: Entry) => {\n const entryPromise = (async () => {\n const filePath = path.join(outputDir, entry.path);\n\n if (entry.type === 'Directory') {\n // Ensure the directory exists\n await fs.mkdir(filePath, { recursive: true });\n entry.autodrain();\n } else {\n // Ensure the parent directory exists\n const dir = path.dirname(filePath);\n await fs.mkdir(dir, { recursive: true });\n\n // Create write stream and pipe entry to it\n const writeStream = createWriteStream(filePath);\n await 
// ── next embedded source (constants) ──

/**
 * The default version number for master metadata.
 * @constant {number}
 */
export const DEFAULT_MASTER_METADATA_VERSION = 0;

/**
 * Placeholder value used to represent unknown or missing data.
 * @constant {string}
 */
export const UNKNOWN_VALUE_PLACEHOLDER = '99999';

// ── next embedded source (filesystem and archive helpers) ──

import { createWriteStream, promises as fs } from 'node:fs';
import { IncomingMessage } from 'node:http';
import https from 'node:https';
import os from 'node:os';
import path from 'node:path';
import { pipeline } from 'node:stream/promises';
import unzipper, { Entry } from 'unzipper';

/**
 * Creates a temporary directory with an optional prefix.
 * @param {string} [prefix='shamela'] - The prefix to use for the temporary directory name
 * @returns {Promise<string>} A promise that resolves to the path of the created temporary directory
 */
export const createTempDir = async (prefix = 'shamela') => {
    const tempDirBase = path.join(os.tmpdir(), prefix);
    return fs.mkdtemp(tempDirBase);
};

/**
 * Checks if a file exists at the given path.
 * @param {string} filePath - The file path to check
 * @returns {Promise<boolean>} A promise that resolves to true if the file exists, false otherwise
 */
export const fileExists = async (filePath: string) => !!(await fs.stat(filePath).catch(() => false));

/**
 * Downloads and extracts a ZIP file from a given URL without loading the entire archive into memory.
 * @param {string} url - The URL of the ZIP file to download and extract
 * @param {string} outputDir - The directory where the files should be extracted
 * @returns {Promise<string[]>} A promise that resolves with the list of all extracted file paths
 * @throws {Error} When the download fails, extraction fails, or other network/filesystem errors occur
 */
export async function unzipFromUrl(url: string, outputDir: string): Promise<string[]> {
    const extractedFiles: string[] = [];

    try {
        // Make the HTTPS request and obtain the response stream
        const response = await new Promise<IncomingMessage>((resolve, reject) => {
            https
                .get(url, (res) => {
                    if (res.statusCode !== 200) {
                        reject(new Error(`Failed to download ZIP file: ${res.statusCode} ${res.statusMessage}`));
                    } else {
                        resolve(res);
                    }
                })
                .on('error', (err) => {
                    reject(new Error(`HTTPS request failed: ${err.message}`));
                });
        });

        // Stream the ZIP through unzipper.Parse, writing each entry to disk as it arrives
        await new Promise<void>((resolve, reject) => {
            const unzipStream = unzipper.Parse();
            const entryPromises: Promise<void>[] = [];

            unzipStream.on('entry', (entry: Entry) => {
                const entryPromise = (async () => {
                    const filePath = path.join(outputDir, entry.path);

                    if (entry.type === 'Directory') {
                        // Ensure the directory exists
                        await fs.mkdir(filePath, { recursive: true });
                        entry.autodrain();
                    } else {
                        // Ensure the parent directory exists
                        const dir = path.dirname(filePath);
                        await fs.mkdir(dir, { recursive: true });

                        // Create a write stream and pipe the entry to it
                        const writeStream = createWriteStream(filePath);
                        await pipeline(entry, writeStream);
                        extractedFiles.push(filePath);
                    }
                })();

                entryPromises.push(entryPromise);
            });

            unzipStream.on('finish', async () => {
                try {
                    // Wait for all entries to be written before resolving
                    await Promise.all(entryPromises);
                    resolve();
                } catch (error) {
                    reject(error);
                }
            });

            unzipStream.on('error', (error) => {
                reject(new Error(`Error during extraction: ${error.message}`));
            });

            // Pipe the response into the unzip stream
            response.pipe(unzipStream);
        });

        return extractedFiles;
    } catch (error: any) {
        throw new Error(`Error processing URL: ${error.message}`);
    }
}
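Taken together, these helpers support a download-then-extract flow. A minimal sketch, to be run inside an async context; the ZIP URL below is a placeholder, not a real Shamela endpoint:

// Placeholder URL: substitute a real archive location.
const dir = await createTempDir(); // e.g. /tmp/shamelaXXXXXX
const files = await unzipFromUrl('https://example.com/archive.zip', dir);

// fileExists resolves to true only when the path could be stat'ed.
if (await fileExists(files[0])) {
    console.log(`extracted ${files.length} entries into ${dir}`);
}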
// ── next embedded source (HTTP helpers) ──

import { Buffer } from 'node:buffer';
import { IncomingMessage } from 'node:http';
import https from 'node:https';
import process from 'node:process';
import { URL, URLSearchParams } from 'node:url';

/**
 * Builds a URL with query parameters and optional authentication.
 * @param {string} endpoint - The base endpoint URL
 * @param {Record<string, any>} queryParams - Object containing query parameters to append
 * @param {boolean} [useAuth=true] - Whether to include the API key from environment variables
 * @returns {URL} The constructed URL object with query parameters
 */
export const buildUrl = (endpoint: string, queryParams: Record<string, any>, useAuth: boolean = true): URL => {
    const url = new URL(endpoint);
    const params = new URLSearchParams();

    Object.entries(queryParams).forEach(([key, value]) => {
        params.append(key, value.toString());
    });

    if (useAuth) {
        params.append('api_key', process.env.SHAMELA_API_KEY!);
    }

    url.search = params.toString();

    return url;
};

/**
 * Makes an HTTPS GET request and returns the response data.
 * @template T - The expected return type (Buffer or Record<string, any>)
 * @param {string | URL} url - The URL to make the request to
 * @returns {Promise<T>} A promise that resolves to the response body, parsed as JSON when the content-type is application/json, otherwise returned as a Buffer
 * @throws {Error} When the request fails or JSON parsing fails
 */
export const httpsGet = <T extends Buffer | Record<string, any>>(url: string | URL): Promise<T> => {
    return new Promise((resolve, reject) => {
        https
            .get(url, (res: IncomingMessage) => {
                const contentType = res.headers['content-type'] || '';
                const dataChunks: Buffer[] = [];

                res.on('data', (chunk: Buffer) => {
                    dataChunks.push(chunk);
                });

                res.on('end', () => {
                    const fullData = Buffer.concat(dataChunks);

                    if (contentType.includes('application/json')) {
                        try {
                            const json = JSON.parse(fullData.toString('utf-8'));
                            resolve(json);
                        } catch (error: any) {
                            reject(new Error(`Failed to parse JSON: ${error.message}`));
                        }
                    } else {
                        resolve(fullData as T);
                    }
                });
            })
            .on('error', (error) => {
                reject(new Error(`Error making request: ${error.message}`));
            });
    });
};

// ── next embedded source (environment and table validation) ──

import path from 'node:path';
import process from 'node:process';

const SOURCE_TABLES = ['author.sqlite', 'book.sqlite', 'category.sqlite'];

/**
 * Validates that required environment variables are set.
 * @throws {Error} When any required environment variable is missing
 */
export const validateEnvVariables = () => {
    const envVariablesNotFound = [
        'SHAMELA_API_MASTER_PATCH_ENDPOINT',
        'SHAMELA_API_BOOKS_ENDPOINT',
        'SHAMELA_API_KEY',
    ].filter((key) => !process.env[key]);

    if (envVariablesNotFound.length) {
        throw new Error(`${envVariablesNotFound.join(', ')} environment variables not set`);
    }
};

/**
 * Validates that all required master source tables are present in the provided paths.
 * @param {string[]} sourceTablePaths - Array of file paths to validate
 * @returns {boolean} True if all required source tables (author.sqlite, book.sqlite, category.sqlite) are present
 */
export const validateMasterSourceTables = (sourceTablePaths: string[]) => {
    const sourceTableNames = new Set(sourceTablePaths.map((tablePath) => path.basename(tablePath).toLowerCase()));
    return SOURCE_TABLES.every((table) => sourceTableNames.has(table.toLowerCase()));
};
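A short sketch of how the HTTP and validation helpers compose. It assumes the three SHAMELA_* variables are exported in the environment, and the { version: 1 } query parameter is illustrative only:

// Fails fast when any of the SHAMELA_* variables is missing.
validateEnvVariables();

// api_key is appended automatically because useAuth defaults to true.
const url = buildUrl(process.env.SHAMELA_API_BOOKS_ENDPOINT!, { version: 1 });

// Resolves to parsed JSON for application/json responses, otherwise a Buffer.
const payload = await httpsGet<Record<string, any>>(url);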
[The remainder of the file is the source map's machine-generated "mappings" field (base64 VLQ data) and its "names" identifier table; no further source text is embedded. Identifiers in the names table include getMasterMetadata, downloadMasterDatabase, getBookMetadata, downloadBook, getBook, and getCoverUrl, alongside internal helpers such as applyPatches, copyTableData, and fixHttpsProtocol.]