shamela 1.1.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +20 -12
- package/dist/index.d.ts +110 -124
- package/dist/index.js +46 -43
- package/dist/index.js.map +1 -1
- package/package.json +31 -40
package/README.md
CHANGED
@@ -371,27 +371,35 @@ The library provides comprehensive TypeScript types for all data structures:
 
 ### BookData
 
-- `pages`: Array of page
-- `titles`:
+- `pages`: Array of raw rows from the `page` table, including `content`, `id`, `part`, `page`, `number`, `services`, and `is_deleted`.
+- `titles`: Array of raw rows from the `title` table with `content`, `id`, `page`, `parent`, and `is_deleted`.
 
 ### MasterData
 
-- `authors`:
-- `books`:
-- `categories`:
+- `authors`: Raw entries from the `author` table with the original `biography`, `death_text`, `death_number`, `is_deleted`, and `name` fields.
+- `books`: Raw entries from the `book` table containing the original metadata columns (`author`, `bibliography`, `category`, `date`, `hint`, `major_release`, `metadata`, `minor_release`, `pdf_links`, `printed`, `type`, and `is_deleted`).
+- `categories`: Raw entries from the `category` table including `is_deleted`, `order`, and `name`.
 
 ### Page
 
-- `id`: Unique identifier
-- `content`: Text content of the page
-- `
+- `id`: Unique identifier.
+- `content`: Text content of the page.
+- `part`, `page`, `number`: Numeric references stored exactly as they appear in the source database.
+- `services`: Optional metadata column from the source database.
+- `is_deleted`: Flag indicating whether the page has been marked as deleted in Shamela updates.
 
 ### Title
 
-- `id`: Unique identifier
-- `content`: Title text
-- `page`: Page number where title appears
-- `parent`: Optional parent title ID for hierarchical structure
+- `id`: Unique identifier.
+- `content`: Title text.
+- `page`: Page number where title appears (if available).
+- `parent`: Optional parent title ID for hierarchical structure.
+- `is_deleted`: Flag indicating whether the title has been marked as deleted.
+
+### Content helpers
+
+- `parseContentRobust(content: string)`: Converts Shamela page HTML into a list of structured lines while preserving title markers and punctuation.
+- `sanitizePageContent(content: string)`: Removes common footnote markers and normalises ligatures from Shamela pages.
 
 ## Testing
 
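The two helpers documented above ship alongside `splitPageBodyFromFooter`, visible in the typings below. A minimal sketch of chaining them over a downloaded book, assuming the `SHAMELA_API_*` environment variables are configured; the book ID mirrors the placeholder used in the package's own docs:

```ts
import { getBook, parseContentRobust, sanitizePageContent, splitPageBodyFromFooter } from 'shamela';

// Illustrative ID; getBook resolves to BookData with raw page rows.
const book = await getBook(123);

for (const page of book.pages) {
    // Normalise ligatures and strip footnote markers from the raw page HTML.
    const cleaned = sanitizePageContent(page.content);

    // Separate the page body from its footnote block; the marker defaults to
    // a run of underscores when none is passed.
    const [body, footnotes] = splitPageBodyFromFooter(cleaned);

    // Structured lines; entries carrying an `id` came from title spans.
    for (const line of parseContentRobust(body)) {
        console.log(line.id ?? '-', line.text);
    }
    if (footnotes) console.log('footnotes:', footnotes);
}
```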
package/dist/index.d.ts
CHANGED
@@ -1,47 +1,100 @@
 /**
- *
+ * A record that can be deleted by patches.
  */
-type
-/**
-
-
-
-    /** Unique identifier
+type Deletable = {
+    /** Indicates if it was deleted in the patch if it is set to '1 */
+    is_deleted: string;
+};
+type Unique = {
+    /** Unique identifier */
     id: number;
-
+};
+/**
+ * Database row structure for the author table.
+ */
+type AuthorRow = Deletable & Unique & {
+    /** Author biography */
+    biography: string;
+    /** Death year */
+    death_number: string;
+    death_text: string;
+    /** Author name */
     name: string;
 };
 /**
- *
+ * Database row structure for the book table.
  */
-type
-/**
-    author:
+type BookRow = Deletable & Unique & {
+    /** Serialized author ID(s) "2747, 3147" or "513" */
+    author: string;
     /** Bibliography information */
     bibliography: string;
-    /** Category ID
-    category:
-    /** Publication date */
-    date
-    /**
-    hint
-    /**
-
-    /**
-
-    /**
-
-    /**
-    minor?: number;
-    /** Name/title of the book */
+    /** Category ID */
+    category: string;
+    /** Publication date (or 99999 for unavailable) */
+    date: string;
+    /** Hint or description (nullable) */
+    hint: string;
+    /** Major version */
+    major_release: string;
+    /** Serialized metadata */
+    metadata: string;
+    /** Minor version */
+    minor_release: string;
+    /** Book name */
     name: string;
-    /**
-
-    /**
-    printed:
-    /**
-    type:
+    /** Serialized PDF links (nullable) */
+    pdf_links: string;
+    /** Printed flag */
+    printed: string;
+    /** Book type */
+    type: string;
 };
+/**
+ * Database row structure for the category table.
+ */
+type CategoryRow = Deletable & Unique & {
+    /** Category name */
+    name: string;
+    order: string;
+};
+/**
+ * Database row structure for the page table.
+ */
+type PageRow = Deletable & Unique & {
+    /** Page content */
+    content: string;
+    /** Page number (nullable) */
+    number: string;
+    /** Page reference (nullable) */
+    page: string;
+    /** Part number (nullable) */
+    part: string;
+    services: string;
+};
+/**
+ * Database row structure for the title table.
+ */
+type TitleRow = Deletable & Unique & {
+    /** Title content */
+    content: string;
+    /** Page number */
+    page: string;
+    /** Parent title ID (nullable) */
+    parent: string;
+};
+
+/**
+ * Represents an author entity.
+ */
+type Author = AuthorRow;
+/**
+ * Represents a book entity.
+ */
+type Book = BookRow;
+type Category = CategoryRow;
+type Page = PageRow;
+type Title = TitleRow;
 /**
  * Represents book content data.
  */
@@ -52,13 +105,15 @@ type BookData = {
     titles?: Title[];
 };
 /**
- *
+ * Master data structure containing all core entities.
  */
-type
-/**
-
-/**
-
+type MasterData = {
+    /** Array of all authors */
+    authors: Author[];
+    /** Array of all books */
+    books: Book[];
+    /** Array of all categories */
+    categories: Category[];
 };
 /**
  * Options for downloading a book.
@@ -109,40 +164,6 @@ type GetMasterMetadataResponsePayload = {
     /** Version number */
     version: number;
 };
-/**
- * Master data structure containing all core entities.
- */
-type MasterData = {
-    /** Array of all authors */
-    authors: Author[];
-    /** Array of all books */
-    books: Book[];
-    /** Array of all categories */
-    categories: Category[];
-};
-/**
- * Metadata structure for books.
- */
-type Metadata = {
-    /** Optional co-author IDs */
-    coauthor?: number[];
-    /** Date information */
-    date: string;
-    /** Optional group identifier */
-    group?: number;
-    /** Whether to hide diacritics */
-    hide_diacritic?: boolean;
-    /** Minimum version requirement */
-    min_ver?: number;
-    /** Optional prefix text */
-    prefix?: string;
-    /** Short codes mapping */
-    shorts: Record<string, string>;
-    /** Sub-book IDs */
-    sub_books: number[];
-    /** Optional suffix text */
-    suffix?: string;
-};
 /**
  * Output file options.
  */
@@ -150,55 +171,6 @@ interface OutputOptions {
     /** Output file path */
     path: string;
 }
-/**
- * Represents a page in a book.
- */
-type Page = {
-    /** Content of the page */
-    content: string;
-    /** Unique identifier for the page */
-    id: number;
-    /** Optional page number */
-    number?: number;
-    /** Optional page reference */
-    page?: number;
-    /** Optional part number */
-    part?: number;
-};
-/**
- * PDF links structure for books.
- */
-type PDFLinks = {
-    /** Optional alias ID */
-    alias?: number;
-    /** Optional cover ID */
-    cover?: number;
-    /** Optional cover alias ID */
-    cover_alias?: number;
-    /** Optional array of PDF files */
-    files?: PDFFile[];
-    /** Optional root path */
-    root?: string;
-    /** Optional file size */
-    size?: number;
-};
-/**
- * Represents a title or chapter heading.
- */
-type Title = {
-    /** Content of the title */
-    content: string;
-    /** Unique identifier for the title */
-    id: number;
-    /** Page number where title appears */
-    page: number;
-    /** Optional parent title ID for hierarchical structure */
-    parent?: number;
-};
-type PDFFile = {
-    file: string;
-    id?: string;
-};
 
 /**
  * Retrieves metadata for a specific book from the Shamela API.
@@ -330,13 +302,27 @@ declare const downloadMasterDatabase: (options: DownloadMasterOptions) => Promis
  */
 declare const getBook: (id: number) => Promise<BookData>;
 
+type Line = {
+    id?: string;
+    text: string;
+};
+declare const parseContentRobust: (content: string) => Line[];
+/**
+ * Sanitizes page content by applying regex replacement rules
+ * @param text - The text to sanitize
+ * @param rules - Optional custom rules (defaults to DEFAULT_SANITIZATION_RULES)
+ * @returns The sanitized text
+ */
+declare const sanitizePageContent: (text: string, rules?: Record<string, string>) => string;
+declare const splitPageBodyFromFooter: (content: string, footnoteMarker?: string) => readonly [string, string];
+
 type LogFunction = (...args: unknown[]) => void;
 interface Logger {
     debug: LogFunction;
     error: LogFunction;
     info: LogFunction;
-    warn
+    warn: LogFunction;
 }
 declare const setLogger: (newLogger?: Logger) => void;
 
-export { type Author, type Book, type BookData, type Category, type DownloadBookOptions, type DownloadMasterOptions, type GetBookMetadataOptions, type GetBookMetadataResponsePayload, type GetMasterMetadataResponsePayload, type
+export { type Author, type Book, type BookData, type Category, type DownloadBookOptions, type DownloadMasterOptions, type GetBookMetadataOptions, type GetBookMetadataResponsePayload, type GetMasterMetadataResponsePayload, type Line, type MasterData, type OutputOptions, type Page, type Title, downloadBook, downloadMasterDatabase, getBook, getBookMetadata, getCoverUrl, getMasterMetadata, parseContentRobust, sanitizePageContent, setLogger, splitPageBodyFromFooter };
package/dist/index.js
CHANGED
@@ -1,44 +1,47 @@
-import{Database as
+import{Database as H}from"bun:sqlite";import{promises as m}from"fs";import g from"path";import L from"process";import{URL as V}from"url";import{Database as y}from"bun:sqlite";var P={debug:()=>{},error:()=>{},info:()=>{},warn:()=>{}},l=P,Z=(e=P)=>{if(!e.debug||!e.error||!e.info)throw new Error("Logger must implement debug, error, and info methods");l=e};var K="#",O=(e,t)=>e.query(`PRAGMA table_info(${t})`).all(),w=(e,t)=>!!e.query("SELECT name FROM sqlite_master WHERE type='table' AND name = ?1").get(t),B=(e,t)=>w(e,t)?e.query(`SELECT * FROM ${t}`).all():[],I=e=>String(e.is_deleted)==="1",M=(e,t,r)=>{let o={};for(let s of r){if(s==="id"){o.id=(t??e)?.id??null;continue}if(t&&Object.hasOwn(t,s)){let i=t[s];if(i!==K&&i!==null&&i!==void 0){o[s]=i;continue}}if(e&&Object.hasOwn(e,s)){o[s]=e[s];continue}o[s]=null}return o},Y=(e,t,r)=>{let o=new Set,s=new Map;for(let a of e)o.add(String(a.id));for(let a of t)s.set(String(a.id),a);let i=[];for(let a of e){let n=s.get(String(a.id));n&&I(n)||i.push(M(a,n,r))}for(let a of t){let n=String(a.id);o.has(n)||I(a)||i.push(M(void 0,a,r))}return i},Q=(e,t,r,o)=>{if(o.length===0)return;let s=r.map(()=>"?").join(","),i=e.prepare(`INSERT INTO ${t} (${r.join(",")}) VALUES (${s})`);o.forEach(a=>{let n=r.map(c=>c in a?a[c]:null);i.run(...n)}),i.finalize()},ee=(e,t,r)=>{let o=t.query("SELECT sql FROM sqlite_master WHERE type='table' AND name = ?1").get(r);return o?.sql?(e.run(`DROP TABLE IF EXISTS ${r}`),e.run(o.sql),!0):(l.warn(`${r} table definition missing in source database`),!1)},f=(e,t,r,o)=>{if(!w(t,o)){l.warn(`${o} table missing in source database`);return}if(!ee(e,t,o))return;let s=O(t,o),i=r&&w(r,o)?O(r,o):[],a=s.map(p=>p.name);for(let p of i)if(!a.includes(p.name)){let h=p.type&&p.type.length>0?p.type:"TEXT";e.run(`ALTER TABLE ${o} ADD COLUMN ${p.name} ${h}`),a.push(p.name)}let n=B(t,o),c=r?B(r,o):[],u=Y(n,c,a);Q(e,o,a,u)},k=(e,t,r)=>{let o=new y(t),s=new y(r);try{e.transaction(()=>{f(e,o,s,"page"),f(e,o,s,"title")})()}finally{o.close(),s.close()}},C=(e,t)=>{let r=new y(t);try{e.transaction(()=>{f(e,r,null,"page"),f(e,r,null,"title")})()}finally{r.close()}},N=e=>{e.run(`CREATE TABLE page (
+id INTEGER,
+content TEXT,
+part TEXT,
+page TEXT,
+number TEXT,
+services TEXT,
+is_deleted TEXT
+)`),e.run(`CREATE TABLE title (
+id INTEGER,
+content TEXT,
+page INTEGER,
+parent INTEGER,
+is_deleted TEXT
+)`)},te=e=>e.query("SELECT * FROM page").all(),re=e=>e.query("SELECT * FROM title").all(),F=e=>({pages:te(e),titles:re(e)});import oe from"path";var U=(e,t)=>{let r=e.replace(/'/g,"''");if(!/^[a-zA-Z0-9_]+$/.test(t))throw new Error("Invalid database alias");return`ATTACH DATABASE '${r}' AS ${t}`};var v=e=>{if(!/^[a-zA-Z0-9_]+$/.test(e))throw new Error("Invalid database alias");return`DETACH DATABASE ${e}`};var R=(e,t,r)=>{let o=e.query(`SELECT sql FROM ${t}.sqlite_master WHERE type='table' AND name = ?1`).get(r);if(!o?.sql)throw new Error(`Missing table definition for ${r} in ${t}`);e.run(`DROP TABLE IF EXISTS ${r}`),e.run(o.sql)},X=(e,t)=>{let r={};for(let a of t){let{name:n}=oe.parse(a);r[n]=a}Object.entries(r).forEach(([a,n])=>e.run(U(n,a))),R(e,"author","author"),R(e,"book","book"),R(e,"category","category");let o=e.prepare("INSERT INTO author SELECT * FROM author.author"),s=e.prepare("INSERT INTO book SELECT * FROM book.book"),i=e.prepare("INSERT INTO category SELECT * FROM category.category");e.transaction(()=>{o.run(),s.run(),i.run()})(),Object.keys(r).forEach(a=>{e.run(v(a))})},A=(e,t,r)=>{e.run(`DROP VIEW IF EXISTS ${t}`),e.run(`CREATE VIEW ${t} AS SELECT * FROM ${r}`)},j=e=>{e.run(`CREATE TABLE author (
+id INTEGER,
+is_deleted TEXT,
+name TEXT,
+biography TEXT,
+death_text TEXT,
+death_number TEXT
+)`),e.run(`CREATE TABLE book (
+id INTEGER,
+name TEXT,
+is_deleted TEXT,
+category TEXT,
+type TEXT,
+date TEXT,
+author TEXT,
+printed TEXT,
+minor_release TEXT,
+major_release TEXT,
+bibliography TEXT,
+hint TEXT,
+pdf_links TEXT,
+metadata TEXT
+)`),e.run(`CREATE TABLE category (
+id INTEGER,
+is_deleted TEXT,
+"order" TEXT,
+name TEXT
+)`),A(e,"authors","author"),A(e,"books","book"),A(e,"categories","category")},ne=e=>e.query("SELECT * FROM author").all(),ae=e=>e.query("SELECT * FROM book").all(),se=e=>e.query("SELECT * FROM category").all(),q=e=>({authors:ne(e),books:ae(e),categories:se(e)});var T={"<img[^>]*>>":"",\u8204:"","\uFD4C":"\u0635\u0644\u0649 \u0627\u0644\u0644\u0647 \u0639\u0644\u064A\u0647 \u0648\u0622\u0644\u0647 \u0648\u0633\u0644\u0645"};import{createWriteStream as ie,promises as b}from"fs";import ce from"https";import le from"os";import D from"path";import{pipeline as pe}from"stream/promises";import ue from"unzipper";var d=async(e="shamela")=>{let t=D.join(le.tmpdir(),e);return b.mkdtemp(t)};async function E(e,t){let r=[];try{let o=await new Promise((s,i)=>{ce.get(e,a=>{a.statusCode!==200?i(new Error(`Failed to download ZIP file: ${a.statusCode} ${a.statusMessage}`)):s(a)}).on("error",a=>{i(new Error(`HTTPS request failed: ${a.message}`))})});return await new Promise((s,i)=>{let a=ue.Parse(),n=[];a.on("entry",c=>{let u=(async()=>{let p=D.join(t,c.path);if(c.type==="Directory")await b.mkdir(p,{recursive:!0}),c.autodrain();else{let h=D.dirname(p);await b.mkdir(h,{recursive:!0});let W=ie(p);await pe(c,W),r.push(p)}})();n.push(u)}),a.on("finish",async()=>{try{await Promise.all(n),s()}catch(c){i(c)}}),a.on("error",c=>{i(new Error(`Error during extraction: ${c.message}`))}),o.pipe(a)}),r}catch(o){throw new Error(`Error processing URL: ${o.message}`)}}import{Buffer as me}from"buffer";import ge from"https";import fe from"process";import{URL as Te,URLSearchParams as de}from"url";var _=(e,t,r=!0)=>{let o=new Te(e);{let s=new de;Object.entries(t).forEach(([i,a])=>{s.append(i,a.toString())}),r&&s.append("api_key",fe.env.SHAMELA_API_KEY),o.search=s.toString()}return o},S=e=>new Promise((t,r)=>{ge.get(e,o=>{let s=o.headers["content-type"]||"",i=[];o.on("data",a=>{i.push(a)}),o.on("end",()=>{let a=me.concat(i);if(s.includes("application/json"))try{let n=JSON.parse(a.toString("utf-8"));t(n)}catch(n){r(new Error(`Failed to parse JSON: ${n.message}`))}else t(a)})}).on("error",o=>{r(new Error(`Error making request: ${o.message}`))})});import Ee from"path";import he from"process";var ye=["author.sqlite","book.sqlite","category.sqlite"],x=()=>{let e=["SHAMELA_API_MASTER_PATCH_ENDPOINT","SHAMELA_API_BOOKS_ENDPOINT","SHAMELA_API_KEY"].filter(t=>!he.env[t]);if(e.length)throw new Error(`${e.join(", ")} environment variables not set`)},G=e=>{let t=new Set(e.map(r=>Ee.basename(r).toLowerCase()));return ye.every(r=>t.has(r.toLowerCase()))};var $=e=>{let t=new V(e);return t.protocol="https",t.toString()},we=async(e,t)=>{x();let r=_(`${L.env.SHAMELA_API_BOOKS_ENDPOINT}/${e}`,{major_release:(t?.majorVersion||0).toString(),minor_release:(t?.minorVersion||0).toString()});l.info(`Fetching shamela.ws book link: ${r.toString()}`);try{let o=await S(r);return{majorRelease:o.major_release,majorReleaseUrl:$(o.major_release_url),...o.minor_release_url&&{minorReleaseUrl:$(o.minor_release_url)},...o.minor_release_url&&{minorRelease:o.minor_release}}}catch(o){throw new Error(`Error fetching book metadata: ${o.message}`)}},Re=async(e,t)=>{l.info(`downloadBook ${e} ${JSON.stringify(t)}`);let r=await d("shamela_downloadBook"),o=t?.bookMetadata||await we(e),[[s],[i]=[]]=await Promise.all([E(o.majorReleaseUrl,r),...o.minorReleaseUrl?[E(o.minorReleaseUrl,r)]:[]]),a=g.join(r,"book.db"),n=new H(a);try{l.info("Creating tables"),await N(n),i?(l.info(`Applying patches from ${i} to ${s}`),await k(n,s,i)):(l.info(`Copying table data from ${s}`),await C(n,s));let{ext:c}=g.parse(t.outputFile.path);if(c===".json"){let u=await F(n);await Bun.file(t.outputFile.path).write(JSON.stringify(u,null,2))}n.close(),(c===".db"||c===".sqlite")&&await m.rename(a,t.outputFile.path),await m.rm(r,{recursive:!0})}finally{n.close()}return t.outputFile.path},Ae=async(e=0)=>{x();let t=_(L.env.SHAMELA_API_MASTER_PATCH_ENDPOINT,{version:e.toString()});l.info(`Fetching shamela.ws master database patch link: ${t.toString()}`);try{let r=await S(t);return{url:r.patch_url,version:r.version}}catch(r){throw new Error(`Error fetching master patch: ${r.message}`)}},gt=e=>{let{origin:t}=new V(L.env.SHAMELA_API_MASTER_PATCH_ENDPOINT);return`${t}/covers/${e}.jpg`},ft=async e=>{l.info(`downloadMasterDatabase ${JSON.stringify(e)}`);let t=await d("shamela_downloadMaster"),r=e.masterMetadata||await Ae(0);l.info(`Downloading master database from: ${JSON.stringify(r)}`);let o=await E($(r.url),t);if(l.info(`sourceTables downloaded: ${o.toString()}`),!G(o))throw l.error(`Some source tables were not found: ${o.toString()}`),new Error("Expected tables not found!");let s=g.join(t,"master.db"),i=new H(s);try{l.info("Creating tables"),await j(i),l.info("Copying data to master table"),await X(i,o);let{ext:a}=g.parse(e.outputFile.path);if(a===".json"){let n=await q(i);await Bun.file(e.outputFile.path).write(JSON.stringify(n,null,2))}i.close(),(a===".db"||a===".sqlite")&&await m.rename(s,e.outputFile.path),await m.rm(t,{recursive:!0})}finally{i.close()}return e.outputFile.path},Tt=async e=>{let t=await d("shamela_getBookData"),r=await Re(e,{outputFile:{path:g.join(t,`${e}.json`)}}),o=await Bun.file(r).json();return await m.rm(t,{recursive:!0}),o};var be=/^[)\]\u00BB"”'’.,?!:\u061B\u060C\u061F\u06D4\u2026]+$/,De=/[[({«“‘]$/,_e=e=>{let t=[];for(let r of e){let o=t[t.length-1];o?.id&&be.test(r.text)?o.text+=r.text:t.push(r)}return t},Se=e=>{let t=e.replace(/\r\n/g,`
+`).replace(/\r/g,`
+`);return/\n/.test(t)||(t=t.replace(/([.?!\u061F\u061B\u06D4\u2026]["“”'’»«)\]]?)\s+(?=[\u0600-\u06FF])/,`$1
+`)),t.split(`
+`).map(r=>r.replace(/^\*+/,"").trim()).filter(Boolean)},z=e=>Se(e).map(t=>({text:t})),J=(e,t)=>{let r=new RegExp(`${t}\\s*=\\s*("([^"]*)"|'([^']*)'|([^s>]+))`,"i"),o=e.match(r);if(o)return o[2]??o[3]??o[4]},xe=e=>{let t=[],r=/<[^>]+>/g,o=0,s;for(s=r.exec(e);s;){s.index>o&&t.push({type:"text",value:e.slice(o,s.index)});let i=s[0],a=/^<\//.test(i),n=i.match(/^<\/?\s*([a-zA-Z0-9:-]+)/),c=n?n[1].toLowerCase():"";if(a)t.push({name:c,type:"end"});else{let u={};u.id=J(i,"id"),u["data-type"]=J(i,"data-type"),t.push({attributes:u,name:c,type:"start"})}o=r.lastIndex,s=r.exec(e)}return o<e.length&&t.push({type:"text",value:e.slice(o)}),t},$e=(e,t)=>{let r=e[e.length-1];return!t||!r||!r.id||!De.test(r.text)||/\n/.test(t)?!1:(r.text+=t.replace(/^\s+/,""),!0)},ht=e=>{if(!/<span[^>]*>/i.test(e))return z(e);let t=xe(`<root>${e}</root>`),r=[],o=0,s=null,i=n=>{if(!n)return;if(o>0&&s){let u=o===1?n.replace(/^\s+/,""):n;s.text+=u;return}if($e(r,n))return;let c=n.trim();c&&r.push(...z(c))};for(let n of t)n.type==="text"?i(n.value):n.type==="start"&&n.name==="span"?n.attributes["data-type"]==="title"&&(o===0&&(s={id:n.attributes.id?.replace(/^toc-/,"")??"",text:""},r.push(s)),o+=1):n.type==="end"&&n.name==="span"&&o>0&&(o-=1,o===0&&(s=null));let a=r.map(n=>n.id?n:{...n,text:n.text.trim()});return _e(a.map(n=>n.id?n:{...n,text:n.text})).filter(n=>n.text.length>0)},Le=Object.entries(T).map(([e,t])=>({regex:new RegExp(e,"g"),replacement:t})),Pe=e=>{if(e===T)return Le;let t=[];for(let r in e)t.push({regex:new RegExp(r,"g"),replacement:e[r]});return t},yt=(e,t=T)=>{let r=Pe(t),o=e;for(let s=0;s<r.length;s++){let{regex:i,replacement:a}=r[s];o=o.replace(i,a)}return o},wt=(e,t="_________")=>{let r="",o=e.lastIndexOf(t);return o>=0&&(r=e.slice(o+t.length),e=e.slice(0,o)),[e,r]};export{Re as downloadBook,ft as downloadMasterDatabase,Tt as getBook,we as getBookMetadata,gt as getCoverUrl,Ae as getMasterMetadata,ht as parseContentRobust,yt as sanitizePageContent,Z as setLogger,wt as splitPageBodyFromFooter};
 //# sourceMappingURL=index.js.map
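The rewritten bundle replaces the old SQL-level patch queries with row-level merging in JavaScript. The rule is recoverable from the minified `M` helper above: for each column, the patch row's value wins unless it is the `#` placeholder, null, or undefined, in which case the original row's value (or null) is kept. A readable, de-minified sketch of that helper (names expanded by hand):

```ts
const PLACEHOLDER = '#';

type Row = Record<string, unknown>;

// Expanded from the minified `M` helper in dist/index.js: merge an original
// row with its patch row, column by column.
const mergeRow = (original: Row | undefined, patch: Row | undefined, columns: string[]): Row => {
    const merged: Row = {};
    for (const column of columns) {
        if (column === 'id') {
            // The patch row's id wins when present; fall back to the original.
            merged.id = (patch ?? original)?.id ?? null;
            continue;
        }
        if (patch && Object.hasOwn(patch, column)) {
            const value = patch[column];
            // '#' is the "unchanged" placeholder; real values override.
            if (value !== PLACEHOLDER && value !== null && value !== undefined) {
                merged[column] = value;
                continue;
            }
        }
        if (original && Object.hasOwn(original, column)) {
            merged[column] = original[column];
            continue;
        }
        merged[column] = null;
    }
    return merged;
};
```

Rows whose patch counterpart has `is_deleted` set to `'1'` are dropped before this merge runs (the minified `I` and `Y` helpers), which is why the flag survives into the exported row types instead of being filtered away.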
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../src/api.ts","../src/utils/logger.ts","../src/db/common.ts","../src/db/queryBuilder.ts","../src/db/book.ts","../src/db/master.ts","../src/utils/constants.ts","../src/utils/io.ts","../src/utils/network.ts","../src/utils/validation.ts"],"sourcesContent":[…]}
required master source tables are present in the provided paths.\n * @param {string[]} sourceTablePaths - Array of file paths to validate\n * @returns {boolean} True if all required source tables (author.sqlite, book.sqlite, category.sqlite) are present\n */\nexport const validateMasterSourceTables = (sourceTablePaths: string[]) => {\n const sourceTableNames = new Set(sourceTablePaths.map((tablePath) => path.basename(tablePath).toLowerCase()));\n return SOURCE_TABLES.every((table) => sourceTableNames.has(table.toLowerCase()));\n};\n"],"mappings":"AAAA,OAAS,YAAAA,MAAgB,aACzB,OAAS,YAAYC,MAAU,KAC/B,OAAOC,MAAU,OACjB,OAAOC,MAAa,UACpB,OAAS,OAAAC,MAAW,MCKpB,IAAMC,EAAgB,CAAE,MAAO,IAAM,CAAC,EAAG,MAAO,IAAM,CAAC,EAAG,KAAM,IAAM,CAAC,EAAG,KAAM,IAAM,CAAC,CAAE,EACrFC,EAAiBD,EAERE,EAAY,CAACC,EAAoBH,IAAkB,CAC5D,GAAI,CAACG,EAAU,OAAS,CAACA,EAAU,OAAS,CAACA,EAAU,KACnD,MAAM,IAAI,MAAM,sDAAsD,EAG1EF,EAASE,CACb,ECXA,IAAMC,EAAqBC,GAAc,IAAIA,EAAE,QAAQ,KAAM,IAAI,CAAC,IAgBrDC,EAAoB,CAACC,EAAcC,IAAoC,CAEhF,IAAMC,EAAgBJ,GAAc,2BAA2B,KAAKA,CAAC,EACrE,GAAI,CAACI,EAAaD,CAAM,EACpB,MAAM,IAAI,MAAM,0BAA0BA,CAAM,EAAE,EAQtD,OAJeD,EACV,MAAM,oBAAoBC,CAAM,gEAAgE,EAChG,IAAI,EAEK,IAAI,CAAC,CAAE,KAAAE,CAAK,IAAM,CAC5B,IAAMC,EAAYF,EAAaC,CAAI,EAAIA,EAAON,EAAkBM,CAAI,EAIpE,MAAO,CAAE,OAFIH,EAAG,MAAM,UAAUC,CAAM,eAAeG,CAAS,GAAG,EAAE,IAAI,EACnD,IAAKC,GAAMA,EAAE,IAAI,EACpB,KAAAF,CAAK,CAC1B,CAAC,CACL,EC1CA,IAAMG,EAAgB,OAQTC,EAAW,CAACC,EAAgBC,IAAkB,CAEvD,IAAMC,EAAcF,EAAO,QAAQ,KAAM,IAAI,EAE7C,GAAI,CAAC,kBAAkB,KAAKC,CAAK,EAC7B,MAAM,IAAI,MAAM,wBAAwB,EAE5C,MAAO,oBAAoBC,CAAW,QAAQD,CAAK,EACvD,EASaE,EAAsB,CAC/BC,EACAC,EACAC,EAAmBR,IACV;AAAA,WACFQ,CAAQ,IAAID,CAAS;AAAA,kBACdE,EAAiB,UAAWD,EAAUF,CAAU,CAAC;AAAA,eACpDG,EAAiB,OAAQD,EAAUF,CAAU,CAAC;AAAA,eAC9CG,EAAiB,OAAQD,EAAUF,CAAU,CAAC;AAAA,iBAC5CG,EAAiB,SAAUD,EAAUF,CAAU,CAAC;AAAA;AAAA;AAAA,WAGtDA,CAAU,IAAIC,CAAS;AAAA,YACtBC,CAAQ,IAAID,CAAS,SAASD,CAAU,IAAIC,CAAS;AAAA;AAAA,EAI3DG,EAAoB,CAACC,EAAoBH,EAAkBF,IAAuB;AAAA;AAAA,oBAEpEA,CAAU,UAAUK,CAAU,gBAAgBL,CAAU,UAAUK,CAAU;AAAA,oBAC5EH,CAAQ,UAAUG,CAAU;AAAA;AAAA,WAErCL,CAAU;AAAA,YACTE,CAAQ,eAAeF,CAAU;AAAA,EAUhCM,EAAuB,CAChCN,EACAC,EACAC,EAAmBR,IACV;AAAA,WACFQ,CAAQ,IAAID,CAAS;AAAA,kBACdG,EAAkB,UAAWF,EAAUF,CAAU,CAAC;AAAA,eACrDI,EAAkB,OAAQF,EAAUF,CAAU,CAAC;AAAA,iBAC7CI,EAAkB,SAAUF,EAAUF,CAAU,CAAC;AAAA;AAAA;AAAA,WAGvDA,CAAU,IAAIC,CAAS;AAAA,YACtBC,CAAQ,IAAID,CAAS,SAASD,CAAU,IAAIC,CAAS;AAAA;EA6B1D,IAAMM,EAAYC,GAAkB,CAEvC,GAAI,CAAC,kBAAkB,KAAKA,CAAK,EAC7B,MAAM,IAAI,MAAM,wBAAwB,EAE5C,MAAO,mBAAmBA,CAAK,EACnC,EAEMC,EAAmB,CAACC,EAAoBC,EAAkBC,IAAuB;AAAA;AAAA,oBAEnEA,CAAU,SAASF,CAAU,gBAAgBE,CAAU,SAASF,CAAU;AAAA,oBAC1EC,CAAQ,SAASD,CAAU;AAAA;AAAA,WAEpCE,CAAU;AAAA,YACTD,CAAQ,cAAcC,CAAU;ECzG5C,IAAMC,EAAiB,QACjBC,EAAe,MAEfC,EAAsB,CACxBC,EACAC,EACAC,EACAC,EACAC,IACW,CACX,IAAMC,EAAa,CAAC,EAEpB,GAAIL,EAAY,KAAMM,GAAMA,EAAE,OAASJ,CAAK,EACxCG,EAAW,KACP,oBAAoBH,CAAK;AAAA,sBACfC,EAAO,KAAK,GAAG,CAAC;AAAA,oBAClBL,CAAY,IAAII,CAAK;AAAA;AAAA;AAAA,wBAGjBL,CAAc,IAAIK,CAAK;AAAA;AAAA,eAGvC,EACAG,EAAW,KAAKD,CAAU,MACvB,CACH,IAAIG,EAAgB,oBAAoBL,CAAK,WAAWC,EAAO,KAAK,GAAG,CAAC,SAASL,CAAY,IAAII,CAAK,GAElGD,EAAU,KAAMK,GAAMA,EAAE,OAASJ,CAAK,GAAG,OAAO,SAAS,YAAY,IACrEK,GAAiB,yBAGrBF,EAAW,KAAKE,CAAa,CACjC,CAEA,OAAOF,CACX,EAqBaG,EAAe,CAACC,EAAcC,EAAeC,IAAoB,CAC1EF,EAAG,IAAIG,EAASF,EAAOZ,CAAY,CAAC,EACpCW,EAAG,IAAIG,EAASD,EAASd,CAAc,CAAC,EAExC,GAAI,CACA,IAAMG,EAAca,EAAkBJ,EAAIZ,CAAc,EAClDI,EAAYY,EAAkBJ,EAAIX,CAAY,EACpDgB,EAAO,MAAM,CAAE,UAAAb,EAAW,YAAAD,CAAY,EAAG,yBAAyB,EAClE,IAAMe,EAAiBhB,EACnBC,EACAC,SAEA,CAAC,KAAM,UAAW,OAAQ,OAAQ,QAAQ,EAC1Ce,EAAoBnB,QAA2B,CACnD,EACMoB,EAAkBlB,EACpBC,EACAC,UAEA,CAAC,KAAM,UAAW,OAAQ,
QAAQ,EAClCiB,EAAqBrB,SAA4B,CACrD,EAEMsB,EAAgB,CAAC,GAAGJ,EAAgB,GAAGE,CAAe,EAAE,IAAKG,GAAQX,EAAG,QAAQW,CAAG,CAAC,EAE1FX,EAAG,YAAY,IAAM,CACjBU,EAAc,QAASE,GAASA,EAAK,IAAI,CAAC,CAC9C,CAAC,EAAE,CACP,QAAE,CACEZ,EAAG,IAAIa,EAASxB,CAAY,CAAC,EAC7BW,EAAG,IAAIa,EAASzB,CAAc,CAAC,CACnC,CACJ,EAca0B,EAAgB,CAACd,EAAcC,IAAkB,CAC1DD,EAAG,IAAIG,EAASF,EAAOZ,CAAY,CAAC,EACpC,IAAM0B,EAASX,EAAkBJ,EAAIX,CAAY,EAEjDgB,EAAO,MAAM,CAAE,OAAAU,CAAO,EAAG,kBAAkB,EAE3C,IAAMC,EAAchB,EAAG,QACnB,6DAAuEX,CAAY,QACvF,EACM4B,EAAajB,EAAG,QAClB,iEAA2EX,CAAY,OAC3F,EAEAW,EAAG,YAAY,IAAM,CACjBgB,EAAY,IAAI,EAChBC,EAAW,IAAI,CACnB,CAAC,EAAE,EAEHjB,EAAG,IAAIa,EAASxB,CAAY,CAAC,CACjC,EAYa6B,EAAgBlB,GAAiB,CAC1CA,EAAG,IAAI,sGAAsG,EAC7GA,EAAG,IAAI,yFAAyF,CACpG,EAcamB,EAAenB,GACFA,EACjB,0BAAoC,EACpC,IAAI,EACJ,IAAKoB,GAAa,CACf,GAAM,CAAE,QAAAC,EAAS,GAAAC,EAAI,OAAAC,EAAQ,KAAAC,EAAM,KAAAC,CAAK,EAAIL,EAE5C,MAAO,CACH,QAAAC,EACA,GAAAC,EACA,GAAIE,GAAQ,CAAE,KAAAA,CAAK,EACnB,GAAID,GAAU,CAAE,OAAAA,CAAO,EACvB,GAAIE,GAAQ,CAAE,KAAAA,CAAK,CACvB,CACJ,CAAC,EAiBIC,EAAgB1B,GACDA,EACnB,2BAAqC,EACrC,IAAI,EACJ,IAAKoB,GAAa,CACf,IAAMO,EAAIP,EAEV,MAAO,CACH,QAASO,EAAE,QACX,GAAIA,EAAE,GACN,KAAMA,EAAE,KACR,GAAIA,EAAE,QAAU,CAAE,OAAQA,EAAE,MAAO,CACvC,CACJ,CAAC,EAgBIC,EAAW5B,IACb,CAAE,MAAOmB,EAAYnB,CAAE,EAAG,OAAQ0B,EAAa1B,CAAE,CAAE,GC1N9D,OAAO6B,MAAU,OCSV,IAAMC,EAA4B,QDYlC,IAAMC,EAA6B,CAACC,EAAcC,IAA2B,CAChF,IAAMC,EAAsCD,EAAa,OAAO,CAACE,EAAKC,IAAc,CAChF,GAAM,CAAE,KAAAC,CAAK,EAAIC,EAAK,MAAMF,CAAS,EACrC,MAAO,CAAE,GAAGD,EAAK,CAACE,CAAI,EAAGD,CAAU,CACvC,EAAG,CAAC,CAAC,EAEL,OAAO,QAAQF,CAAW,EAAE,QAAQ,CAAC,CAACK,EAAOC,CAAM,IAAMR,EAAG,IAAIS,EAASD,EAAQD,CAAK,CAAC,CAAC,EAExF,IAAMG,EAAgBV,EAAG,QACrB,0EAAoFW,CAAyB,oFACjH,EACMC,EAAcZ,EAAG,QACnB,oEAA8EW,CAAyB,kJAC3G,EACME,EAAmBb,EAAG,kFAE5B,EAEAA,EAAG,YAAY,IAAM,CACjBU,EAAc,IAAI,EAClBE,EAAY,IAAI,EAChBC,EAAiB,IAAI,CACzB,CAAC,EAAE,EAEH,OAAO,KAAKX,CAAW,EAAE,QAASY,GAAcd,EAAG,IAAIe,EAASD,CAAS,CAAC,CAAC,CAC/E,EAcaE,EAAgBhB,GAAiB,CAC1CA,EAAG,IAAI,yFAAyF,EAChGA,EAAG,IACC,+NACJ,EACAA,EAAG,IAAI,6DAA6D,CACxE,EAEaiB,EAAiBjB,GACbA,EAAG,6BAAuC,EAAE,IAAI,EAE9B,IAAKkB,IAAY,CAC5C,GAAIA,EAAE,WAAa,CAAE,UAAWA,EAAE,SAAU,EAC5C,GAAIA,EAAE,OAAS,CAAE,MAAOA,EAAE,KAAM,EAChC,GAAIA,EAAE,GACN,KAAMA,EAAE,IACZ,EAAE,EAKOC,GAAenB,GACXA,EAAG,2BAAqC,EAAE,IAAI,EAEhC,IAAKoB,GAAa,CACzC,IAAMF,EAAIE,EAEV,MAAO,CACH,OAAQC,GAAYH,EAAE,MAAM,EAC5B,aAAcA,EAAE,aAChB,SAAUA,EAAE,SACZ,GAAIA,EAAE,GACN,MAAOA,EAAE,MACT,SAAU,KAAK,MAAMA,EAAE,QAAQ,EAC/B,KAAMA,EAAE,KACR,QAASA,EAAE,QACX,KAAMA,EAAE,KACR,GAAIA,EAAE,MAAQA,EAAE,KAAK,SAAS,IAAMP,GAA6B,CAAE,KAAMO,EAAE,IAAK,EAChF,GAAIA,EAAE,MAAQ,CAAE,KAAMA,EAAE,IAAK,EAC7B,GAAIA,EAAE,WAAa,CAAE,SAAUI,GAAcJ,EAAE,SAAS,CAAE,EAC1D,GAAIA,EAAE,OAAS,CAAE,aAAcA,EAAE,KAAM,CAC3C,CACJ,CAAC,EAKQK,GAAoBvB,GAChBA,EAAG,gCAA0C,EAAE,IAAI,EAE5B,IAAKkB,IAAY,CACjD,GAAIA,EAAE,GACN,KAAMA,EAAE,IACZ,EAAE,EAKAG,GAAeG,GAAkB,CACnC,IAAMC,EAAmBD,EAAM,MAAM,OAAO,EAAE,IAAKE,GAAO,SAASA,EAAG,KAAK,CAAC,CAAC,EAC7E,OAAOD,EAAO,OAAS,EAAIA,EAASA,EAAO,CAAC,CAChD,EAEMH,GAAiBE,GAAkB,CACrC,IAAMC,EAAS,KAAK,MAAMD,CAAK,EAE/B,OAAIC,EAAO,QACPA,EAAO,MAASA,EAAO,MAAmB,IAAKE,GAAc,CACzD,GAAM,CAACC,EAAMF,CAAE,EAAIC,EAAE,MAAM,GAAG,EAC9B,MAAO,CAAE,GAAID,GAAM,CAAE,GAAAA,CAAG,EAAI,KAAAE,CAAK,CACrC,CAAC,GAGEH,CACX,EAEaI,EAAW7B,IACb,CAAE,QAASiB,EAAcjB,CAAE,EAAG,MAAOmB,GAAYnB,CAAE,EAAG,WAAYuB,GAAiBvB,CAAE,CAAE,GE1IlG,OAAS,qBAAA8B,GAAmB,YAAYC,MAAU,KAElD,OAAOC,OAAW,QAClB,OAAOC,OAAQ,KACf,OAAOC,MAAU,OACjB,OAAS,YAAAC,OAAgB,kBACzB,OAAOC,OAAyB,WAOzB,IAAMC,EAAgB,MAAOC,EAAS,YAAc,CACvD,IAAMC,EAAcL,EAAK,KAAKD,GAAG,OAAO,EAAGK,CAAM,EACjD,OAAOP,EAAG,QAAQQ,CAAW,CACjC,EAgBA,eAAsBC,EAAaC,EAAaC,EAAsC,CAClF,IAAMC,EAA2B,CAAC,EAElC,GAAI,CAEA,
IAAMC,EAAW,MAAM,IAAI,QAAyB,CAACC,EAASC,IAAW,CACrEC,GACK,IAAIN,EAAMO,GAAQ,CACXA,EAAI,aAAe,IACnBF,EAAO,IAAI,MAAM,gCAAgCE,EAAI,UAAU,IAAIA,EAAI,aAAa,EAAE,CAAC,EAEvFH,EAAQG,CAAG,CAEnB,CAAC,EACA,GAAG,QAAUC,GAAQ,CAClBH,EAAO,IAAI,MAAM,yBAAyBG,EAAI,OAAO,EAAE,CAAC,CAC5D,CAAC,CACT,CAAC,EAGD,aAAM,IAAI,QAAc,CAACJ,EAASC,IAAW,CACzC,IAAMI,EAAcC,GAAS,MAAM,EAC7BC,EAAiC,CAAC,EAExCF,EAAY,GAAG,QAAUG,GAAiB,CACtC,IAAMC,GAAgB,SAAY,CAC9B,IAAMC,EAAWC,EAAK,KAAKd,EAAWW,EAAM,IAAI,EAEhD,GAAIA,EAAM,OAAS,YAEf,MAAMI,EAAG,MAAMF,EAAU,CAAE,UAAW,EAAK,CAAC,EAC5CF,EAAM,UAAU,MACb,CAEH,IAAMK,EAAMF,EAAK,QAAQD,CAAQ,EACjC,MAAME,EAAG,MAAMC,EAAK,CAAE,UAAW,EAAK,CAAC,EAGvC,IAAMC,EAAcC,GAAkBL,CAAQ,EAC9C,MAAMM,GAASR,EAAOM,CAAW,EACjChB,EAAe,KAAKY,CAAQ,CAChC,CACJ,GAAG,EAEHH,EAAc,KAAKE,CAAY,CACnC,CAAC,EAEDJ,EAAY,GAAG,SAAU,SAAY,CACjC,GAAI,CAEA,MAAM,QAAQ,IAAIE,CAAa,EAC/BP,EAAQ,CACZ,OAASiB,EAAO,CACZhB,EAAOgB,CAAK,CAChB,CACJ,CAAC,EAEDZ,EAAY,GAAG,QAAUY,GAAU,CAC/BhB,EAAO,IAAI,MAAM,4BAA4BgB,EAAM,OAAO,EAAE,CAAC,CACjE,CAAC,EAGDlB,EAAS,KAAKM,CAAW,CAC7B,CAAC,EAEMP,CACX,OAASmB,EAAY,CACjB,MAAM,IAAI,MAAM,yBAAyBA,EAAM,OAAO,EAAE,CAC5D,CACJ,CCrGA,OAAS,UAAAC,OAAc,SAEvB,OAAOC,OAAW,QAClB,OAAOC,OAAa,UACpB,OAAS,OAAAC,GAAK,mBAAAC,OAAuB,MAS9B,IAAMC,EAAW,CAACC,EAAkBC,EAAkCC,EAAmB,KAAc,CAC1G,IAAMC,EAAM,IAAIN,GAAIG,CAAQ,EAC5B,CACI,IAAMI,EAAS,IAAIN,GAEnB,OAAO,QAAQG,CAAW,EAAE,QAAQ,CAAC,CAACI,EAAKC,CAAK,IAAM,CAClDF,EAAO,OAAOC,EAAKC,EAAM,SAAS,CAAC,CACvC,CAAC,EAEGJ,GACAE,EAAO,OAAO,UAAWR,GAAQ,IAAI,eAAgB,EAGzDO,EAAI,OAASC,EAAO,SAAS,CACjC,CAEA,OAAOD,CACX,EASaI,EAAoDJ,GACtD,IAAI,QAAQ,CAACK,EAASC,IAAW,CACpCd,GACK,IAAIQ,EAAMO,GAAyB,CAChC,IAAMC,EAAcD,EAAI,QAAQ,cAAc,GAAK,GAC7CE,EAAuB,CAAC,EAE9BF,EAAI,GAAG,OAASG,GAAkB,CAC9BD,EAAW,KAAKC,CAAK,CACzB,CAAC,EAEDH,EAAI,GAAG,MAAO,IAAM,CAChB,IAAMI,EAAWpB,GAAO,OAAOkB,CAAU,EAEzC,GAAID,EAAY,SAAS,kBAAkB,EACvC,GAAI,CACA,IAAMI,EAAO,KAAK,MAAMD,EAAS,SAAS,OAAO,CAAC,EAClDN,EAAQO,CAAI,CAChB,OAASC,EAAY,CACjBP,EAAO,IAAI,MAAM,yBAAyBO,EAAM,OAAO,EAAE,CAAC,CAC9D,MAEAR,EAAQM,CAAa,CAE7B,CAAC,CACL,CAAC,EACA,GAAG,QAAUE,GAAU,CACpBP,EAAO,IAAI,MAAM,yBAAyBO,EAAM,OAAO,EAAE,CAAC,CAC9D,CAAC,CACT,CAAC,ECpEL,OAAOC,OAAU,OACjB,OAAOC,OAAa,UAEpB,IAAMC,GAAgB,CAAC,gBAAiB,cAAe,iBAAiB,EAM3DC,EAAuB,IAAM,CACtC,IAAMC,EAAuB,CACzB,oCACA,6BACA,iBACJ,EAAE,OAAQC,GAAQ,CAACJ,GAAQ,IAAII,CAAG,CAAC,EAEnC,GAAID,EAAqB,OACrB,MAAM,IAAI,MAAM,GAAGA,EAAqB,KAAK,IAAI,CAAC,gCAAgC,CAE1F,EAOaE,EAA8BC,GAA+B,CACtE,IAAMC,EAAmB,IAAI,IAAID,EAAiB,IAAKE,GAAcT,GAAK,SAASS,CAAS,EAAE,YAAY,CAAC,CAAC,EAC5G,OAAOP,GAAc,MAAOQ,GAAUF,EAAiB,IAAIE,EAAM,YAAY,CAAC,CAAC,CACnF,ETHA,IAAMC,EAAoBC,GAAwB,CAC9C,IAAMC,EAAM,IAAIC,EAAIF,CAAW,EAC/B,OAAAC,EAAI,SAAW,QAERA,EAAI,SAAS,CACxB,EA2BaE,GAAkB,MAC3BC,EACAC,IAC0C,CAC1CC,EAAqB,EAErB,IAAML,EAAMM,EAAS,GAAGC,EAAQ,IAAI,0BAA0B,IAAIJ,CAAE,GAAI,CACpE,eAAgBC,GAAS,cAAgB,GAAG,SAAS,EACrD,eAAgBA,GAAS,cAAgB,GAAG,SAAS,CACzD,CAAC,EAEDI,EAAO,KAAK,kCAAkCR,EAAI,SAAS,CAAC,EAAE,EAE9D,GAAI,CACA,IAAMS,EAAY,MAAMC,EAASV,CAAG,EACpC,MAAO,CACH,aAAcS,EAAS,cACvB,gBAAiBX,EAAiBW,EAAS,iBAAiB,EAC5D,GAAIA,EAAS,mBAAqB,CAAE,gBAAiBX,EAAiBW,EAAS,iBAAiB,CAAE,EAClG,GAAIA,EAAS,mBAAqB,CAAE,aAAcA,EAAS,aAAc,CAC7E,CACJ,OAASE,EAAY,CACjB,MAAM,IAAI,MAAM,iCAAiCA,EAAM,OAAO,EAAE,CACpE,CACJ,EA4BaC,GAAe,MAAOT,EAAYC,IAAkD,CAC7FI,EAAO,KAAK,gBAAgBL,CAAE,IAAI,KAAK,UAAUC,CAAO,CAAC,EAAE,EAE3D,IAAMS,EAAY,MAAMC,EAAc,sBAAsB,EAEtDC,EAA+CX,GAAS,cAAiB,MAAMF,GAAgBC,CAAE,EACjG,CAAC,CAACa,CAAY,EAAG,CAACC,CAAa,EAAI,CAAC,CAAC,EAAgB,MAAM,QAAQ,IAAI,CACzEC,EAAaH,EAAa,gBAAiBF,CAAS,EACpD,GAAIE,EAAa,gBAAkB,CAACG,EAAaH,EAAa,gBAAiBF,CAAS,CAAC,EAAI,CAAC,CAClG,CAAC,EACKM,EAASC,EAAK,KAAKP,EAAW,SAAS,EAEvCQ,EAAS,IAAIC,EAASH,CAAM,EAElC,GAAI,CACAX,EAAO,KAAK,iBAAiB,E
AC7B,MAAMe,EAAiBF,CAAM,EAEzBJ,GACAT,EAAO,KAAK,yBAAyBS,CAAa,OAAOD,CAAY,EAAE,EACvE,MAAMQ,EAAaH,EAAQL,EAAcC,CAAa,IAEtDT,EAAO,KAAK,2BAA2BQ,CAAY,EAAE,EACrD,MAAMS,EAAcJ,EAAQL,CAAY,GAG5C,GAAM,CAAE,IAAKU,CAAU,EAAIN,EAAK,MAAMhB,EAAQ,WAAW,IAAI,EAE7D,GAAIsB,IAAc,QAAS,CACvB,IAAMC,EAAS,MAAMC,EAAYP,CAAM,EACvC,MAAM,IAAI,KAAKjB,EAAQ,WAAW,IAAI,EAAE,MAAM,KAAK,UAAUuB,EAAQ,KAAM,CAAC,CAAC,CACjF,CAEAN,EAAO,MAAM,GAETK,IAAc,OAASA,IAAc,YACrC,MAAMG,EAAG,OAAOV,EAAQf,EAAQ,WAAW,IAAI,EAGnD,MAAMyB,EAAG,GAAGhB,EAAW,CAAE,UAAW,EAAK,CAAC,CAC9C,QAAE,CACEQ,EAAO,MAAM,CACjB,CAEA,OAAOjB,EAAQ,WAAW,IAC9B,EAqBa0B,GAAoB,MAAOC,EAAkB,IAAiD,CACvG1B,EAAqB,EAErB,IAAML,EAAMM,EAASC,EAAQ,IAAI,kCAA6C,CAAE,QAASwB,EAAQ,SAAS,CAAE,CAAC,EAE7GvB,EAAO,KAAK,mDAAmDR,EAAI,SAAS,CAAC,EAAE,EAE/E,GAAI,CACA,IAAMS,EAAgC,MAAMC,EAASV,CAAG,EACxD,MAAO,CAAE,IAAKS,EAAS,UAAW,QAASA,EAAS,OAAQ,CAChE,OAASE,EAAY,CACjB,MAAM,IAAI,MAAM,gCAAgCA,EAAM,OAAO,EAAE,CACnE,CACJ,EAiBaqB,GAAeC,GAAmB,CAC3C,GAAM,CAAE,OAAAC,CAAO,EAAI,IAAIjC,EAAIM,EAAQ,IAAI,iCAAkC,EACzE,MAAO,GAAG2B,CAAM,WAAWD,CAAM,MACrC,EA4BaE,GAAyB,MAAO/B,GAAoD,CAC7FI,EAAO,KAAK,0BAA0B,KAAK,UAAUJ,CAAO,CAAC,EAAE,EAE/D,IAAMS,EAAY,MAAMC,EAAc,wBAAwB,EAExDsB,EACFhC,EAAQ,gBAAmB,MAAM0B,GAAkB,CAA+B,EAEtFtB,EAAO,KAAK,qCAAqC,KAAK,UAAU4B,CAAc,CAAC,EAAE,EACjF,IAAMC,EAAyB,MAAMnB,EAAapB,EAAiBsC,EAAe,GAAG,EAAGvB,CAAS,EAIjG,GAFAL,EAAO,KAAK,4BAA4B6B,EAAa,SAAS,CAAC,EAAE,EAE7D,CAACC,EAA2BD,CAAY,EACxC,MAAA7B,EAAO,MAAM,sCAAsC6B,EAAa,SAAS,CAAC,EAAE,EACtE,IAAI,MAAM,4BAA4B,EAGhD,IAAMlB,EAASC,EAAK,KAAKP,EAAW,WAAW,EAEzCQ,EAAS,IAAIC,EAASH,CAAM,EAElC,GAAI,CACAX,EAAO,KAAK,iBAAiB,EAC7B,MAAMe,EAAmBF,CAAM,EAE/Bb,EAAO,KAAK,8BAA8B,EAC1C,MAAM+B,EAA2BlB,EAAQgB,CAAY,EAErD,GAAM,CAAE,IAAKX,CAAU,EAAIN,EAAK,MAAMhB,EAAQ,WAAW,IAAI,EAE7D,GAAIsB,IAAc,QAAS,CACvB,IAAMC,EAAS,MAAMC,EAAcP,CAAM,EACzC,MAAM,IAAI,KAAKjB,EAAQ,WAAW,IAAI,EAAE,MAAM,KAAK,UAAUuB,EAAQ,KAAM,CAAC,CAAC,CACjF,CAEAN,EAAO,MAAM,GAETK,IAAc,OAASA,IAAc,YACrC,MAAMG,EAAG,OAAOV,EAAQf,EAAQ,WAAW,IAAI,EAGnD,MAAMyB,EAAG,GAAGhB,EAAW,CAAE,UAAW,EAAK,CAAC,CAC9C,QAAE,CACEQ,EAAO,MAAM,CACjB,CAEA,OAAOjB,EAAQ,WAAW,IAC9B,EAqBaoC,GAAU,MAAOrC,GAAkC,CAC5D,IAAMU,EAAY,MAAMC,EAAc,qBAAqB,EACrD2B,EAAa,MAAM7B,GAAaT,EAAI,CAAE,WAAY,CAAE,KAAMiB,EAAK,KAAKP,EAAW,GAAGV,CAAE,OAAO,CAAE,CAAE,CAAC,EAEhGuC,EAAiB,MAAM,IAAI,KAAKD,CAAU,EAAE,KAAK,EACvD,aAAMZ,EAAG,GAAGhB,EAAW,CAAE,UAAW,EAAK,CAAC,EAEnC6B,CACX","names":["Database","fs","path","process","URL","SILENT_LOGGER","logger","setLogger","newLogger","escapeAsSqlString","v","getInternalTables","db","dbName","isValidIdent","name","tableExpr","c","MAIN_DB_ALIAS","attachDB","dbFile","alias","escapedPath","buildPagePatchQuery","patchAlias","tableName","aslAlias","updatePageColumn","updateTitleColumn","columnName","buildTitlePatchQuery","detachDB","alias","updatePageColumn","columnName","aslAlias","patchAlias","PATCH_DB_ALIAS","ASL_DB_ALIAS","buildCopyStatements","patchTables","aslTables","table","fields","patchQuery","statements","t","copyStatement","applyPatches","db","aslDB","patchDB","attachDB","getInternalTables","logger","pageStatements","buildPagePatchQuery","titleStatements","buildTitlePatchQuery","allStatements","sql","stmt","detachDB","copyTableData","tables","titleInsert","pageInsert","createTables","getAllPages","row","content","id","number","page","part","getAllTitles","r","getData","path","UNKNOWN_VALUE_PLACEHOLDER","copyForeignMasterTableData","db","sourceTables","aliasToPath","acc","tablePath","name","path","alias","dbPath","attachDB","insertAuthors","UNKNOWN_VALUE_PLACEHOLDER","insertBooks","insertCategories","statement","detachDB","createTables","getAllAuthors","r","getAllBooks","r
ow","parseAuthor","parsePdfLinks","getAllCategories","value","result","id","f","file","getData","createWriteStream","fs","https","os","path","pipeline","unzipper","createTempDir","prefix","tempDirBase","unzipFromUrl","url","outputDir","extractedFiles","response","resolve","reject","https","res","err","unzipStream","unzipper","entryPromises","entry","entryPromise","filePath","path","fs","dir","writeStream","createWriteStream","pipeline","error","Buffer","https","process","URL","URLSearchParams","buildUrl","endpoint","queryParams","useAuth","url","params","key","value","httpsGet","resolve","reject","res","contentType","dataChunks","chunk","fullData","json","error","path","process","SOURCE_TABLES","validateEnvVariables","envVariablesNotFound","key","validateMasterSourceTables","sourceTablePaths","sourceTableNames","tablePath","table","fixHttpsProtocol","originalUrl","url","URL","getBookMetadata","id","options","validateEnvVariables","buildUrl","process","logger","response","httpsGet","error","downloadBook","outputDir","createTempDir","bookResponse","bookDatabase","patchDatabase","unzipFromUrl","dbPath","path","client","Database","createTables","applyPatches","copyTableData","extension","result","getData","fs","getMasterMetadata","version","getCoverUrl","bookId","origin","downloadMasterDatabase","masterResponse","sourceTables","validateMasterSourceTables","copyForeignMasterTableData","getBook","outputPath","data"]}
|
|
1
|
+
{"version":3,"sources":["../src/api.ts","../src/db/book.ts","../src/utils/logger.ts","../src/db/master.ts","../src/db/queryBuilder.ts","../src/utils/constants.ts","../src/utils/io.ts","../src/utils/network.ts","../src/utils/validation.ts","../src/content.ts"],"sourcesContent":["import { Database } from 'bun:sqlite';\nimport { promises as fs } from 'node:fs';\nimport path from 'node:path';\nimport process from 'node:process';\nimport { URL } from 'node:url';\n\nimport { applyPatches, copyTableData, createTables as createBookTables, getData as getBookData } from './db/book.js';\nimport {\n copyForeignMasterTableData,\n createTables as createMasterTables,\n getData as getMasterData,\n} from './db/master.js';\nimport type {\n BookData,\n DownloadBookOptions,\n DownloadMasterOptions,\n GetBookMetadataOptions,\n GetBookMetadataResponsePayload,\n GetMasterMetadataResponsePayload,\n} from './types.js';\nimport { DEFAULT_MASTER_METADATA_VERSION } from './utils/constants.js';\nimport { createTempDir, unzipFromUrl } from './utils/io.js';\nimport logger from './utils/logger.js';\nimport { buildUrl, httpsGet } from './utils/network.js';\nimport { validateEnvVariables, validateMasterSourceTables } from './utils/validation.js';\n\nconst fixHttpsProtocol = (originalUrl: string) => {\n const url = new URL(originalUrl);\n url.protocol = 'https';\n\n return url.toString();\n};\n\ntype BookUpdatesResponse = {\n major_release: number;\n major_release_url: string;\n minor_release?: number;\n minor_release_url?: string;\n};\n\n/**\n * Retrieves metadata for a specific book from the Shamela API.\n *\n * This function fetches book release information including major and minor release\n * URLs and version numbers from the Shamela web service.\n *\n * @param id - The unique identifier of the book to fetch metadata for\n * @param options - Optional parameters for specifying major and minor versions\n * @returns A promise that resolves to book metadata including release URLs and versions\n *\n * @throws {Error} When environment variables are not set or API request fails\n *\n * @example\n * ```typescript\n * const metadata = await getBookMetadata(123, { majorVersion: 1, minorVersion: 2 });\n * console.log(metadata.majorReleaseUrl); // Download URL for the book\n * ```\n */\nexport const getBookMetadata = async (\n id: number,\n options?: GetBookMetadataOptions,\n): Promise<GetBookMetadataResponsePayload> => {\n validateEnvVariables();\n\n const url = buildUrl(`${process.env.SHAMELA_API_BOOKS_ENDPOINT}/${id}`, {\n major_release: (options?.majorVersion || 0).toString(),\n minor_release: (options?.minorVersion || 0).toString(),\n });\n\n logger.info(`Fetching shamela.ws book link: ${url.toString()}`);\n\n try {\n const response = (await httpsGet(url)) as BookUpdatesResponse;\n return {\n majorRelease: response.major_release,\n majorReleaseUrl: fixHttpsProtocol(response.major_release_url),\n ...(response.minor_release_url && { minorReleaseUrl: fixHttpsProtocol(response.minor_release_url) }),\n ...(response.minor_release_url && { minorRelease: response.minor_release }),\n };\n } catch (error: any) {\n throw new Error(`Error fetching book metadata: ${error.message}`);\n }\n};\n\n/**\n * Downloads and processes a book from the Shamela database.\n *\n * This function downloads the book's database files, applies patches if available,\n * creates the necessary database tables, and exports the data to the specified format.\n * The output can be either a JSON file or a SQLite database file.\n *\n * @param id - The unique 
identifier of the book to download\n * @param options - Configuration options including output file path and optional book metadata\n * @returns A promise that resolves to the path of the created output file\n *\n * @throws {Error} When download fails, database operations fail, or file operations fail\n *\n * @example\n * ```typescript\n * // Download as JSON\n * const jsonPath = await downloadBook(123, {\n * outputFile: { path: './book.json' }\n * });\n *\n * // Download as SQLite database\n * const dbPath = await downloadBook(123, {\n * outputFile: { path: './book.db' }\n * });\n * ```\n */\nexport const downloadBook = async (id: number, options: DownloadBookOptions): Promise<string> => {\n logger.info(`downloadBook ${id} ${JSON.stringify(options)}`);\n\n const outputDir = await createTempDir('shamela_downloadBook');\n\n const bookResponse: GetBookMetadataResponsePayload = options?.bookMetadata || (await getBookMetadata(id));\n const [[bookDatabase], [patchDatabase] = []]: string[][] = await Promise.all([\n unzipFromUrl(bookResponse.majorReleaseUrl, outputDir),\n ...(bookResponse.minorReleaseUrl ? [unzipFromUrl(bookResponse.minorReleaseUrl, outputDir)] : []),\n ]);\n const dbPath = path.join(outputDir, 'book.db');\n\n const client = new Database(dbPath);\n\n try {\n logger.info(`Creating tables`);\n await createBookTables(client);\n\n if (patchDatabase) {\n logger.info(`Applying patches from ${patchDatabase} to ${bookDatabase}`);\n await applyPatches(client, bookDatabase, patchDatabase);\n } else {\n logger.info(`Copying table data from ${bookDatabase}`);\n await copyTableData(client, bookDatabase);\n }\n\n const { ext: extension } = path.parse(options.outputFile.path);\n\n if (extension === '.json') {\n const result = await getBookData(client);\n await Bun.file(options.outputFile.path).write(JSON.stringify(result, null, 2));\n }\n\n client.close();\n\n if (extension === '.db' || extension === '.sqlite') {\n await fs.rename(dbPath, options.outputFile.path);\n }\n\n await fs.rm(outputDir, { recursive: true });\n } finally {\n client.close();\n }\n\n return options.outputFile.path;\n};\n\n/**\n * Retrieves metadata for the master database from the Shamela API.\n *\n * The master database contains information about all books, authors, and categories\n * in the Shamela library. 
This function fetches the download URL and version\n * information for the master database patches.\n *\n * @param version - The version number to check for updates (defaults to 0)\n * @returns A promise that resolves to master database metadata including download URL and version\n *\n * @throws {Error} When environment variables are not set or API request fails\n *\n * @example\n * ```typescript\n * const masterMetadata = await getMasterMetadata(5);\n * console.log(masterMetadata.url); // URL to download master database patch\n * console.log(masterMetadata.version); // Latest version number\n * ```\n */\nexport const getMasterMetadata = async (version: number = 0): Promise<GetMasterMetadataResponsePayload> => {\n validateEnvVariables();\n\n const url = buildUrl(process.env.SHAMELA_API_MASTER_PATCH_ENDPOINT as string, { version: version.toString() });\n\n logger.info(`Fetching shamela.ws master database patch link: ${url.toString()}`);\n\n try {\n const response: Record<string, any> = await httpsGet(url);\n return { url: response.patch_url, version: response.version };\n } catch (error: any) {\n throw new Error(`Error fetching master patch: ${error.message}`);\n }\n};\n\n/**\n * Generates the URL for a book's cover image.\n *\n * This function constructs the URL to access the cover image for a specific book\n * using the book's ID and the API endpoint host.\n *\n * @param bookId - The unique identifier of the book\n * @returns The complete URL to the book's cover image\n *\n * @example\n * ```typescript\n * const coverUrl = getCoverUrl(123);\n * console.log(coverUrl); // \"https://api.shamela.ws/covers/123.jpg\"\n * ```\n */\nexport const getCoverUrl = (bookId: number) => {\n const { origin } = new URL(process.env.SHAMELA_API_MASTER_PATCH_ENDPOINT!);\n return `${origin}/covers/${bookId}.jpg`;\n};\n\n/**\n * Downloads and processes the master database from the Shamela service.\n *\n * The master database contains comprehensive information about all books, authors,\n * and categories available in the Shamela library. 
This function downloads the\n * database files, creates the necessary tables, and exports the data in the\n * specified format (JSON or SQLite).\n *\n * @param options - Configuration options including output file path and optional master metadata\n * @returns A promise that resolves to the path of the created output file\n *\n * @throws {Error} When download fails, expected tables are missing, database operations fail, or file operations fail\n *\n * @example\n * ```typescript\n * // Download master database as JSON\n * const jsonPath = await downloadMasterDatabase({\n * outputFile: { path: './master.json' }\n * });\n *\n * // Download master database as SQLite\n * const dbPath = await downloadMasterDatabase({\n * outputFile: { path: './master.db' }\n * });\n * ```\n */\nexport const downloadMasterDatabase = async (options: DownloadMasterOptions): Promise<string> => {\n logger.info(`downloadMasterDatabase ${JSON.stringify(options)}`);\n\n const outputDir = await createTempDir('shamela_downloadMaster');\n\n const masterResponse: GetMasterMetadataResponsePayload =\n options.masterMetadata || (await getMasterMetadata(DEFAULT_MASTER_METADATA_VERSION));\n\n logger.info(`Downloading master database from: ${JSON.stringify(masterResponse)}`);\n const sourceTables: string[] = await unzipFromUrl(fixHttpsProtocol(masterResponse.url), outputDir);\n\n logger.info(`sourceTables downloaded: ${sourceTables.toString()}`);\n\n if (!validateMasterSourceTables(sourceTables)) {\n logger.error(`Some source tables were not found: ${sourceTables.toString()}`);\n throw new Error('Expected tables not found!');\n }\n\n const dbPath = path.join(outputDir, 'master.db');\n\n const client = new Database(dbPath);\n\n try {\n logger.info(`Creating tables`);\n await createMasterTables(client);\n\n logger.info(`Copying data to master table`);\n await copyForeignMasterTableData(client, sourceTables);\n\n const { ext: extension } = path.parse(options.outputFile.path);\n\n if (extension === '.json') {\n const result = await getMasterData(client);\n await Bun.file(options.outputFile.path).write(JSON.stringify(result, null, 2));\n }\n\n client.close();\n\n if (extension === '.db' || extension === '.sqlite') {\n await fs.rename(dbPath, options.outputFile.path);\n }\n\n await fs.rm(outputDir, { recursive: true });\n } finally {\n client.close();\n }\n\n return options.outputFile.path;\n};\n\n/**\n * Retrieves complete book data including pages and titles.\n *\n * This is a convenience function that downloads a book's data and returns it\n * as a structured JavaScript object. 
The function handles the temporary file\n * creation and cleanup automatically.\n *\n * @param id - The unique identifier of the book to retrieve\n * @returns A promise that resolves to the complete book data including pages and titles\n *\n * @throws {Error} When download fails, file operations fail, or JSON parsing fails\n *\n * @example\n * ```typescript\n * const bookData = await getBook(123);\n * console.log(bookData.pages.length); // Number of pages in the book\n * console.log(bookData.titles?.length); // Number of title entries\n * ```\n */\nexport const getBook = async (id: number): Promise<BookData> => {\n const outputDir = await createTempDir('shamela_getBookData');\n const outputPath = await downloadBook(id, { outputFile: { path: path.join(outputDir, `${id}.json`) } });\n\n const data: BookData = await Bun.file(outputPath).json();\n await fs.rm(outputDir, { recursive: true });\n\n return data;\n};\n","import { Database } from 'bun:sqlite';\n\nimport type { BookData, Page, Title } from '../types';\nimport logger from '../utils/logger';\nimport { Tables } from './types';\n\ntype Row = Record<string, any> & { is_deleted?: string };\n\nconst PATCH_NOOP_VALUE = '#';\n\n/**\n * Retrieves column information for a specified table.\n * @param db - The database instance\n * @param table - The table name to get info for\n * @returns Array of column information with name and type\n */\nconst getTableInfo = (db: Database, table: Tables) => {\n return db.query(`PRAGMA table_info(${table})`).all() as { name: string; type: string }[];\n};\n\n/**\n * Checks if a table exists in the database.\n * @param db - The database instance\n * @param table - The table name to check\n * @returns True if the table exists, false otherwise\n */\nconst hasTable = (db: Database, table: Tables): boolean => {\n const result = db.query(`SELECT name FROM sqlite_master WHERE type='table' AND name = ?1`).get(table) as\n | { name: string }\n | undefined;\n return Boolean(result);\n};\n\n/**\n * Reads all rows from a specified table.\n * @param db - The database instance\n * @param table - The table name to read from\n * @returns Array of rows, or empty array if table doesn't exist\n */\nconst readRows = (db: Database, table: Tables): Row[] => {\n if (!hasTable(db, table)) {\n return [];\n }\n\n return db.query(`SELECT * FROM ${table}`).all() as Row[];\n};\n\n/**\n * Checks if a row is marked as deleted.\n * @param row - The row to check\n * @returns True if the row has is_deleted field set to '1', false otherwise\n */\nconst isDeleted = (row: Row): boolean => {\n return String(row.is_deleted) === '1';\n};\n\n/**\n * Merges values from a base row and patch row, with patch values taking precedence.\n * @param baseRow - The original row data (can be undefined)\n * @param patchRow - The patch row data with updates (can be undefined)\n * @param columns - Array of column names to merge\n * @returns Merged row with combined values\n */\nconst mergeRowValues = (baseRow: Row | undefined, patchRow: Row | undefined, columns: string[]): Row => {\n const merged: Row = {};\n\n for (const column of columns) {\n if (column === 'id') {\n merged.id = (patchRow ?? baseRow)?.id ?? 
null;\n continue;\n }\n\n if (patchRow && Object.hasOwn(patchRow, column)) {\n const value = patchRow[column];\n\n if (value !== PATCH_NOOP_VALUE && value !== null && value !== undefined) {\n merged[column] = value;\n continue;\n }\n }\n\n if (baseRow && Object.hasOwn(baseRow, column)) {\n merged[column] = baseRow[column];\n continue;\n }\n\n merged[column] = null;\n }\n\n return merged;\n};\n\n/**\n * Merges arrays of base rows and patch rows, handling deletions and updates.\n * @param baseRows - Original rows from the base database\n * @param patchRows - Patch rows containing updates, additions, and deletions\n * @param columns - Array of column names to merge\n * @returns Array of merged rows with patches applied\n */\nconst mergeRows = (baseRows: Row[], patchRows: Row[], columns: string[]): Row[] => {\n const baseIds = new Set<string>();\n const patchById = new Map<string, Row>();\n\n for (const row of baseRows) {\n baseIds.add(String(row.id));\n }\n\n for (const row of patchRows) {\n patchById.set(String(row.id), row);\n }\n\n const merged: Row[] = [];\n\n for (const baseRow of baseRows) {\n const patchRow = patchById.get(String(baseRow.id));\n\n if (patchRow && isDeleted(patchRow)) {\n continue;\n }\n\n merged.push(mergeRowValues(baseRow, patchRow, columns));\n }\n\n for (const row of patchRows) {\n const id = String(row.id);\n\n if (baseIds.has(id) || isDeleted(row)) {\n continue;\n }\n\n merged.push(mergeRowValues(undefined, row, columns));\n }\n\n return merged;\n};\n\n/**\n * Inserts multiple rows into a specified table using a prepared statement.\n * @param db - The database instance\n * @param table - The table name to insert into\n * @param columns - Array of column names\n * @param rows - Array of row data to insert\n */\nconst insertRows = (db: Database, table: Tables, columns: string[], rows: Row[]) => {\n if (rows.length === 0) {\n return;\n }\n\n const placeholders = columns.map(() => '?').join(',');\n const statement = db.prepare(`INSERT INTO ${table} (${columns.join(',')}) VALUES (${placeholders})`);\n\n rows.forEach((row) => {\n const values = columns.map((column) => (column in row ? 
row[column] : null));\n // Spread the values array instead of passing it directly\n statement.run(...values);\n });\n\n statement.finalize();\n};\n\n/**\n * Ensures the target database has the same table schema as the source database.\n * @param target - The target database to create/update the table in\n * @param source - The source database to copy the schema from\n * @param table - The table name to ensure schema for\n * @returns True if schema was successfully ensured, false otherwise\n */\nconst ensureTableSchema = (target: Database, source: Database, table: Tables) => {\n const row = source.query(`SELECT sql FROM sqlite_master WHERE type='table' AND name = ?1`).get(table) as\n | { sql: string }\n | undefined;\n\n if (!row?.sql) {\n logger.warn(`${table} table definition missing in source database`);\n return false;\n }\n\n target.run(`DROP TABLE IF EXISTS ${table}`);\n target.run(row.sql);\n return true;\n};\n\n/**\n * Copies and patches a table from source to target database, applying patch updates if provided.\n * @param target - The target database to copy/patch the table to\n * @param source - The source database containing the base table data\n * @param patch - Optional patch database containing updates (can be null)\n * @param table - The table name to copy and patch\n */\nconst copyAndPatchTable = (target: Database, source: Database, patch: Database | null, table: Tables) => {\n if (!hasTable(source, table)) {\n logger.warn(`${table} table missing in source database`);\n return;\n }\n\n if (!ensureTableSchema(target, source, table)) {\n return;\n }\n\n const baseInfo = getTableInfo(source, table);\n const patchInfo = patch && hasTable(patch, table) ? getTableInfo(patch, table) : [];\n\n const columns = baseInfo.map((info) => info.name);\n\n for (const info of patchInfo) {\n if (!columns.includes(info.name)) {\n const columnType = info.type && info.type.length > 0 ? info.type : 'TEXT';\n target.run(`ALTER TABLE ${table} ADD COLUMN ${info.name} ${columnType}`);\n columns.push(info.name);\n }\n }\n\n const baseRows = readRows(source, table);\n const patchRows = patch ? 
readRows(patch, table) : [];\n\n const mergedRows = mergeRows(baseRows, patchRows, columns);\n\n insertRows(target, table, columns, mergedRows);\n};\n\n/**\n * Applies patches from a patch database to the main database.\n * @param db - The target database to apply patches to\n * @param aslDB - Path to the source ASL database file\n * @param patchDB - Path to the patch database file\n */\nexport const applyPatches = (db: Database, aslDB: string, patchDB: string) => {\n const source = new Database(aslDB);\n const patch = new Database(patchDB);\n\n try {\n db.transaction(() => {\n copyAndPatchTable(db, source, patch, Tables.Page);\n copyAndPatchTable(db, source, patch, Tables.Title);\n })();\n } finally {\n source.close();\n patch.close();\n }\n};\n\n/**\n * Copies table data from a source database without applying any patches.\n * @param db - The target database to copy data to\n * @param aslDB - Path to the source ASL database file\n */\nexport const copyTableData = (db: Database, aslDB: string) => {\n const source = new Database(aslDB);\n\n try {\n db.transaction(() => {\n copyAndPatchTable(db, source, null, Tables.Page);\n copyAndPatchTable(db, source, null, Tables.Title);\n })();\n } finally {\n source.close();\n }\n};\n\n/**\n * Creates the required tables (Page and Title) in the database with their schema.\n * @param db - The database instance to create tables in\n */\nexport const createTables = (db: Database) => {\n db.run(\n `CREATE TABLE ${Tables.Page} (\n id INTEGER,\n content TEXT,\n part TEXT,\n page TEXT,\n number TEXT,\n services TEXT,\n is_deleted TEXT\n )`,\n );\n db.run(\n `CREATE TABLE ${Tables.Title} (\n id INTEGER,\n content TEXT,\n page INTEGER,\n parent INTEGER,\n is_deleted TEXT\n )`,\n );\n};\n\n/**\n * Retrieves all pages from the Page table.\n * @param db - The database instance\n * @returns Array of all pages\n */\nexport const getAllPages = (db: Database) => {\n return db.query(`SELECT * FROM ${Tables.Page}`).all() as Page[];\n};\n\n/**\n * Retrieves all titles from the Title table.\n * @param db - The database instance\n * @returns Array of all titles\n */\nexport const getAllTitles = (db: Database) => {\n return db.query(`SELECT * FROM ${Tables.Title}`).all() as Title[];\n};\n\n/**\n * Retrieves all book data including pages and titles.\n * @param db - The database instance\n * @returns Object containing arrays of pages and titles\n */\nexport const getData = (db: Database): BookData => {\n return { pages: getAllPages(db), titles: getAllTitles(db) };\n};\n","type LogFunction = (...args: unknown[]) => void;\n\ninterface Logger {\n debug: LogFunction;\n error: LogFunction;\n info: LogFunction;\n warn: LogFunction;\n}\n\nconst SILENT_LOGGER = { debug: () => {}, error: () => {}, info: () => {}, warn: () => {} };\nlet logger: Logger = SILENT_LOGGER;\n\nexport const setLogger = (newLogger: Logger = SILENT_LOGGER) => {\n if (!newLogger.debug || !newLogger.error || !newLogger.info) {\n throw new Error('Logger must implement debug, error, and info methods');\n }\n\n logger = newLogger;\n};\n\nexport { logger as default };\n","import type { Database } from 'bun:sqlite';\nimport path from 'node:path';\n\nimport type { Author, Book, Category, MasterData } from '../types';\nimport { attachDB, detachDB } from './queryBuilder';\nimport { Tables } from './types';\n\n/**\n * Ensures the target database has the same table schema as the source database for a specific table.\n * @param db - The database instance\n * @param alias - The alias name of the attached database\n * @param 
table - The table name to ensure schema for\n * @throws {Error} When table definition is missing in the source database\n */\nconst ensureTableSchema = (db: Database, alias: string, table: Tables) => {\n const row = db.query(`SELECT sql FROM ${alias}.sqlite_master WHERE type='table' AND name = ?1`).get(table) as\n | { sql: string }\n | undefined;\n\n if (!row?.sql) {\n throw new Error(`Missing table definition for ${table} in ${alias}`);\n }\n\n db.run(`DROP TABLE IF EXISTS ${table}`);\n db.run(row.sql);\n};\n\n/**\n * Copies data from foreign master table files into the main master database.\n *\n * This function processes the source table files (author.sqlite, book.sqlite, category.sqlite)\n * by attaching them to the current database connection, then copying their data into\n * the main master database tables. It handles data transformation including filtering\n * out deleted records and converting placeholder values.\n *\n * @param db - The database client instance for the master database\n * @param sourceTables - Array of file paths to the source SQLite table files\n *\n * @throws {Error} When source files cannot be attached or data copying operations fail\n */\nexport const copyForeignMasterTableData = (db: Database, sourceTables: string[]) => {\n const aliasToPath: Record<string, string> = {};\n\n for (const tablePath of sourceTables) {\n const { name } = path.parse(tablePath);\n aliasToPath[name] = tablePath;\n }\n\n Object.entries(aliasToPath).forEach(([alias, dbPath]) => db.run(attachDB(dbPath, alias)));\n\n ensureTableSchema(db, Tables.Authors, Tables.Authors);\n ensureTableSchema(db, Tables.Books, Tables.Books);\n ensureTableSchema(db, Tables.Categories, Tables.Categories);\n\n const insertAuthors = db.prepare(`INSERT INTO ${Tables.Authors} SELECT * FROM ${Tables.Authors}.${Tables.Authors}`);\n const insertBooks = db.prepare(`INSERT INTO ${Tables.Books} SELECT * FROM ${Tables.Books}.${Tables.Books}`);\n const insertCategories = db.prepare(\n `INSERT INTO ${Tables.Categories} SELECT * FROM ${Tables.Categories}.${Tables.Categories}`,\n );\n\n db.transaction(() => {\n insertAuthors.run();\n insertBooks.run();\n insertCategories.run();\n })();\n\n Object.keys(aliasToPath).forEach((statement) => {\n db.run(detachDB(statement));\n });\n};\n\n/**\n * Creates a backward-compatible database view for legacy table names.\n * @param db - The database instance\n * @param viewName - The name of the view to create\n * @param sourceTable - The source table to base the view on\n */\nconst createCompatibilityView = (db: Database, viewName: string, sourceTable: Tables) => {\n db.run(`DROP VIEW IF EXISTS ${viewName}`);\n db.run(`CREATE VIEW ${viewName} AS SELECT * FROM ${sourceTable}`);\n};\n\n/**\n * Creates the necessary database tables for the master database.\n *\n * This function sets up the schema for the master database by creating\n * tables for authors, books, and categories with their respective columns\n * and data types. This is typically the first step in setting up a new\n * master database. 
Also creates backward-compatible views for legacy table names.\n *\n * @param db - The database client instance where tables should be created\n *\n * @throws {Error} When table creation fails due to database constraints or permissions\n */\nexport const createTables = (db: Database) => {\n db.run(\n `CREATE TABLE ${Tables.Authors} (\n id INTEGER,\n is_deleted TEXT,\n name TEXT,\n biography TEXT,\n death_text TEXT,\n death_number TEXT\n )`,\n );\n db.run(\n `CREATE TABLE ${Tables.Books} (\n id INTEGER,\n name TEXT,\n is_deleted TEXT,\n category TEXT,\n type TEXT,\n date TEXT,\n author TEXT,\n printed TEXT,\n minor_release TEXT,\n major_release TEXT,\n bibliography TEXT,\n hint TEXT,\n pdf_links TEXT,\n metadata TEXT\n )`,\n );\n db.run(\n `CREATE TABLE ${Tables.Categories} (\n id INTEGER,\n is_deleted TEXT,\n \"order\" TEXT,\n name TEXT\n )`,\n );\n\n // Provide backward-compatible pluralised views since callers historically\n // queried \"authors\", \"books\", and \"categories\" tables.\n createCompatibilityView(db, 'authors', Tables.Authors);\n createCompatibilityView(db, 'books', Tables.Books);\n createCompatibilityView(db, 'categories', Tables.Categories);\n};\n\n/**\n * Retrieves all authors from the Authors table.\n * @param db - The database instance\n * @returns Array of all authors\n */\nexport const getAllAuthors = (db: Database) => {\n return db.query(`SELECT * FROM ${Tables.Authors}`).all() as Author[];\n};\n\n/**\n * Retrieves all books from the Books table.\n * @param db - The database instance\n * @returns Array of all books\n */\nexport const getAllBooks = (db: Database) => {\n return db.query(`SELECT * FROM ${Tables.Books}`).all() as Book[];\n};\n\n/**\n * Retrieves all categories from the Categories table.\n * @param db - The database instance\n * @returns Array of all categories\n */\nexport const getAllCategories = (db: Database) => {\n return db.query(`SELECT * FROM ${Tables.Categories}`).all() as Category[];\n};\n\n/**\n * Retrieves all master data including authors, books, and categories.\n * @param db - The database instance\n * @returns Object containing arrays of authors, books, and categories\n */\nexport const getData = (db: Database) => {\n return { authors: getAllAuthors(db), books: getAllBooks(db), categories: getAllCategories(db) } as MasterData;\n};\n","/**\n * Generates SQL to attach a database file with an alias.\n * @param {string} dbFile - Path to the database file to attach\n * @param {string} alias - Alias name for the attached database\n * @returns {string} SQL ATTACH DATABASE statement\n */\nexport const attachDB = (dbFile: string, alias: string) => {\n const escapedPath = dbFile.replace(/'/g, \"''\");\n if (!/^[a-zA-Z0-9_]+$/.test(alias)) {\n throw new Error('Invalid database alias');\n }\n return `ATTACH DATABASE '${escapedPath}' AS ${alias}`;\n};\n\n/**\n * Generates SQL to create a table with specified fields.\n * @param {string} name - Name of the table to create\n * @param {string[]} fields - Array of field definitions\n * @returns {string} SQL CREATE TABLE statement\n */\nexport const createTable = (name: string, fields: string[]) => {\n if (!/^[a-zA-Z0-9_]+$/.test(name)) {\n throw new Error('Invalid table name');\n }\n fields.forEach((field) => {\n if (field.includes(';') || field.includes('--')) {\n throw new Error('Invalid field definition');\n }\n });\n return `CREATE TABLE IF NOT EXISTS ${name} (${fields.join(', ')})`;\n};\n\n/**\n * Generates SQL to detach a database by alias.\n * @param {string} alias - Alias of the database to 
detach\n * @returns {string} SQL DETACH DATABASE statement\n */\nexport const detachDB = (alias: string) => {\n if (!/^[a-zA-Z0-9_]+$/.test(alias)) {\n throw new Error('Invalid database alias');\n }\n return `DETACH DATABASE ${alias}`;\n};\n\n/**\n * Generates an unsafe SQL INSERT statement with provided field values.\n * @param {string} table - Name of the table to insert into\n * @param {Record<string, any>} fieldToValue - Object mapping field names to values\n * @param {boolean} [isDeleted=false] - Whether to mark the record as deleted\n * @returns {string} SQL INSERT statement (unsafe - does not escape values properly)\n * @warning This function does not properly escape SQL values and should not be used with untrusted input\n */\nexport const insertUnsafely = (table: string, fieldToValue: Record<string, any>, isDeleted = false) => {\n const combinedRecords: Record<string, any> = { ...fieldToValue, is_deleted: isDeleted ? '1' : '0' };\n\n const sortedKeys = Object.keys(combinedRecords).sort();\n\n const sortedValues = sortedKeys.map((key) => combinedRecords[key]);\n\n return `INSERT INTO ${table} (${sortedKeys.toString()}) VALUES (${sortedValues\n .map((val) => {\n if (val === null) {\n return 'NULL';\n }\n\n return typeof val === 'string' ? `'${val}'` : val;\n })\n .toString()})`;\n};\n","/**\n * The default version number for master metadata.\n * @constant {number}\n */\nexport const DEFAULT_MASTER_METADATA_VERSION = 0;\n\n/**\n * Placeholder value used to represent unknown or missing data.\n * @constant {string}\n */\nexport const UNKNOWN_VALUE_PLACEHOLDER = '99999';\n\n/**\n * Default rules to sanitize page content.\n */\nexport const DEFAULT_SANITIZATION_RULES: Record<string, string> = {\n '<img[^>]*>>': '',\n 舄: '',\n '﵌': 'صلى الله عليه وآله وسلم',\n};\n","import { createWriteStream, promises as fs } from 'node:fs';\nimport type { IncomingMessage } from 'node:http';\nimport https from 'node:https';\nimport os from 'node:os';\nimport path from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport unzipper, { type Entry } from 'unzipper';\n\n/**\n * Creates a temporary directory with an optional prefix.\n * @param {string} [prefix='shamela'] - The prefix to use for the temporary directory name\n * @returns {Promise<string>} A promise that resolves to the path of the created temporary directory\n */\nexport const createTempDir = async (prefix = 'shamela') => {\n const tempDirBase = path.join(os.tmpdir(), prefix);\n return fs.mkdtemp(tempDirBase);\n};\n\n/**\n * Checks if a file exists at the given path.\n * @param {string} path - The file path to check\n * @returns {Promise<boolean>} A promise that resolves to true if the file exists, false otherwise\n */\nexport const fileExists = async (filePath: string) => !!(await fs.stat(filePath).catch(() => false));\n\n/**\n * Downloads and extracts a ZIP file from a given URL without loading the entire file into memory.\n * @param {string} url - The URL of the ZIP file to download and extract\n * @param {string} outputDir - The directory where the files should be extracted\n * @returns {Promise<string[]>} A promise that resolves with the list of all extracted file paths\n * @throws {Error} When the download fails, extraction fails, or other network/filesystem errors occur\n */\nexport async function unzipFromUrl(url: string, outputDir: string): Promise<string[]> {\n const extractedFiles: string[] = [];\n\n try {\n // Make HTTPS request and get the response stream\n const response = await new 
Promise<IncomingMessage>((resolve, reject) => {\n https\n .get(url, (res) => {\n if (res.statusCode !== 200) {\n reject(new Error(`Failed to download ZIP file: ${res.statusCode} ${res.statusMessage}`));\n } else {\n resolve(res);\n }\n })\n .on('error', (err) => {\n reject(new Error(`HTTPS request failed: ${err.message}`));\n });\n });\n\n // Process the ZIP file using unzipper.Extract with proper event handling\n await new Promise<void>((resolve, reject) => {\n const unzipStream = unzipper.Parse();\n const entryPromises: Promise<void>[] = [];\n\n unzipStream.on('entry', (entry: Entry) => {\n const entryPromise = (async () => {\n const filePath = path.join(outputDir, entry.path);\n\n if (entry.type === 'Directory') {\n // Ensure the directory exists\n await fs.mkdir(filePath, { recursive: true });\n entry.autodrain();\n } else {\n // Ensure the parent directory exists\n const dir = path.dirname(filePath);\n await fs.mkdir(dir, { recursive: true });\n\n // Create write stream and pipe entry to it\n const writeStream = createWriteStream(filePath);\n await pipeline(entry, writeStream);\n extractedFiles.push(filePath);\n }\n })();\n\n entryPromises.push(entryPromise);\n });\n\n unzipStream.on('finish', async () => {\n try {\n // Wait for all entries to be processed\n await Promise.all(entryPromises);\n resolve();\n } catch (error) {\n reject(error);\n }\n });\n\n unzipStream.on('error', (error) => {\n reject(new Error(`Error during extraction: ${error.message}`));\n });\n\n // Pipe the response to the unzip stream\n response.pipe(unzipStream);\n });\n\n return extractedFiles;\n } catch (error: any) {\n throw new Error(`Error processing URL: ${error.message}`);\n }\n}\n","import { Buffer } from 'node:buffer';\nimport type { IncomingMessage } from 'node:http';\nimport https from 'node:https';\nimport process from 'node:process';\nimport { URL, URLSearchParams } from 'node:url';\n\n/**\n * Builds a URL with query parameters and optional authentication.\n * @param {string} endpoint - The base endpoint URL\n * @param {Record<string, any>} queryParams - Object containing query parameters to append\n * @param {boolean} [useAuth=true] - Whether to include the API key from environment variables\n * @returns {URL} The constructed URL object with query parameters\n */\nexport const buildUrl = (endpoint: string, queryParams: Record<string, any>, useAuth: boolean = true): URL => {\n const url = new URL(endpoint);\n {\n const params = new URLSearchParams();\n\n Object.entries(queryParams).forEach(([key, value]) => {\n params.append(key, value.toString());\n });\n\n if (useAuth) {\n params.append('api_key', process.env.SHAMELA_API_KEY!);\n }\n\n url.search = params.toString();\n }\n\n return url;\n};\n\n/**\n * Makes an HTTPS GET request and returns the response data.\n * @template T - The expected return type (Buffer or Record<string, any>)\n * @param {string | URL} url - The URL to make the request to\n * @returns {Promise<T>} A promise that resolves to the response data, parsed as JSON if content-type is application/json, otherwise as Buffer\n * @throws {Error} When the request fails or JSON parsing fails\n */\nexport const httpsGet = <T extends Buffer | Record<string, any>>(url: string | URL): Promise<T> => {\n return new Promise((resolve, reject) => {\n https\n .get(url, (res: IncomingMessage) => {\n const contentType = res.headers['content-type'] || '';\n const dataChunks: Buffer[] = [];\n\n res.on('data', (chunk: Buffer) => {\n dataChunks.push(chunk);\n });\n\n res.on('end', () => {\n const fullData = 
Buffer.concat(dataChunks);\n\n if (contentType.includes('application/json')) {\n try {\n const json = JSON.parse(fullData.toString('utf-8'));\n resolve(json);\n } catch (error: any) {\n reject(new Error(`Failed to parse JSON: ${error.message}`));\n }\n } else {\n resolve(fullData as T);\n }\n });\n })\n .on('error', (error) => {\n reject(new Error(`Error making request: ${error.message}`));\n });\n });\n};\n","import path from 'node:path';\nimport process from 'node:process';\n\nconst SOURCE_TABLES = ['author.sqlite', 'book.sqlite', 'category.sqlite'];\n\n/**\n * Validates that required environment variables are set.\n * @throws {Error} When any required environment variable is missing\n */\nexport const validateEnvVariables = () => {\n const envVariablesNotFound = [\n 'SHAMELA_API_MASTER_PATCH_ENDPOINT',\n 'SHAMELA_API_BOOKS_ENDPOINT',\n 'SHAMELA_API_KEY',\n ].filter((key) => !process.env[key]);\n\n if (envVariablesNotFound.length) {\n throw new Error(`${envVariablesNotFound.join(', ')} environment variables not set`);\n }\n};\n\n/**\n * Validates that all required master source tables are present in the provided paths.\n * @param {string[]} sourceTablePaths - Array of file paths to validate\n * @returns {boolean} True if all required source tables (author.sqlite, book.sqlite, category.sqlite) are present\n */\nexport const validateMasterSourceTables = (sourceTablePaths: string[]) => {\n const sourceTableNames = new Set(sourceTablePaths.map((tablePath) => path.basename(tablePath).toLowerCase()));\n return SOURCE_TABLES.every((table) => sourceTableNames.has(table.toLowerCase()));\n};\n","import { DEFAULT_SANITIZATION_RULES } from './utils/constants';\n\nexport type Line = {\n id?: string;\n text: string;\n};\n\nconst PUNCT_ONLY = /^[)\\]\\u00BB\"”'’.,?!:\\u061B\\u060C\\u061F\\u06D4\\u2026]+$/;\nconst OPENER_AT_END = /[[({«“‘]$/;\n\nconst mergeDanglingPunctuation = (lines: Line[]): Line[] => {\n const out: Line[] = [];\n for (const item of lines) {\n const last = out[out.length - 1];\n if (last?.id && PUNCT_ONLY.test(item.text)) {\n last.text += item.text;\n } else {\n out.push(item);\n }\n }\n return out;\n};\n\nconst splitIntoLines = (text: string) => {\n let normalized = text.replace(/\\r\\n/g, '\\n').replace(/\\r/g, '\\n');\n\n if (!/\\n/.test(normalized)) {\n normalized = normalized.replace(/([.?!\\u061F\\u061B\\u06D4\\u2026][\"“”'’»«)\\]]?)\\s+(?=[\\u0600-\\u06FF])/, '$1\\n');\n }\n\n return normalized\n .split('\\n')\n .map((line) => line.replace(/^\\*+/, '').trim())\n .filter(Boolean);\n};\n\nconst processTextContent = (content: string): Line[] => {\n return splitIntoLines(content).map((line) => ({ text: line }));\n};\n\nconst extractAttribute = (tag: string, name: string): string | undefined => {\n const pattern = new RegExp(`${name}\\\\s*=\\\\s*(\"([^\"]*)\"|'([^']*)'|([^s>]+))`, 'i');\n const match = tag.match(pattern);\n if (!match) {\n return undefined;\n }\n return match[2] ?? match[3] ?? 
match[4];\n};\n\ntype Token =\n | { type: 'text'; value: string }\n | { type: 'start'; name: string; attributes: Record<string, string | undefined> }\n | { type: 'end'; name: string };\n\nconst tokenize = (html: string): Token[] => {\n const tokens: Token[] = [];\n const tagRegex = /<[^>]+>/g;\n let lastIndex = 0;\n let match: RegExpExecArray | null;\n match = tagRegex.exec(html);\n\n while (match) {\n if (match.index > lastIndex) {\n tokens.push({ type: 'text', value: html.slice(lastIndex, match.index) });\n }\n\n const raw = match[0];\n const isEnd = /^<\\//.test(raw);\n const nameMatch = raw.match(/^<\\/?\\s*([a-zA-Z0-9:-]+)/);\n const name = nameMatch ? nameMatch[1].toLowerCase() : '';\n\n if (isEnd) {\n tokens.push({ name, type: 'end' });\n } else {\n const attributes: Record<string, string | undefined> = {};\n attributes.id = extractAttribute(raw, 'id');\n attributes['data-type'] = extractAttribute(raw, 'data-type');\n tokens.push({ attributes, name, type: 'start' });\n }\n\n lastIndex = tagRegex.lastIndex;\n match = tagRegex.exec(html);\n }\n\n if (lastIndex < html.length) {\n tokens.push({ type: 'text', value: html.slice(lastIndex) });\n }\n\n return tokens;\n};\n\nconst maybeAppendToPrevTitle = (result: Line[], raw: string) => {\n const last = result[result.length - 1];\n if (!raw) {\n return false;\n }\n if (!last || !last.id) {\n return false;\n }\n if (!OPENER_AT_END.test(last.text)) {\n return false;\n }\n if (/\\n/.test(raw)) {\n return false;\n }\n last.text += raw.replace(/^\\s+/, '');\n return true;\n};\n\nexport const parseContentRobust = (content: string): Line[] => {\n if (!/<span[^>]*>/i.test(content)) {\n return processTextContent(content);\n }\n\n const tokens = tokenize(`<root>${content}</root>`);\n const result: Line[] = [];\n\n let titleDepth = 0;\n let currentTitle: Line | null = null;\n\n const pushText = (raw: string) => {\n if (!raw) {\n return;\n }\n\n if (titleDepth > 0 && currentTitle) {\n const cleaned = titleDepth === 1 ? raw.replace(/^\\s+/, '') : raw;\n currentTitle.text += cleaned;\n return;\n }\n\n if (maybeAppendToPrevTitle(result, raw)) {\n return;\n }\n\n const text = raw.trim();\n if (text) {\n result.push(...processTextContent(text));\n }\n };\n\n for (const token of tokens) {\n if (token.type === 'text') {\n pushText(token.value);\n } else if (token.type === 'start' && token.name === 'span') {\n const dataType = token.attributes['data-type'];\n if (dataType === 'title') {\n if (titleDepth === 0) {\n const id = token.attributes.id?.replace(/^toc-/, '') ?? '';\n currentTitle = { id, text: '' };\n result.push(currentTitle);\n }\n titleDepth += 1;\n }\n } else if (token.type === 'end' && token.name === 'span') {\n if (titleDepth > 0) {\n titleDepth -= 1;\n if (titleDepth === 0) {\n currentTitle = null;\n }\n }\n }\n }\n\n const cleaned = result.map((line) => (line.id ? line : { ...line, text: line.text.trim() }));\n\n return mergeDanglingPunctuation(cleaned.map((line) => (line.id ? 
line : { ...line, text: line.text }))).filter(\n (line) => line.text.length > 0,\n );\n};\n\nconst DEFAULT_COMPILED_RULES = Object.entries(DEFAULT_SANITIZATION_RULES).map(([pattern, replacement]) => ({\n regex: new RegExp(pattern, 'g'),\n replacement,\n}));\n\n/**\n * Compiles sanitization rules into RegExp objects for performance\n */\nconst getCompiledRules = (rules: Record<string, string>) => {\n if (rules === DEFAULT_SANITIZATION_RULES) {\n return DEFAULT_COMPILED_RULES;\n }\n\n const compiled = [];\n for (const pattern in rules) {\n compiled.push({\n regex: new RegExp(pattern, 'g'),\n replacement: rules[pattern],\n });\n }\n return compiled;\n};\n\n/**\n * Sanitizes page content by applying regex replacement rules\n * @param text - The text to sanitize\n * @param rules - Optional custom rules (defaults to DEFAULT_SANITIZATION_RULES)\n * @returns The sanitized text\n */\nexport const sanitizePageContent = (\n text: string,\n rules: Record<string, string> = DEFAULT_SANITIZATION_RULES,\n): string => {\n const compiledRules = getCompiledRules(rules);\n\n let content = text;\n for (let i = 0; i < compiledRules.length; i++) {\n const { regex, replacement } = compiledRules[i];\n content = content.replace(regex, replacement);\n }\n return content;\n};\n\nexport const splitPageBodyFromFooter = (content: string, footnoteMarker = '_________') => {\n let footnote = '';\n const indexOfFootnote = content.lastIndexOf(footnoteMarker);\n\n if (indexOfFootnote >= 0) {\n footnote = content.slice(indexOfFootnote + footnoteMarker.length);\n content = content.slice(0, indexOfFootnote);\n }\n\n return [content, footnote] as const;\n};\n"],"mappings":"…","names":["…"]}
package/package.json
CHANGED

@@ -1,28 +1,22 @@
 {
-    "
-    "
+    "author": "Ragaeeb Haq",
+    "default": "./dist/index.js",
+    "dependencies": {
+        "unzipper": "^0.12.3"
+    },
     "description": "Library to interact with the Maktabah Shamela v4 APIs",
-    "
-    "
-    "
+    "devDependencies": {
+        "@biomejs/biome": "^2.2.4",
+        "@types/bun": "^1.2.22",
+        "@types/node": "^24.5.2",
+        "@types/unzipper": "^0.10.11",
+        "semantic-release": "^24.2.9",
+        "tsup": "^8.5.0"
     },
-    "private": false,
-    "main": "dist/index.js",
-    "types": "dist/index.d.ts",
-    "default": "./dist/index.js",
-    "source": "src/index.ts",
-    "type": "module",
     "engines": {
-        "bun": ">=1.2.
+        "bun": ">=1.2.22",
         "node": ">=22.0.0"
     },
-    "scripts": {
-        "build": "tsup",
-        "test": "bun test src/ --coverage --coverage-reporter=lcov",
-        "e2e": "bun test e2e --env-file .env",
-        "e2e:ci": "bun test e2e"
-    },
-    "sideEffects": false,
     "exports": {
         ".": {
             "import": "./dist/index.js",
@@ -38,27 +32,24 @@
         "Islamic",
         "Muslim"
     ],
-    "author": "Ragaeeb Haq",
     "license": "MIT",
-    "
-
-
-
-    "
-    "
-        "@types/unzipper": "^0.10.11",
-        "dotenv-vault": "^1.27.0",
-        "eslint": "^9.34.0",
-        "eslint-config-prettier": "^10.1.8",
-        "eslint-plugin-perfectionist": "^4.15.0",
-        "eslint-plugin-prettier": "^5.5.4",
-        "globals": "^16.3.0",
-        "prettier": "^3.6.2",
-        "semantic-release": "^24.2.7",
-        "tsup": "^8.5.0",
-        "typescript-eslint": "^8.41.0"
+    "main": "dist/index.js",
+    "name": "shamela",
+    "private": false,
+    "repository": {
+        "type": "git",
+        "url": "git+https://github.com/ragaeeb/shamela.git"
     },
-    "
-    "
-
+    "scripts": {
+        "build": "tsup",
+        "e2e": "bun test e2e --env-file .env",
+        "e2e:ci": "bun test e2e",
+        "lint": "biome check .",
+        "test": "bun test src/ --coverage --coverage-reporter=lcov"
+    },
+    "sideEffects": false,
+    "source": "src/index.ts",
+    "type": "module",
+    "types": "dist/index.d.ts",
+    "version": "1.2.1"
 }