shamela 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.MD +7 -0
- package/README.md +199 -0
- package/dist/index.d.ts +52 -0
- package/dist/main.js +577 -0
- package/dist/main.js.map +1 -0
- package/package.json +68 -0
package/LICENSE.MD
ADDED

@@ -0,0 +1,7 @@

Copyright 2024 Ragaeeb Haq

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package/README.md
ADDED

@@ -0,0 +1,199 @@

# Shamela

[](https://wakatime.com/badge/user/a0b906ce-b8e7-4463-8bce-383238df6d4b/project/faef70ab-efdb-448b-ab83-0fc66c95888e) [](https://github.com/ragaeeb/shamela/actions/workflows/e2e.yml) [](https://github.com/ragaeeb/shamela/actions/workflows/build.yml) [](https://codecov.io/gh/ragaeeb/shamela) [](https://bundlejs.com/?q=shamela%401.0.0)

A NodeJS library for accessing the Maktabah Shamela v4 APIs and downloading their databases. It provides easy-to-use functions to interact with the Shamela API, download the master and book databases, and retrieve book data programmatically.

## Table of Contents

- [Installation](#installation)
- [Environment Variables](#environment-variables)
- [Usage](#usage)
    - [Getting Started](#getting-started)
    - [API Functions](#api-functions)
        - [getMasterMetadata](#getmastermetadata)
        - [downloadMasterDatabase](#downloadmasterdatabase)
        - [getBookMetadata](#getbookmetadata)
        - [downloadBook](#downloadbook)
        - [getBook](#getbook)
- [Examples](#examples)
    - [Downloading the Master Database](#downloading-the-master-database)
    - [Downloading a Book](#downloading-a-book)
    - [Retrieving Book Data](#retrieving-book-data)
- [Testing](#testing)
- [License](#license)

## Installation

```bash
npm install shamela
```

or

```bash
yarn add shamela
```

or

```bash
pnpm install shamela
```

## Environment Variables

Before using the library, you need to set up some environment variables for API keys and endpoints:

- `SHAMELA_API_KEY`: Your API key for accessing the Shamela API.
- `SHAMELA_API_MASTER_PATCH_ENDPOINT`: The endpoint URL for the master database patches.
- `SHAMELA_API_BOOKS_ENDPOINT`: The base endpoint URL for book-related API calls.

You can set these variables in a `.env` file at the root of your project:

```dotenv
SHAMELA_API_KEY=your_api_key_here
SHAMELA_API_MASTER_PATCH_ENDPOINT=https://shamela.ws/api/master_patch
SHAMELA_API_BOOKS_ENDPOINT=https://shamela.ws/api/books
```
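
The library reads these values from `process.env` at call time and does not load the `.env` file on its own, so load it yourself before calling any API function. One common way, assuming you use the `dotenv` package, is shown below:

```javascript
import 'dotenv/config'; // loads the .env file into process.env before shamela reads it
import { getMasterMetadata } from 'shamela';

(async () => {
    const metadata = await getMasterMetadata();
    console.log(metadata.url, metadata.version);
})();
```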
## Usage

### Getting Started

First, import the library functions into your project:

```javascript
import { getMasterMetadata, downloadMasterDatabase, getBookMetadata, downloadBook, getBook } from 'shamela';
```

### API Functions

#### getMasterMetadata

Fetches metadata for the master database.

```typescript
getMasterMetadata(version?: number): Promise<GetMasterMetadataResponsePayload>
```

- version (optional): The version number of the master database you want to fetch.

Example:

```javascript
const masterMetadata = await getMasterMetadata();
```
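
The returned payload (`url` and `version`) can be passed straight to `downloadMasterDatabase` (described below) via its optional `masterMetadata` field, which skips the extra metadata request:

```javascript
const masterMetadata = await getMasterMetadata();

await downloadMasterDatabase({
    masterMetadata, // reuse the payload instead of fetching it again
    outputFile: { path: './master.db' },
});
```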
#### downloadMasterDatabase

Downloads the master database and saves it to a specified path.

```typescript
downloadMasterDatabase(options: DownloadMasterOptions): Promise<string>
```

- options: An object containing:
    - masterMetadata (optional): The metadata obtained from getMasterMetadata.
    - outputFile: An object specifying the output path.

Example:

```javascript
await downloadMasterDatabase({
    outputFile: { path: './master.db' },
});
```
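
The output format follows the extension of `outputFile.path`: a `.db` or `.sqlite` path produces an SQLite database, while a `.json` path writes the authors, books, and categories as JSON:

```javascript
// Writes { authors, books, categories } as JSON instead of an SQLite file.
await downloadMasterDatabase({
    outputFile: { path: './master.json' },
});
```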
#### getBookMetadata

Fetches metadata for a specific book.

```typescript
getBookMetadata(id: number, options?: GetBookMetadataOptions): Promise<GetBookMetadataResponsePayload>
```

- id: The ID of the book.
- options (optional): An object containing:
    - majorVersion: The major version of the book.
    - minorVersion: The minor version of the book.

Example:

```javascript
const bookMetadata = await getBookMetadata(26592);
```
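#### downloadBook

Downloads a book database (applying any available patch release) and saves it to a specified path.

```typescript
downloadBook(id: number, options: DownloadBookOptions): Promise<string>
```

- id: The ID of the book.
- options: An object containing:
    - bookMetadata (optional): The metadata obtained from getBookMetadata.
    - outputFile: An object specifying the output path (`.db`/`.sqlite` for an SQLite file, `.json` for JSON).

Example:

```javascript
await downloadBook(26592, {
    outputFile: { path: './book.db' },
});
```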
#### getBook

Retrieves the data of a book as a JavaScript object.

```typescript
getBook(id: number): Promise<BookData>
```

- id: The ID of the book.

Example:

```javascript
const bookData = await getBook(26592);
```

## Examples

### Downloading the Master Database

```javascript
import { downloadMasterDatabase } from 'shamela';

(async () => {
    await downloadMasterDatabase({
        outputFile: { path: './master.db' },
    });
})();
```

### Downloading a Book

```javascript
import { downloadBook } from 'shamela';

(async () => {
    await downloadBook(26592, {
        outputFile: { path: './book.db' },
    });
})();
```

### Retrieving Book Data

```javascript
import { getBook } from 'shamela';

(async () => {
    const bookData = await getBook(26592);
    console.log(bookData);
})();
```
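
The object returned by `getBook` follows the `BookData` shape from the type declarations: a `pages` array (each page has `content`, `id`, and optional `number`, `page`, and `part` fields) and an optional `titles` array. A small sketch of iterating over it:

```javascript
import { getBook } from 'shamela';

(async () => {
    const { pages, titles } = await getBook(26592);

    console.log(`Pages: ${pages.length}, titles: ${(titles || []).length}`);
    console.log(pages[0].content); // the page text as returned by the API
})();
```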
## Testing

The library includes tests to help you understand how the APIs are used. To run the tests, ensure you have the necessary environment variables set, then execute:

```bash
npm run test
```

For end-to-end tests:

```bash
npm run e2e
```

## License

This project is licensed under the MIT License - see the LICENSE file for details.
package/dist/index.d.ts
ADDED

@@ -0,0 +1,52 @@

type GetMasterMetadataResponsePayload = {
    url: string;
    version: number;
};
interface OutputOptions {
    path: string;
}
type DownloadMasterOptions = {
    masterMetadata?: GetMasterMetadataResponsePayload;
    outputFile: OutputOptions;
};
type GetBookMetadataOptions = {
    majorVersion: number;
    minorVersion: number;
};
type GetBookMetadataResponsePayload = {
    majorRelease: number;
    majorReleaseUrl: string;
    minorRelease?: number;
    minorReleaseUrl?: string;
};
interface OutputBookOptions extends OutputOptions {
    removeHeaderTags?: boolean;
}
type DownloadBookOptions = {
    bookMetadata?: GetBookMetadataResponsePayload;
    outputFile: OutputBookOptions;
};
type Page = {
    content: string;
    id: number;
    number?: number;
    page?: number;
    part?: number;
};
type Title = {
    content: string;
    id: number;
    page: number;
    parent?: number;
};
type BookData = {
    pages: Page[];
    titles?: Title[];
};
export const getMasterMetadata: (version?: number) => Promise<GetMasterMetadataResponsePayload>;
export const downloadMasterDatabase: (options: DownloadMasterOptions) => Promise<string>;
export const getBookMetadata: (id: number, options?: GetBookMetadataOptions) => Promise<GetBookMetadataResponsePayload>;
export const downloadBook: (id: number, options: DownloadBookOptions) => Promise<string>;
export const getBook: (id: number) => Promise<BookData>;

//# sourceMappingURL=index.d.ts.map
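
These declarations mirror the two-step flow documented in the README: the metadata payloads returned by `getMasterMetadata`/`getBookMetadata` can be fed back into the corresponding download call. A minimal sketch (the book ID and output path are illustrative):

```javascript
import { downloadBook, getBookMetadata } from 'shamela';

(async () => {
    // GetBookMetadataResponsePayload: majorRelease, majorReleaseUrl, plus optional minor release fields.
    const bookMetadata = await getBookMetadata(26592);

    // DownloadBookOptions: reuse the metadata and pick the output format via the file extension.
    await downloadBook(26592, { bookMetadata, outputFile: { path: './26592.json' } });
})();
```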
package/dist/main.js
ADDED

@@ -0,0 +1,577 @@

import {createClient as $5oumB$createClient} from "@libsql/client";
import {promises as $5oumB$promises, createWriteStream as $5oumB$createWriteStream} from "fs";
import $5oumB$path from "path";
import $5oumB$process from "process";
import {URL as $5oumB$URL, URLSearchParams as $5oumB$URLSearchParams} from "url";
import $5oumB$pino from "pino";
import $5oumB$pinopretty from "pino-pretty";
import $5oumB$https from "https";
import $5oumB$os from "os";
import {pipeline as $5oumB$pipeline} from "stream/promises";
import $5oumB$unzipper from "unzipper";
import {Buffer as $5oumB$Buffer} from "buffer";

const $2d2b29d79cbbfeda$var$stream = (0, $5oumB$pinopretty)({
    colorize: true
});
const $2d2b29d79cbbfeda$var$logger = (0, $5oumB$pino)({
    base: {
        pid: undefined,
        hostname: undefined
    },
    level: (0, $5oumB$process).env.LOG_LEVEL || "info"
}, $2d2b29d79cbbfeda$var$stream);
var $2d2b29d79cbbfeda$export$2e2bcd8739ae039 = $2d2b29d79cbbfeda$var$logger;

const $b142353d92e15b6f$export$3274d151f0598f1 = async (client, table)=>{
    const { rows: rows } = await client.execute(`SELECT * FROM ${table}`);
    return rows;
};

const $e6f751831b705ed8$var$MAIN_DB_ALIAS = "main";
const $e6f751831b705ed8$export$33bbb3ec7652e187 = (name, fields)=>`CREATE TABLE IF NOT EXISTS ${name} (${fields.join(", ")})`;
const $e6f751831b705ed8$export$ee56083bb7df7ecc = (dbFile, alias)=>`ATTACH DATABASE '${dbFile}' AS ${alias}`;
const $e6f751831b705ed8$export$7fec5208c714b262 = (alias)=>`DETACH DATABASE ${alias}`;
const $e6f751831b705ed8$var$updatePageColumn = (columnName, aslAlias, patchAlias)=>`
    (SELECT CASE
        WHEN ${patchAlias}.page.${columnName} != '#' THEN ${patchAlias}.page.${columnName}
        ELSE ${aslAlias}.page.${columnName}
     END
     FROM ${patchAlias}.page
     WHERE ${aslAlias}.page.id = ${patchAlias}.page.id)
`;
const $e6f751831b705ed8$export$1f75c01d8a920a35 = (patchAlias, tableName, aslAlias = $e6f751831b705ed8$var$MAIN_DB_ALIAS)=>`
    UPDATE ${aslAlias}.${tableName}
    SET content = ${$e6f751831b705ed8$var$updatePageColumn("content", aslAlias, patchAlias)},
        part = ${$e6f751831b705ed8$var$updatePageColumn("part", aslAlias, patchAlias)},
        page = ${$e6f751831b705ed8$var$updatePageColumn("page", aslAlias, patchAlias)},
        number = ${$e6f751831b705ed8$var$updatePageColumn("number", aslAlias, patchAlias)}
    WHERE EXISTS (
        SELECT 1
        FROM ${patchAlias}.${tableName}
        WHERE ${aslAlias}.${tableName}.id = ${patchAlias}.${tableName}.id
    );
`;
const $e6f751831b705ed8$var$updateTitleColumn = (columnName, aslAlias, patchAlias)=>`
    (SELECT CASE
        WHEN ${patchAlias}.title.${columnName} != '#' THEN ${patchAlias}.title.${columnName}
        ELSE ${aslAlias}.title.${columnName}
     END
     FROM ${patchAlias}.title
     WHERE ${aslAlias}.title.id = ${patchAlias}.title.id)
`;
const $e6f751831b705ed8$export$a38d1618b943c74f = (patchAlias, tableName, aslAlias = $e6f751831b705ed8$var$MAIN_DB_ALIAS)=>`
    UPDATE ${aslAlias}.${tableName}
    SET content = ${$e6f751831b705ed8$var$updateTitleColumn("content", aslAlias, patchAlias)},
        page = ${$e6f751831b705ed8$var$updateTitleColumn("page", aslAlias, patchAlias)},
        parent = ${$e6f751831b705ed8$var$updateTitleColumn("parent", aslAlias, patchAlias)}
    WHERE EXISTS (
        SELECT 1
        FROM ${patchAlias}.${tableName}
        WHERE ${aslAlias}.${tableName}.id = ${patchAlias}.${tableName}.id
    );
`;
const $e6f751831b705ed8$export$3ef07b9580a45514 = (table, fieldToValue, isDeleted = false)=>{
    const combinedRecords = {
        ...fieldToValue,
        is_deleted: isDeleted ? "1" : "0"
    };
    const sortedKeys = Object.keys(combinedRecords).sort();
    const sortedValues = sortedKeys.map((key)=>combinedRecords[key]);
    return `INSERT INTO ${table} (${sortedKeys.toString()}) VALUES (${sortedValues.map((val)=>{
        if (val === null) return "NULL";
        return typeof val === "string" ? `'${val}'` : val;
    }).toString()})`;
};

var $167eb860ccdaab7d$export$a17a6870a08b950e;
(function(Tables) {
    Tables["Authors"] = "authors";
    Tables["Books"] = "books";
    Tables["Categories"] = "categories";
    Tables["Page"] = "page";
    Tables["Title"] = "title";
})($167eb860ccdaab7d$export$a17a6870a08b950e || ($167eb860ccdaab7d$export$a17a6870a08b950e = {}));

const $2a3b237385dd2cff$var$PATCH_DB_ALIAS = "patch";
const $2a3b237385dd2cff$var$ASL_DB_ALIAS = "asl";
const $2a3b237385dd2cff$export$5d28a6b0dd65e4c4 = async (db)=>{
    return db.batch([
        `CREATE TABLE page (id INTEGER PRIMARY KEY, content TEXT, part INTEGER, page INTEGER, number INTEGER)`,
        `CREATE TABLE title (id INTEGER PRIMARY KEY, content TEXT, page INTEGER, parent INTEGER)`
    ]);
};
const $2a3b237385dd2cff$export$3bd13330ac8e761e = async (db)=>{
    const rows = await (0, $b142353d92e15b6f$export$3274d151f0598f1)(db, (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page);
    const pages = rows.map((row)=>{
        const { content: content, id: id, number: number, page: page, part: part } = row;
        return {
            content: content,
            id: id,
            ...page && {
                page: page
            },
            ...number && {
                number: number
            },
            ...part && {
                part: part
            }
        };
    });
    return pages;
};
const $2a3b237385dd2cff$export$8987cf231214301f = async (db)=>{
    const rows = await (0, $b142353d92e15b6f$export$3274d151f0598f1)(db, (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title);
    const titles = rows.map((row)=>{
        const r = row;
        return {
            content: r.content,
            id: r.id,
            page: r.page,
            ...r.parent && {
                number: r.parent
            }
        };
    });
    return titles;
};
const $2a3b237385dd2cff$export$7a171f172be0782e = async (db)=>{
    const [pages, titles] = await Promise.all([
        $2a3b237385dd2cff$export$3bd13330ac8e761e(db),
        $2a3b237385dd2cff$export$8987cf231214301f(db)
    ]);
    return {
        pages: pages,
        titles: titles
    };
};
const $2a3b237385dd2cff$var$getPagesToCopy = (tables)=>{
    const statements = [];
    if (tables.find((t)=>t.name === (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page)) {
        statements.push(`INSERT INTO main.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page} SELECT id,content,part,page,number FROM ${$2a3b237385dd2cff$var$ASL_DB_ALIAS}.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page} WHERE id IN (SELECT id FROM ${$2a3b237385dd2cff$var$PATCH_DB_ALIAS}.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page} WHERE is_deleted='0')`);
        statements.push((0, $e6f751831b705ed8$export$1f75c01d8a920a35)($2a3b237385dd2cff$var$PATCH_DB_ALIAS, (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page));
    } else statements.push(`INSERT INTO main.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page} SELECT id,content,part,page,number FROM ${$2a3b237385dd2cff$var$ASL_DB_ALIAS}.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Page} WHERE is_deleted='0'`);
    return statements;
};
const $2a3b237385dd2cff$var$getTitlesToCopy = (tables)=>{
    const statements = [];
    if (tables.find((t)=>t.name === (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title)) {
        statements.push(`INSERT INTO main.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title} SELECT id,content,page,parent FROM ${$2a3b237385dd2cff$var$ASL_DB_ALIAS}.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title} WHERE id IN (SELECT id FROM ${$2a3b237385dd2cff$var$PATCH_DB_ALIAS}.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title} WHERE is_deleted='0')`);
        statements.push((0, $e6f751831b705ed8$export$a38d1618b943c74f)($2a3b237385dd2cff$var$PATCH_DB_ALIAS, (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title));
    } else statements.push(`INSERT INTO main.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title} SELECT id,content,page,parent FROM ${$2a3b237385dd2cff$var$ASL_DB_ALIAS}.${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Title} WHERE is_deleted='0'`);
    return statements;
};
const $2a3b237385dd2cff$export$a8b8e03e6bbe5473 = async (db, aslDB, patchDB)=>{
    const statements = [
        (0, $e6f751831b705ed8$export$ee56083bb7df7ecc)(aslDB, $2a3b237385dd2cff$var$ASL_DB_ALIAS)
    ];
    if (patchDB) await db.execute((0, $e6f751831b705ed8$export$ee56083bb7df7ecc)(patchDB, $2a3b237385dd2cff$var$PATCH_DB_ALIAS));
    const { rows: tables } = patchDB ? await db.execute(`SELECT name FROM ${$2a3b237385dd2cff$var$PATCH_DB_ALIAS}.sqlite_master WHERE type='table'`) : {
        rows: []
    };
    (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).debug({
        tables: tables
    }, `Applying patches for...`);
    statements.push(...$2a3b237385dd2cff$var$getPagesToCopy(tables));
    statements.push(...$2a3b237385dd2cff$var$getTitlesToCopy(tables));
    await db.batch(statements);
    const detachStatements = [];
    detachStatements.push((0, $e6f751831b705ed8$export$7fec5208c714b262)($2a3b237385dd2cff$var$ASL_DB_ALIAS));
    if (patchDB) detachStatements.push((0, $e6f751831b705ed8$export$7fec5208c714b262)($2a3b237385dd2cff$var$PATCH_DB_ALIAS));
    return db.batch(detachStatements);
};

const $14c03a3c41757845$export$5bc725975f47e62c = 0;
const $14c03a3c41757845$export$3deaf0b0365f781e = "99999";

const $e19722dabbedc0a6$export$5d28a6b0dd65e4c4 = async (db)=>{
    return db.batch([
        `CREATE TABLE authors (id INTEGER PRIMARY KEY, name TEXT, biography TEXT, death INTEGER)`,
        `CREATE TABLE books (id INTEGER PRIMARY KEY, name TEXT, category INTEGER, type INTEGER, date INTEGER, author TEXT, printed INTEGER, major INTEGER, minor INTEGER, bibliography TEXT, hint TEXT, pdf_links TEXT, metadata TEXT)`,
        `CREATE TABLE categories (id INTEGER PRIMARY KEY, name TEXT)`
    ]);
};
const $e19722dabbedc0a6$export$b3b931905baa18df = async (db)=>{
    const rows = await (0, $b142353d92e15b6f$export$3274d151f0598f1)(db, (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Authors);
    const authors = rows.map((r)=>({
        ...r.biography && {
            biography: r.biography
        },
        ...r.death && {
            death: r.death
        },
        id: r.id,
        name: r.name
    }));
    return authors;
};
const $e19722dabbedc0a6$export$36bfd9279b3a24b7 = async (db)=>{
    const rows = await (0, $b142353d92e15b6f$export$3274d151f0598f1)(db, (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Categories);
    const categories = rows.map((r)=>({
        id: r.id,
        name: r.name
    }));
    return categories;
};
const $e19722dabbedc0a6$var$parseAuthor = (value)=>{
    const result = value.split(",\\s+").map((id)=>parseInt(id.trim()));
    return result.length > 1 ? result : result[0];
};
const $e19722dabbedc0a6$var$parsePdfLinks = (value)=>{
    const result = JSON.parse(value);
    if (result.files) result.files = result.files.map((f)=>{
        const [file, id] = f.split("|");
        return {
            ...id && {
                id: id
            },
            file: file
        };
    });
    return result;
};
const $e19722dabbedc0a6$export$7111c27bf38a004f = async (db)=>{
    const rows = await (0, $b142353d92e15b6f$export$3274d151f0598f1)(db, (0, $167eb860ccdaab7d$export$a17a6870a08b950e).Books);
    const books = rows.map((row)=>{
        const r = row;
        return {
            author: $e19722dabbedc0a6$var$parseAuthor(r.author),
            bibliography: r.bibliography,
            category: r.category,
            id: r.id,
            major: r.major,
            metadata: JSON.parse(r.metadata),
            name: r.name,
            printed: r.printed,
            type: r.type,
            ...r.date && r.date.toString() !== (0, $14c03a3c41757845$export$3deaf0b0365f781e) && {
                date: r.date
            },
            ...r.hint && {
                hint: r.hint
            },
            ...r.pdf_links && {
                pdfLinks: $e19722dabbedc0a6$var$parsePdfLinks(r.pdf_links)
            },
            ...r.minor && {
                minorRelease: r.minor
            }
        };
    });
    return books;
};
const $e19722dabbedc0a6$export$7a171f172be0782e = async (db)=>{
    const [authors, books, categories] = await Promise.all([
        $e19722dabbedc0a6$export$b3b931905baa18df(db),
        $e19722dabbedc0a6$export$7111c27bf38a004f(db),
        $e19722dabbedc0a6$export$36bfd9279b3a24b7(db)
    ]);
    return {
        authors: authors,
        books: books,
        categories: categories
    };
};
const $e19722dabbedc0a6$export$b3179f41dfd6e35b = async (db, sourceTables)=>{
    const aliasToPath = sourceTables.reduce((acc, tablePath)=>{
        const { name: name } = (0, $5oumB$path).parse(tablePath);
        return {
            ...acc,
            [name]: tablePath
        };
    }, {});
    const attachStatements = Object.entries(aliasToPath).map(([alias, dbPath])=>(0, $e6f751831b705ed8$export$ee56083bb7df7ecc)(dbPath, alias));
    await db.batch(attachStatements);
    const insertStatements = [
        `INSERT INTO ${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Authors} SELECT id,name,biography,(CASE WHEN death_number = ${(0, $14c03a3c41757845$export$3deaf0b0365f781e)} THEN NULL ELSE death_number END) AS death_number FROM author WHERE is_deleted='0'`,
        `INSERT INTO ${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Books} SELECT id,name,category,type,(CASE WHEN date = ${(0, $14c03a3c41757845$export$3deaf0b0365f781e)} THEN NULL ELSE date END) AS date,author,printed,major_release,minor_release,bibliography,hint,pdf_links,metadata FROM book WHERE is_deleted='0'`,
        `INSERT INTO ${(0, $167eb860ccdaab7d$export$a17a6870a08b950e).Categories} SELECT id,name FROM category WHERE is_deleted='0'`
    ];
    await db.batch(insertStatements);
    const detachStatements = Object.keys(aliasToPath).map((0, $e6f751831b705ed8$export$7fec5208c714b262));
    await db.batch(detachStatements);
};

const $e8ee15c0ce3f020d$export$1c500f521ad591da = async (prefix = "shamela")=>{
    const tempDirBase = (0, $5oumB$path).join((0, $5oumB$os).tmpdir(), prefix);
    return (0, $5oumB$promises).mkdtemp(tempDirBase);
};
const $e8ee15c0ce3f020d$export$ffc21166d570a16 = async (path)=>!!await (0, $5oumB$promises).stat(path).catch(()=>false);
async function $e8ee15c0ce3f020d$export$fb61e277af91ac0(url, outputDir) {
    const extractedFiles = [];
    const entryPromises = [];
    try {
        // Make HTTPS request and get the response stream
        const response = await new Promise((resolve, reject)=>{
            (0, $5oumB$https).get(url, (res)=>{
                if (res.statusCode !== 200) reject(new Error(`Failed to download ZIP file: ${res.statusCode} ${res.statusMessage}`));
                else resolve(res);
            }).on("error", (err)=>{
                reject(new Error(`HTTPS request failed: ${err.message}`));
            });
        });
        // Create unzip stream
        const unzipStream = (0, $5oumB$unzipper).Parse();
        // Handle entries in the ZIP file
        unzipStream.on("entry", (entry)=>{
            const entryPromise = (async ()=>{
                const filePath = (0, $5oumB$path).join(outputDir, entry.path);
                if (entry.type === "Directory") {
                    // Ensure the directory exists
                    await (0, $5oumB$promises).mkdir(filePath, {
                        recursive: true
                    });
                    entry.autodrain();
                } else {
                    // Ensure the parent directory exists
                    const dir = (0, $5oumB$path).dirname(filePath);
                    await (0, $5oumB$promises).mkdir(dir, {
                        recursive: true
                    });
                    // Pipe the entry to a file
                    await (0, $5oumB$pipeline)(entry, (0, $5oumB$createWriteStream)(filePath));
                    extractedFiles.push(filePath);
                }
            })().catch((err)=>{
                // Emit errors to be handled by the unzipStream error handler
                unzipStream.emit("error", err);
            });
            // Collect the promises
            entryPromises.push(entryPromise);
        });
        // Handle errors in the unzip stream
        unzipStream.on("error", (err)=>{
            throw new Error(`Error during extraction: ${err.message}`);
        });
        // Pipe the response into the unzip stream
        await (0, $5oumB$pipeline)(response, unzipStream);
        // Wait for all entry promises to complete
        await Promise.all(entryPromises);
        return extractedFiles;
    } catch (error) {
        throw new Error(`Error processing URL: ${error.message}`);
    }
}

const $932b4b3755196b46$export$b5cd97dee32de81d = (endpoint, params, useAuth = true)=>{
    const url = new (0, $5oumB$URL)(endpoint);
    {
        const params = new (0, $5oumB$URLSearchParams)();
        Object.entries(params).forEach(([key, value])=>{
            params.append(key, value.toString());
        });
        if (useAuth) params.append("api_key", (0, $5oumB$process).env.SHAMELA_API_KEY);
        url.search = params.toString();
    }
    return url;
};
const $932b4b3755196b46$export$c9e6217566c54f42 = (url)=>{
    return new Promise((resolve, reject)=>{
        (0, $5oumB$https).get(url, (res)=>{
            const contentType = res.headers["content-type"] || "";
            const dataChunks = [];
            res.on("data", (chunk)=>{
                dataChunks.push(chunk);
            });
            res.on("end", ()=>{
                const fullData = (0, $5oumB$Buffer).concat(dataChunks);
                if (contentType.includes("application/json")) try {
                    const json = JSON.parse(fullData.toString("utf-8"));
                    resolve(json);
                } catch (error) {
                    reject(new Error(`Failed to parse JSON: ${error.message}`));
                }
                else resolve(fullData);
            });
        }).on("error", (error)=>{
            reject(new Error(`Error making request: ${error.message}`));
        });
    });
};

const $da18f5255cf003e1$var$SOURCE_TABLES = [
    "author.sqlite",
    "book.sqlite",
    "category.sqlite"
];
const $da18f5255cf003e1$export$c7660b0cda39b7c3 = (sourceTablePaths)=>{
    const sourceTableNames = sourceTablePaths.map((tablePath)=>(0, $5oumB$path).parse(tablePath).base);
    return $da18f5255cf003e1$var$SOURCE_TABLES.every((table)=>sourceTableNames.includes(table));
};
const $da18f5255cf003e1$export$37467b7f8cfc50b0 = ()=>{
    if (!(0, $5oumB$process).env.SHAMELA_API_MASTER_PATCH_ENDPOINT) throw new Error("SHAMELA_API_MASTER_PATCH_ENDPOINT environment variable not set");
    if (!(0, $5oumB$process).env.SHAMELA_API_KEY) throw new Error("SHAMELA_API_KEY environment variable not set");
};

const $96cb7a03b537cb37$export$b96de494209cdc35 = async (version = 0)=>{
    (0, $da18f5255cf003e1$export$37467b7f8cfc50b0)();
    const url = new (0, $5oumB$URL)((0, $5oumB$process).env.SHAMELA_API_MASTER_PATCH_ENDPOINT);
    {
        const params = new (0, $5oumB$URLSearchParams)();
        params.append("api_key", (0, $5oumB$process).env.SHAMELA_API_KEY);
        params.append("version", version.toString());
        url.search = params.toString();
    }
    (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`Fetching shamela.ws master database patch link: ${url.toString()}`);
    try {
        const response = await (0, $932b4b3755196b46$export$c9e6217566c54f42)(url);
        return {
            url: response.patch_url,
            version: response.version
        };
    } catch (error) {
        throw new Error(`Error fetching master patch: ${error.message}`);
    }
};
const $96cb7a03b537cb37$export$fd8b6353fde3f1de = async (options)=>{
    (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`downloadMasterDatabase ${JSON.stringify(options)}`);
    const outputDir = await (0, $e8ee15c0ce3f020d$export$1c500f521ad591da)("shamela_downloadMaster");
    const masterResponse = options.masterMetadata || await $96cb7a03b537cb37$export$b96de494209cdc35((0, $14c03a3c41757845$export$5bc725975f47e62c));
    (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`Downloading master database from: ${JSON.stringify(masterResponse)}`);
    const sourceTables = await (0, $e8ee15c0ce3f020d$export$fb61e277af91ac0)(masterResponse.url, outputDir);
    (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`sourceTables downloaded: ${sourceTables.toString()}`);
    if (!(0, $da18f5255cf003e1$export$c7660b0cda39b7c3)(sourceTables)) {
        (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).error(`Some source tables were not found: ${sourceTables.toString()}`);
        throw new Error("Expected tables not found!");
    }
    const dbPath = (0, $5oumB$path).join(outputDir, "master.db");
    const client = (0, $5oumB$createClient)({
        url: `file:${dbPath}`
    });
    try {
        (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`Creating tables`);
        await (0, $e19722dabbedc0a6$export$5d28a6b0dd65e4c4)(client);
        (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`Copying data to master table`);
        await (0, $e19722dabbedc0a6$export$b3179f41dfd6e35b)(client, sourceTables);
        const { ext: extension } = (0, $5oumB$path).parse(options.outputFile.path);
        if (extension === ".json") {
            const result = await (0, $e19722dabbedc0a6$export$7a171f172be0782e)(client);
            await (0, $5oumB$promises).writeFile(options.outputFile.path, JSON.stringify(result, undefined, 2), "utf8");
        }
        client.close();
        if (extension === ".db" || extension === ".sqlite") await (0, $5oumB$promises).rename(dbPath, options.outputFile.path);
        await (0, $5oumB$promises).rm(outputDir, {
            recursive: true
        });
    } finally{
        client.close();
    }
    return options.outputFile.path;
};
const $96cb7a03b537cb37$export$4c209aa17b4b3e57 = async (id, options)=>{
    (0, $da18f5255cf003e1$export$37467b7f8cfc50b0)();
    const url = new (0, $5oumB$URL)(`${(0, $5oumB$process).env.SHAMELA_API_BOOKS_ENDPOINT}/${id}`);
    {
        const params = new (0, $5oumB$URLSearchParams)();
        params.append("api_key", (0, $5oumB$process).env.SHAMELA_API_KEY);
        params.append("major_release", (options?.majorVersion || 0).toString());
        params.append("minor_release", (options?.minorVersion || 0).toString());
        url.search = params.toString();
    }
    (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`Fetching shamela.ws book link: ${url.toString()}`);
    try {
        const response = await (0, $932b4b3755196b46$export$c9e6217566c54f42)(url);
        return {
            majorRelease: response.major_release,
            majorReleaseUrl: response.major_release_url,
            ...response.minor_release_url && {
                minorReleaseUrl: response.minor_release_url
            },
            ...response.minor_release_url && {
                minorRelease: response.minor_release
            }
        };
    } catch (error) {
        throw new Error(`Error fetching master patch: ${error.message}`);
    }
};
const $96cb7a03b537cb37$export$3560c45fd9de930d = async (id, options)=>{
    (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`downloadBook ${id} ${JSON.stringify(options)}`);
    const outputDir = await (0, $e8ee15c0ce3f020d$export$1c500f521ad591da)("shamela_downloadBook");
    const bookResponse = options?.bookMetadata || await $96cb7a03b537cb37$export$4c209aa17b4b3e57(id);
    const [[bookDatabase], [patchDatabase]] = await Promise.all([
        (0, $e8ee15c0ce3f020d$export$fb61e277af91ac0)(bookResponse.majorReleaseUrl, outputDir),
        ...bookResponse.minorReleaseUrl ? [
            (0, $e8ee15c0ce3f020d$export$fb61e277af91ac0)(bookResponse.minorReleaseUrl, outputDir)
        ] : []
    ]);
    const dbPath = (0, $5oumB$path).join(outputDir, "book.db");
    const client = (0, $5oumB$createClient)({
        url: `file:${dbPath}`
    });
    try {
        (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`Creating tables`);
        await (0, $2a3b237385dd2cff$export$5d28a6b0dd65e4c4)(client);
        (0, $2d2b29d79cbbfeda$export$2e2bcd8739ae039).info(`Applying patches from ${patchDatabase} to ${bookDatabase}`);
        await (0, $2a3b237385dd2cff$export$a8b8e03e6bbe5473)(client, bookDatabase, patchDatabase);
        const { ext: extension } = (0, $5oumB$path).parse(options.outputFile.path);
        if (extension === ".json") {
            const result = await (0, $2a3b237385dd2cff$export$7a171f172be0782e)(client);
            await (0, $5oumB$promises).writeFile(options.outputFile.path, JSON.stringify(result, undefined, 2), "utf8");
        }
        client.close();
        if (extension === ".db" || extension === ".sqlite") await (0, $5oumB$promises).rename(dbPath, options.outputFile.path);
        await (0, $5oumB$promises).rm(outputDir, {
            recursive: true
        });
    } finally{
        client.close();
    }
    return options.outputFile.path;
};
const $96cb7a03b537cb37$export$be7c2acc48adceee = async (id)=>{
    const outputDir = await (0, $e8ee15c0ce3f020d$export$1c500f521ad591da)("shamela_getBookData");
    const outputPath = await $96cb7a03b537cb37$export$3560c45fd9de930d(id, {
        outputFile: {
            path: (0, $5oumB$path).join(outputDir, `${id}.json`)
        }
    });
    const data = JSON.parse(await (0, $5oumB$promises).readFile(outputPath, "utf8"));
    await (0, $5oumB$promises).rm(outputDir, {
        recursive: true
    });
    return data;
};

export {$96cb7a03b537cb37$export$3560c45fd9de930d as downloadBook, $96cb7a03b537cb37$export$fd8b6353fde3f1de as downloadMasterDatabase, $96cb7a03b537cb37$export$be7c2acc48adceee as getBook, $96cb7a03b537cb37$export$4c209aa17b4b3e57 as getBookMetadata, $96cb7a03b537cb37$export$b96de494209cdc35 as getMasterMetadata};
//# sourceMappingURL=main.js.map
package/dist/main.js.map
ADDED

@@ -0,0 +1 @@

{"mappings":"…","sources":["src/index.ts","src/api.ts","src/db/book.ts","src/utils/logger.ts","src/db/common.ts","src/db/queryBuilder.ts","src/db/types.ts","src/db/master.ts","src/utils/constants.ts","src/utils/io.ts","src/utils/network.ts","src/utils/validation.ts"],"sourcesContent":[…]}
statements;\n};\n\nconst getTitlesToCopy = (tables: InternalTable[]): string[] => {\n const statements = [];\n\n if (tables.find((t) => t.name === Tables.Title)) {\n statements.push(\n `INSERT INTO main.${Tables.Title} SELECT id,content,page,parent FROM ${ASL_DB_ALIAS}.${Tables.Title} WHERE id IN (SELECT id FROM ${PATCH_DB_ALIAS}.${Tables.Title} WHERE is_deleted='0')`,\n );\n statements.push(buildTitlePatchQuery(PATCH_DB_ALIAS, Tables.Title));\n } else {\n statements.push(\n `INSERT INTO main.${Tables.Title} SELECT id,content,page,parent FROM ${ASL_DB_ALIAS}.${Tables.Title} WHERE is_deleted='0'`,\n );\n }\n\n return statements;\n};\n\nexport const applyPatches = async (db: Client, aslDB: string, patchDB?: string) => {\n const statements: string[] = [attachDB(aslDB, ASL_DB_ALIAS)];\n\n if (patchDB) {\n await db.execute(attachDB(patchDB, PATCH_DB_ALIAS));\n }\n\n const { rows: tables } = patchDB\n ? await db.execute(`SELECT name FROM ${PATCH_DB_ALIAS}.sqlite_master WHERE type='table'`)\n : { rows: [] };\n\n logger.debug({ tables }, `Applying patches for...`);\n\n statements.push(...getPagesToCopy(tables as InternalTable[]));\n statements.push(...getTitlesToCopy(tables as InternalTable[]));\n\n await db.batch(statements);\n\n const detachStatements = [];\n detachStatements.push(detachDB(ASL_DB_ALIAS));\n\n if (patchDB) {\n detachStatements.push(detachDB(PATCH_DB_ALIAS));\n }\n\n return db.batch(detachStatements);\n};\n","import pino, { Logger } from 'pino';\nimport pretty, { PrettyOptions } from 'pino-pretty';\nimport process from 'process';\n\nconst stream = pretty({\n colorize: true,\n} as PrettyOptions);\n\nconst logger: Logger = pino(\n {\n base: { pid: undefined, hostname: undefined }, // This will remove pid and hostname but keep time\n level: process.env.LOG_LEVEL || 'info',\n },\n stream,\n);\n\nexport default logger;\n","import { Client, Row } from '@libsql/client';\n\nexport const selectAllRows = async (client: Client, table: string): Promise<Row[]> => {\n const { rows } = await client.execute(`SELECT * FROM ${table}`);\n return rows;\n};\n","const MAIN_DB_ALIAS = 'main';\n\nexport const createTable = (name: string, fields: string[]): string =>\n `CREATE TABLE IF NOT EXISTS ${name} (${fields.join(', ')})`;\n\nexport const attachDB = (dbFile: string, alias: string) => `ATTACH DATABASE '${dbFile}' AS ${alias}`;\n\nexport const detachDB = (alias: string) => `DETACH DATABASE ${alias}`;\n\nconst updatePageColumn = (columnName: string, aslAlias: string, patchAlias: string): string => `\n (SELECT CASE \n WHEN ${patchAlias}.page.${columnName} != '#' THEN ${patchAlias}.page.${columnName}\n ELSE ${aslAlias}.page.${columnName}\n END \n FROM ${patchAlias}.page\n WHERE ${aslAlias}.page.id = ${patchAlias}.page.id)\n`;\n\nexport const buildPagePatchQuery = (\n patchAlias: string,\n tableName: string,\n aslAlias: string = MAIN_DB_ALIAS,\n): string => `\n UPDATE ${aslAlias}.${tableName}\n SET content = ${updatePageColumn('content', aslAlias, patchAlias)},\n part = ${updatePageColumn('part', aslAlias, patchAlias)},\n page = ${updatePageColumn('page', aslAlias, patchAlias)},\n number = ${updatePageColumn('number', aslAlias, patchAlias)}\n WHERE EXISTS (\n SELECT 1\n FROM ${patchAlias}.${tableName}\n WHERE ${aslAlias}.${tableName}.id = ${patchAlias}.${tableName}.id\n );\n`;\n\nconst updateTitleColumn = (columnName: string, aslAlias: string, patchAlias: string) => `\n (SELECT CASE \n WHEN ${patchAlias}.title.${columnName} != '#' THEN ${patchAlias}.title.${columnName}\n ELSE 
${aslAlias}.title.${columnName}\n END \n FROM ${patchAlias}.title\n WHERE ${aslAlias}.title.id = ${patchAlias}.title.id)\n`;\n\nexport const buildTitlePatchQuery = (\n patchAlias: string,\n tableName: string,\n aslAlias: string = MAIN_DB_ALIAS,\n): string => `\n UPDATE ${aslAlias}.${tableName}\n SET content = ${updateTitleColumn('content', aslAlias, patchAlias)},\n page = ${updateTitleColumn('page', aslAlias, patchAlias)},\n parent = ${updateTitleColumn('parent', aslAlias, patchAlias)}\n WHERE EXISTS (\n SELECT 1\n FROM ${patchAlias}.${tableName}\n WHERE ${aslAlias}.${tableName}.id = ${patchAlias}.${tableName}.id\n );\n`;\n\nexport const insertUnsafely = (table: string, fieldToValue: Record<string, any>, isDeleted = false): string => {\n const combinedRecords: Record<string, any> = { ...fieldToValue, is_deleted: isDeleted ? '1' : '0' };\n\n const sortedKeys = Object.keys(combinedRecords).sort();\n\n const sortedValues = sortedKeys.map((key) => combinedRecords[key]);\n\n return `INSERT INTO ${table} (${sortedKeys.toString()}) VALUES (${sortedValues\n .map((val) => {\n if (val === null) {\n return 'NULL';\n }\n\n return typeof val === 'string' ? `'${val}'` : val;\n })\n .toString()})`;\n};\n","export type AuthorRow = {\n biography: string;\n death: number;\n id: number;\n name: string;\n};\n\nexport type BookRow = {\n author: string;\n bibliography: string;\n category: number;\n date?: null | number;\n hint: null | string;\n id: number;\n major: number;\n metadata: string;\n minor?: number;\n name: string;\n pdf_links: null | string;\n printed: number;\n type: number;\n};\n\nexport type PageRow = {\n content: string;\n id: number;\n number: null | number;\n page: null | number;\n part: null | number;\n};\n\nexport type TitleRow = {\n content: string;\n id: number;\n page: number;\n parent: null | number;\n};\n\nexport type CategoryRow = {\n id: number;\n name: string;\n};\n\nexport enum Tables {\n Authors = 'authors',\n Books = 'books',\n Categories = 'categories',\n Page = 'page',\n Title = 'title',\n}\n","import { Client } from '@libsql/client';\nimport path from 'path';\n\nimport { Author, Book, Category, MasterData, PDFLinks } from '../types';\nimport { UNKNOWN_VALUE_PLACEHOLDER } from '../utils/constants';\nimport { selectAllRows } from './common';\nimport { attachDB, detachDB } from './queryBuilder';\nimport { BookRow, Tables } from './types';\n\nexport const createTables = async (db: Client) => {\n return db.batch([\n `CREATE TABLE authors (id INTEGER PRIMARY KEY, name TEXT, biography TEXT, death INTEGER)`,\n `CREATE TABLE books (id INTEGER PRIMARY KEY, name TEXT, category INTEGER, type INTEGER, date INTEGER, author TEXT, printed INTEGER, major INTEGER, minor INTEGER, bibliography TEXT, hint TEXT, pdf_links TEXT, metadata TEXT)`,\n `CREATE TABLE categories (id INTEGER PRIMARY KEY, name TEXT)`,\n ]);\n};\n\nexport const getAllAuthors = async (db: Client): Promise<Author[]> => {\n const rows = await selectAllRows(db, Tables.Authors);\n\n const authors: Author[] = rows.map((r: any) => ({\n ...(r.biography && { biography: r.biography }),\n ...(r.death && { death: r.death }),\n id: r.id,\n name: r.name,\n }));\n\n return authors;\n};\n\nexport const getAllCategories = async (db: Client): Promise<Category[]> => {\n const rows = await selectAllRows(db, Tables.Categories);\n\n const categories: Category[] = rows.map((r: any) => ({\n id: r.id,\n name: r.name,\n }));\n\n return categories;\n};\n\nconst parseAuthor = (value: string): number | number[] => {\n const result: number[] = 
value.split(',\\\\s+').map((id) => parseInt(id.trim()));\n return result.length > 1 ? result : result[0];\n};\n\nconst parsePdfLinks = (value: string): PDFLinks => {\n const result = JSON.parse(value);\n\n if (result.files) {\n result.files = (result.files as string[]).map((f: string) => {\n const [file, id] = f.split('|');\n return { ...(id && { id }), file };\n });\n }\n\n return result as PDFLinks;\n};\n\nexport const getAllBooks = async (db: Client): Promise<Book[]> => {\n const rows = await selectAllRows(db, Tables.Books);\n\n const books: Book[] = rows.map((row: any) => {\n const r = row as BookRow;\n\n return {\n author: parseAuthor(r.author),\n bibliography: r.bibliography,\n category: r.category,\n id: r.id,\n major: r.major,\n metadata: JSON.parse(r.metadata),\n name: r.name,\n printed: r.printed,\n type: r.type,\n ...(r.date && r.date.toString() !== UNKNOWN_VALUE_PLACEHOLDER && { date: r.date }),\n ...(r.hint && { hint: r.hint }),\n ...(r.pdf_links && { pdfLinks: parsePdfLinks(r.pdf_links) }),\n ...(r.minor && { minorRelease: r.minor }),\n };\n });\n\n return books;\n};\n\nexport const getData = async (db: Client): Promise<MasterData> => {\n const [authors, books, categories] = await Promise.all([getAllAuthors(db), getAllBooks(db), getAllCategories(db)]);\n return { authors, books, categories };\n};\n\nexport const copyForeignMasterTableData = async (db: Client, sourceTables: string[]) => {\n const aliasToPath: Record<string, string> = sourceTables.reduce((acc, tablePath) => {\n const { name } = path.parse(tablePath);\n return { ...acc, [name]: tablePath };\n }, {});\n\n const attachStatements: string[] = Object.entries(aliasToPath).map(([alias, dbPath]) => attachDB(dbPath, alias));\n await db.batch(attachStatements);\n\n const insertStatements: string[] = [\n `INSERT INTO ${Tables.Authors} SELECT id,name,biography,(CASE WHEN death_number = ${UNKNOWN_VALUE_PLACEHOLDER} THEN NULL ELSE death_number END) AS death_number FROM author WHERE is_deleted='0'`,\n `INSERT INTO ${Tables.Books} SELECT id,name,category,type,(CASE WHEN date = ${UNKNOWN_VALUE_PLACEHOLDER} THEN NULL ELSE date END) AS date,author,printed,major_release,minor_release,bibliography,hint,pdf_links,metadata FROM book WHERE is_deleted='0'`,\n `INSERT INTO ${Tables.Categories} SELECT id,name FROM category WHERE is_deleted='0'`,\n ];\n await db.batch(insertStatements);\n\n const detachStatements: string[] = Object.keys(aliasToPath).map(detachDB);\n await db.batch(detachStatements);\n};\n","export const DEFAULT_MASTER_METADATA_VERSION = 0;\n\nexport const UNKNOWN_VALUE_PLACEHOLDER = '99999';\n","import { createWriteStream, promises as fs } from 'fs';\nimport { IncomingMessage } from 'http';\nimport https from 'https';\nimport os from 'os';\nimport path from 'path';\nimport { pipeline } from 'stream/promises';\nimport unzipper, { Entry } from 'unzipper';\n\nexport const createTempDir = async (prefix = 'shamela') => {\n const tempDirBase = path.join(os.tmpdir(), prefix);\n return fs.mkdtemp(tempDirBase);\n};\n\nexport const fileExists = async (path: string) => !!(await fs.stat(path).catch(() => false));\n\n/**\n * Downloads and extracts a ZIP file from a given URL without loading the entire file into memory.\n *\n * @param url - The URL of the ZIP file to download and extract.\n * @param outputDir - The directory where the files should be extracted.\n * @returns A promise that resolves with the list of all extracted files.\n */\nexport async function unzipFromUrl(url: string, outputDir: string): Promise<string[]> {\n const 
extractedFiles: string[] = [];\n const entryPromises: Promise<void>[] = [];\n\n try {\n // Make HTTPS request and get the response stream\n const response = await new Promise<IncomingMessage>((resolve, reject) => {\n https\n .get(url, (res) => {\n if (res.statusCode !== 200) {\n reject(new Error(`Failed to download ZIP file: ${res.statusCode} ${res.statusMessage}`));\n } else {\n resolve(res);\n }\n })\n .on('error', (err) => {\n reject(new Error(`HTTPS request failed: ${err.message}`));\n });\n });\n\n // Create unzip stream\n const unzipStream = unzipper.Parse();\n\n // Handle entries in the ZIP file\n unzipStream.on('entry', (entry: Entry) => {\n const entryPromise = (async () => {\n const filePath = path.join(outputDir, entry.path);\n\n if (entry.type === 'Directory') {\n // Ensure the directory exists\n await fs.mkdir(filePath, { recursive: true });\n entry.autodrain();\n } else {\n // Ensure the parent directory exists\n const dir = path.dirname(filePath);\n await fs.mkdir(dir, { recursive: true });\n\n // Pipe the entry to a file\n await pipeline(entry, createWriteStream(filePath));\n extractedFiles.push(filePath);\n }\n })().catch((err) => {\n // Emit errors to be handled by the unzipStream error handler\n unzipStream.emit('error', err);\n });\n\n // Collect the promises\n entryPromises.push(entryPromise);\n });\n\n // Handle errors in the unzip stream\n unzipStream.on('error', (err) => {\n throw new Error(`Error during extraction: ${err.message}`);\n });\n\n // Pipe the response into the unzip stream\n await pipeline(response, unzipStream);\n\n // Wait for all entry promises to complete\n await Promise.all(entryPromises);\n\n return extractedFiles;\n } catch (error: any) {\n throw new Error(`Error processing URL: ${error.message}`);\n }\n}\n","import { Buffer } from 'buffer';\nimport { IncomingMessage } from 'http';\nimport https from 'https';\nimport process from 'process';\nimport { URL, URLSearchParams } from 'url';\n\nexport const buildUrl = (endpoint: string, params: Record<string, any>, useAuth: boolean = true): URL => {\n const url = new URL(endpoint);\n {\n const params = new URLSearchParams();\n\n Object.entries(params).forEach(([key, value]) => {\n params.append(key, value.toString());\n });\n\n if (useAuth) {\n params.append('api_key', process.env.SHAMELA_API_KEY as string);\n }\n\n url.search = params.toString();\n }\n\n return url;\n};\n\nexport const httpsGet = (url: string | URL): Promise<Buffer | Record<string, any>> => {\n return new Promise((resolve, reject) => {\n https\n .get(url, (res: IncomingMessage) => {\n const contentType = res.headers['content-type'] || '';\n const dataChunks: Buffer[] = [];\n\n res.on('data', (chunk: Buffer) => {\n dataChunks.push(chunk);\n });\n\n res.on('end', () => {\n const fullData = Buffer.concat(dataChunks);\n\n if (contentType.includes('application/json')) {\n try {\n const json = JSON.parse(fullData.toString('utf-8'));\n resolve(json);\n } catch (error: any) {\n reject(new Error(`Failed to parse JSON: ${error.message}`));\n }\n } else {\n resolve(fullData);\n }\n });\n })\n .on('error', (error) => {\n reject(new Error(`Error making request: ${error.message}`));\n });\n });\n};\n","import path from 'path';\nimport process from 'process';\n\nconst SOURCE_TABLES = ['author.sqlite', 'book.sqlite', 'category.sqlite'];\n\nexport const validateMasterSourceTables = (sourceTablePaths: string[]) => {\n const sourceTableNames = sourceTablePaths.map((tablePath) => path.parse(tablePath).base);\n return SOURCE_TABLES.every((table) => 
sourceTableNames.includes(table));\n};\n\nexport const validateEnvVariables = () => {\n if (!process.env.SHAMELA_API_MASTER_PATCH_ENDPOINT) {\n throw new Error('SHAMELA_API_MASTER_PATCH_ENDPOINT environment variable not set');\n }\n\n if (!process.env.SHAMELA_API_KEY) {\n throw new Error('SHAMELA_API_KEY environment variable not set');\n }\n};\n"],"names":[],"version":3,"file":"main.js.map","sourceRoot":"../"}
package/package.json
ADDED
@@ -0,0 +1,68 @@
+{
+    "name": "shamela",
+    "version": "1.0.0",
+    "description": "Library to interact with the Maktabah Shamela v4 APIs",
+    "repository": {
+        "type": "git",
+        "url": "git+https://github.com/ragaeeb/shamela.git"
+    },
+    "private": false,
+    "main": "dist/main.js",
+    "types": "dist/index.d.ts",
+    "source": "src/index.ts",
+    "type": "module",
+    "engines": {
+        "node": ">=20.0.0"
+    },
+    "scripts": {
+        "build": "parcel build",
+        "test": "vitest run --coverage",
+        "e2e": "node --env-file .env $(which pnpm) exec vitest run --coverage --config vitest.e2e.config.ts",
+        "e2e:ci": "pnpm exec vitest --config vitest.e2e.config.ts --run"
+    },
+    "files": [
+        "dist/main.js",
+        "dist/main.js.map",
+        "dist/*.d.ts"
+    ],
+    "keywords": [
+        "shamela",
+        "Arabic",
+        "Islamic",
+        "Muslim"
+    ],
+    "author": "Ragaeeb Haq",
+    "license": "MIT",
+    "devDependencies": {
+        "@eslint/js": "^9.10.0",
+        "@parcel/packager-ts": "^2.12.0",
+        "@parcel/transformer-typescript-types": "^2.12.0",
+        "@semantic-release/changelog": "^6.0.3",
+        "@semantic-release/git": "^10.0.1",
+        "@types/eslint__js": "^8.42.3",
+        "@types/node": "^22.5.5",
+        "@types/unzipper": "^0.10.10",
+        "@vitest/coverage-v8": "^2.1.1",
+        "dotenv-vault": "^1.26.2",
+        "eslint": "^9.10.0",
+        "eslint-config-prettier": "^9.1.0",
+        "eslint-plugin-import": "^2.30.0",
+        "eslint-plugin-perfectionist": "^3.6.0",
+        "eslint-plugin-prettier": "^5.2.1",
+        "eslint-plugin-vitest": "^0.5.4",
+        "eslint-plugin-vitest-globals": "^1.5.0",
+        "parcel": "^2.12.0",
+        "prettier": "^3.3.3",
+        "semantic-release": "^24.1.1",
+        "ts-node": "^10.9.2",
+        "typescript": "^5.6.2",
+        "typescript-eslint": "^8.5.0",
+        "vitest": "^2.1.1"
+    },
+    "dependencies": {
+        "@libsql/client": "^0.11.0",
+        "pino": "^9.4.0",
+        "pino-pretty": "^11.2.2",
+        "unzipper": "^0.12.3"
+    }
+}
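
For reference, a minimal usage sketch of the entry points published above (`dist/main.js` exports `downloadMasterDatabase`, `downloadBook`, `getBook`, `getBookMetadata`, and `getMasterMetadata` per the bundled source map). The output paths and book id below are arbitrary placeholders, and the `SHAMELA_API_*` environment variables must already be set (for example via `node --env-file .env`, as the package's own `e2e` script does):

```ts
// Sketch only; assumes SHAMELA_API_KEY, SHAMELA_API_MASTER_PATCH_ENDPOINT and
// SHAMELA_API_BOOKS_ENDPOINT are set in the environment.
import { downloadMasterDatabase, getBook } from 'shamela';

// Writes the master catalogue as JSON; a .db/.sqlite extension keeps it as SQLite instead.
const masterPath = await downloadMasterDatabase({ outputFile: { path: './master.json' } });
console.log(`Master database written to ${masterPath}`);

// Downloads a single book, applies any patch release, and returns its parsed data.
// The id 12345 is a hypothetical placeholder, not a known Shamela book id.
const book = await getBook(12345);
console.log(`Pages: ${book.pages.length}, titles: ${book.titles.length}`);
```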