@fs/mycroft 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/completions/mycroft.bash +11 -1
- package/completions/mycroft.fish +15 -2
- package/completions/mycroft.zsh +14 -1
- package/dist/batch-embedder-C2E6OHBQ.js +14 -0
- package/dist/batch-embedder-C2E6OHBQ.js.map +1 -0
- package/dist/batch-summarizer-CM3NO7TK.js +14 -0
- package/dist/batch-summarizer-CM3NO7TK.js.map +1 -0
- package/dist/chunk-LV52FEMB.js +169 -0
- package/dist/chunk-LV52FEMB.js.map +1 -0
- package/dist/chunk-T6X7DRBN.js +275 -0
- package/dist/chunk-T6X7DRBN.js.map +1 -0
- package/dist/chunk-VBEGUDHG.js +103 -0
- package/dist/chunk-VBEGUDHG.js.map +1 -0
- package/dist/cli.js +150 -157
- package/dist/cli.js.map +1 -1
- package/package.json +8 -2
package/completions/mycroft.bash
CHANGED
|
@@ -6,6 +6,7 @@ _mycroft() {
|
|
|
6
6
|
local top_commands="book config chat"
|
|
7
7
|
local global_flags="--help --version --data-dir"
|
|
8
8
|
local book_commands="ingest list show ask search delete"
|
|
9
|
+
local ingest_commands="status resume"
|
|
9
10
|
local config_commands="path init resolve onboard"
|
|
10
11
|
local chat_commands="start ask list show repl"
|
|
11
12
|
|
|
@@ -22,7 +23,16 @@ _mycroft() {
|
|
|
22
23
|
fi
|
|
23
24
|
case "${COMP_WORDS[2]}" in
|
|
24
25
|
ingest)
|
|
25
|
-
|
|
26
|
+
if [[ ${COMP_CWORD} -eq 3 ]]; then
|
|
27
|
+
COMPREPLY=( $(compgen -W "${ingest_commands} --manual --summary --batch" -f -- "${cur}") )
|
|
28
|
+
return 0
|
|
29
|
+
fi
|
|
30
|
+
case "${COMP_WORDS[3]}" in
|
|
31
|
+
status|resume)
|
|
32
|
+
return 0
|
|
33
|
+
;;
|
|
34
|
+
esac
|
|
35
|
+
COMPREPLY=( $(compgen -W "--manual --summary --batch" -- "${cur}") )
|
|
26
36
|
return 0
|
|
27
37
|
;;
|
|
28
38
|
ask|search)
|
package/completions/mycroft.fish
CHANGED
|
@@ -15,6 +15,16 @@ function __mycroft_using_command
|
|
|
15
15
|
return 1
|
|
16
16
|
end
|
|
17
17
|
|
|
18
|
+
function __mycroft_using_subcommand
|
|
19
|
+
set -l cmd (commandline -opc)
|
|
20
|
+
set -l sub $argv[1]
|
|
21
|
+
set -l subsub $argv[2]
|
|
22
|
+
if test (count $cmd) -ge 3; and test $cmd[2] = $sub; and test $cmd[3] = $subsub
|
|
23
|
+
return 0
|
|
24
|
+
end
|
|
25
|
+
return 1
|
|
26
|
+
end
|
|
27
|
+
|
|
18
28
|
complete -c mycroft -n '__mycroft_needs_command' -a 'book' -d 'Manage books and queries'
|
|
19
29
|
complete -c mycroft -n '__mycroft_needs_command' -a 'config' -d 'Manage configuration'
|
|
20
30
|
complete -c mycroft -n '__mycroft_needs_command' -a 'chat' -d 'Run multi-turn chat sessions'
|
|
@@ -29,8 +39,11 @@ complete -c mycroft -n '__mycroft_using_command book' -a 'ask' -d 'Ask a questio
|
|
|
29
39
|
complete -c mycroft -n '__mycroft_using_command book' -a 'search' -d 'Vector search without LLM'
|
|
30
40
|
complete -c mycroft -n '__mycroft_using_command book' -a 'delete' -d 'Remove book, EPUB, and vectors'
|
|
31
41
|
|
|
32
|
-
complete -c mycroft -n '
|
|
33
|
-
complete -c mycroft -n '
|
|
42
|
+
complete -c mycroft -n '__mycroft_using_subcommand book ingest' -a 'status' -d 'Check ingestion status'
|
|
43
|
+
complete -c mycroft -n '__mycroft_using_subcommand book ingest' -a 'resume' -d 'Resume a pending ingestion'
|
|
44
|
+
complete -c mycroft -n '__mycroft_using_subcommand book ingest' -l manual -d 'Interactive chapter selection'
|
|
45
|
+
complete -c mycroft -n '__mycroft_using_subcommand book ingest' -l summary -d 'Enable AI chapter summaries'
|
|
46
|
+
complete -c mycroft -n '__mycroft_using_subcommand book ingest' -l batch -d 'Use OpenAI Batch API'
|
|
34
47
|
complete -c mycroft -n '__mycroft_using_command book; and __fish_seen_subcommand_from ask search' -l top-k -r -d 'Number of passages to retrieve'
|
|
35
48
|
complete -c mycroft -n '__mycroft_using_command book; and __fish_seen_subcommand_from ask search' -l max-chapter -r -d 'Spoiler-free limit'
|
|
36
49
|
complete -c mycroft -n '__mycroft_using_command book; and __fish_seen_subcommand_from delete' -l force -d 'Skip confirmation'
|
package/completions/mycroft.zsh
CHANGED
|
@@ -7,6 +7,9 @@ _mycroft() {
|
|
|
7
7
|
local -a book_commands
|
|
8
8
|
book_commands=(ingest list show ask search delete)
|
|
9
9
|
|
|
10
|
+
local -a ingest_commands
|
|
11
|
+
ingest_commands=(status resume)
|
|
12
|
+
|
|
10
13
|
local -a config_commands
|
|
11
14
|
config_commands=(path init resolve onboard)
|
|
12
15
|
|
|
@@ -31,7 +34,17 @@ _mycroft() {
|
|
|
31
34
|
fi
|
|
32
35
|
case ${words[3]} in
|
|
33
36
|
ingest)
|
|
34
|
-
|
|
37
|
+
if (( CURRENT == 4 )); then
|
|
38
|
+
_describe -t commands "ingest commands" ingest_commands
|
|
39
|
+
_arguments "--manual[Interactive chapter selection]" "--summary[Enable AI chapter summaries]" "--batch[Use OpenAI Batch API]" '*:epub file:_files -g "*.epub"'
|
|
40
|
+
return
|
|
41
|
+
fi
|
|
42
|
+
case ${words[4]} in
|
|
43
|
+
status|resume)
|
|
44
|
+
return
|
|
45
|
+
;;
|
|
46
|
+
esac
|
|
47
|
+
_arguments "--manual[Interactive chapter selection]" "--summary[Enable AI chapter summaries]" "--batch[Use OpenAI Batch API]"
|
|
35
48
|
return
|
|
36
49
|
;;
|
|
37
50
|
ask|search)
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import {
|
|
2
|
+
checkBatchStatus,
|
|
3
|
+
cleanupBatchFiles,
|
|
4
|
+
downloadBatchResults,
|
|
5
|
+
submitBatchEmbeddings
|
|
6
|
+
} from "./chunk-VBEGUDHG.js";
|
|
7
|
+
import "./chunk-LV52FEMB.js";
|
|
8
|
+
export {
|
|
9
|
+
checkBatchStatus,
|
|
10
|
+
cleanupBatchFiles,
|
|
11
|
+
downloadBatchResults,
|
|
12
|
+
submitBatchEmbeddings
|
|
13
|
+
};
|
|
14
|
+
//# sourceMappingURL=batch-embedder-C2E6OHBQ.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import {
|
|
2
|
+
downloadBatchSummaryResults,
|
|
3
|
+
downloadMergeResults,
|
|
4
|
+
submitBatchSummaries,
|
|
5
|
+
submitMergePass
|
|
6
|
+
} from "./chunk-T6X7DRBN.js";
|
|
7
|
+
import "./chunk-LV52FEMB.js";
|
|
8
|
+
export {
|
|
9
|
+
downloadBatchSummaryResults,
|
|
10
|
+
downloadMergeResults,
|
|
11
|
+
submitBatchSummaries,
|
|
12
|
+
submitMergePass
|
|
13
|
+
};
|
|
14
|
+
//# sourceMappingURL=batch-summarizer-CM3NO7TK.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
// src/config.ts
|
|
2
|
+
import { mkdir, readFile } from "fs/promises";
|
|
3
|
+
import { homedir } from "os";
|
|
4
|
+
import { dirname, join, resolve } from "path";
|
|
5
|
+
var DEFAULT_CONFIG = {
|
|
6
|
+
dataDir: "~/.local/share/mycroft",
|
|
7
|
+
askEnabled: true,
|
|
8
|
+
models: {
|
|
9
|
+
embedding: "text-embedding-3-small",
|
|
10
|
+
summary: "gpt-5-nano",
|
|
11
|
+
chat: "gpt-5.1"
|
|
12
|
+
}
|
|
13
|
+
};
|
|
14
|
+
var expandHome = (input) => {
|
|
15
|
+
if (!input.startsWith("~")) return input;
|
|
16
|
+
return join(homedir(), input.slice(1));
|
|
17
|
+
};
|
|
18
|
+
var resolvePath = (input) => resolve(expandHome(input));
|
|
19
|
+
var getConfigPath = () => {
|
|
20
|
+
const override = process.env.MYCROFT_CONFIG;
|
|
21
|
+
if (override) return resolvePath(override);
|
|
22
|
+
return resolvePath("~/.config/mycroft/config.json");
|
|
23
|
+
};
|
|
24
|
+
var normalizeModels = (models) => ({
|
|
25
|
+
embedding: models?.embedding || DEFAULT_CONFIG.models.embedding,
|
|
26
|
+
summary: models?.summary || DEFAULT_CONFIG.models.summary,
|
|
27
|
+
chat: models?.chat || DEFAULT_CONFIG.models.chat
|
|
28
|
+
});
|
|
29
|
+
var overrides = {};
|
|
30
|
+
var setConfigOverrides = (next) => {
|
|
31
|
+
overrides = { ...overrides, ...next };
|
|
32
|
+
};
|
|
33
|
+
var normalizeConfig = (input) => {
|
|
34
|
+
const dataDirEnv = process.env.MYCROFT_DATA_DIR;
|
|
35
|
+
const dataDir = overrides.dataDir || dataDirEnv || input?.dataDir || DEFAULT_CONFIG.dataDir;
|
|
36
|
+
return {
|
|
37
|
+
dataDir,
|
|
38
|
+
askEnabled: input?.askEnabled ?? DEFAULT_CONFIG.askEnabled,
|
|
39
|
+
models: normalizeModels(input?.models)
|
|
40
|
+
};
|
|
41
|
+
};
|
|
42
|
+
var readConfigFile = async (path) => {
|
|
43
|
+
try {
|
|
44
|
+
const contents = await readFile(path, "utf-8");
|
|
45
|
+
return JSON.parse(contents);
|
|
46
|
+
} catch {
|
|
47
|
+
return null;
|
|
48
|
+
}
|
|
49
|
+
};
|
|
50
|
+
var loadConfig = async () => {
|
|
51
|
+
const configPath2 = getConfigPath();
|
|
52
|
+
const data = await readConfigFile(configPath2);
|
|
53
|
+
const normalized = normalizeConfig(data);
|
|
54
|
+
return {
|
|
55
|
+
...normalized,
|
|
56
|
+
dataDir: resolvePath(normalized.dataDir)
|
|
57
|
+
};
|
|
58
|
+
};
|
|
59
|
+
var ensureConfigDirs = async (configPath2) => {
|
|
60
|
+
const path = configPath2 || getConfigPath();
|
|
61
|
+
await mkdir(dirname(path), { recursive: true });
|
|
62
|
+
};
|
|
63
|
+
var configPath = () => getConfigPath();
|
|
64
|
+
|
|
65
|
+
// src/commands/io.ts
|
|
66
|
+
import chalk from "chalk";
|
|
67
|
+
var isTTY = () => Boolean(process.stdout.isTTY);
|
|
68
|
+
var isInteractive = () => Boolean(process.stdin.isTTY && process.stdout.isTTY);
|
|
69
|
+
var formatError = (text) => isTTY() ? chalk.red(text) : text;
|
|
70
|
+
var formatWarn = (text) => isTTY() ? chalk.yellow(text) : text;
|
|
71
|
+
var stdout = (message) => {
|
|
72
|
+
process.stdout.write(message.endsWith("\n") ? message : `${message}
|
|
73
|
+
`);
|
|
74
|
+
};
|
|
75
|
+
var stderr = (message) => {
|
|
76
|
+
process.stderr.write(message.endsWith("\n") ? message : `${message}
|
|
77
|
+
`);
|
|
78
|
+
};
|
|
79
|
+
var printError = (message) => {
|
|
80
|
+
stderr(formatError(`Error: ${message}`));
|
|
81
|
+
};
|
|
82
|
+
var logInfo = (message) => {
|
|
83
|
+
stderr(message);
|
|
84
|
+
};
|
|
85
|
+
var logWarn = (message) => {
|
|
86
|
+
stderr(formatWarn(message));
|
|
87
|
+
};
|
|
88
|
+
var handleSigint = (onCancel) => {
|
|
89
|
+
const handler = () => {
|
|
90
|
+
if (onCancel) onCancel();
|
|
91
|
+
stderr("\nCancelled.");
|
|
92
|
+
process.exit(130);
|
|
93
|
+
};
|
|
94
|
+
process.once("SIGINT", handler);
|
|
95
|
+
return () => process.off("SIGINT", handler);
|
|
96
|
+
};
|
|
97
|
+
|
|
98
|
+
// src/services/constants.ts
|
|
99
|
+
import { mkdir as mkdir2 } from "fs/promises";
|
|
100
|
+
var CHUNK_SIZE = 1e3;
|
|
101
|
+
var CHUNK_OVERLAP = 100;
|
|
102
|
+
var SEPARATORS = ["\n\n", "\n", ". ", " ", ""];
|
|
103
|
+
var SUMMARY_MAX_TOKENS = 3e4;
|
|
104
|
+
var SUMMARY_CONCURRENCY = 3;
|
|
105
|
+
var SUMMARY_TARGET_WORDS = 250;
|
|
106
|
+
var cachedConfig = null;
|
|
107
|
+
var getCachedConfig = async () => {
|
|
108
|
+
if (!cachedConfig) {
|
|
109
|
+
cachedConfig = await loadConfig();
|
|
110
|
+
}
|
|
111
|
+
return cachedConfig;
|
|
112
|
+
};
|
|
113
|
+
var resolvePaths = async () => {
|
|
114
|
+
const config = await getCachedConfig();
|
|
115
|
+
const dataDir = config.dataDir;
|
|
116
|
+
return {
|
|
117
|
+
dataDir,
|
|
118
|
+
booksDir: `${dataDir}/books`,
|
|
119
|
+
vectorsDir: `${dataDir}/vectors`,
|
|
120
|
+
ingestDir: `${dataDir}/ingest`,
|
|
121
|
+
dbPath: `${dataDir}/metadata.db`
|
|
122
|
+
};
|
|
123
|
+
};
|
|
124
|
+
var ensureDataDirs = async () => {
|
|
125
|
+
const paths = await resolvePaths();
|
|
126
|
+
await mkdir2(paths.dataDir, { recursive: true });
|
|
127
|
+
await mkdir2(paths.booksDir, { recursive: true });
|
|
128
|
+
await mkdir2(paths.vectorsDir, { recursive: true });
|
|
129
|
+
await mkdir2(paths.ingestDir, { recursive: true });
|
|
130
|
+
return paths;
|
|
131
|
+
};
|
|
132
|
+
var getModels = async () => {
|
|
133
|
+
const config = await getCachedConfig();
|
|
134
|
+
return config.models;
|
|
135
|
+
};
|
|
136
|
+
var isAskEnabled = async () => {
|
|
137
|
+
const config = await getCachedConfig();
|
|
138
|
+
return config.askEnabled;
|
|
139
|
+
};
|
|
140
|
+
var requireOpenAIKey = () => {
|
|
141
|
+
if (!process.env.OPENAI_API_KEY) {
|
|
142
|
+
throw new Error("OPENAI_API_KEY is not set. Export it to use embeddings and chat.");
|
|
143
|
+
}
|
|
144
|
+
};
|
|
145
|
+
|
|
146
|
+
export {
|
|
147
|
+
setConfigOverrides,
|
|
148
|
+
loadConfig,
|
|
149
|
+
ensureConfigDirs,
|
|
150
|
+
configPath,
|
|
151
|
+
isInteractive,
|
|
152
|
+
stdout,
|
|
153
|
+
printError,
|
|
154
|
+
logInfo,
|
|
155
|
+
logWarn,
|
|
156
|
+
handleSigint,
|
|
157
|
+
CHUNK_SIZE,
|
|
158
|
+
CHUNK_OVERLAP,
|
|
159
|
+
SEPARATORS,
|
|
160
|
+
SUMMARY_MAX_TOKENS,
|
|
161
|
+
SUMMARY_CONCURRENCY,
|
|
162
|
+
SUMMARY_TARGET_WORDS,
|
|
163
|
+
resolvePaths,
|
|
164
|
+
ensureDataDirs,
|
|
165
|
+
getModels,
|
|
166
|
+
isAskEnabled,
|
|
167
|
+
requireOpenAIKey
|
|
168
|
+
};
|
|
169
|
+
//# sourceMappingURL=chunk-LV52FEMB.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/config.ts","../src/commands/io.ts","../src/services/constants.ts"],"sourcesContent":["import { mkdir, readFile } from \"node:fs/promises\";\nimport { homedir } from \"node:os\";\nimport { dirname, join, resolve } from \"node:path\";\n\nexport type ConfigModels = {\n embedding: string;\n summary: string;\n chat: string;\n};\n\nexport type AppConfig = {\n dataDir: string;\n askEnabled: boolean;\n models: ConfigModels;\n};\n\nconst DEFAULT_CONFIG: AppConfig = {\n dataDir: \"~/.local/share/mycroft\",\n askEnabled: true,\n models: {\n embedding: \"text-embedding-3-small\",\n summary: \"gpt-5-nano\",\n chat: \"gpt-5.1\",\n },\n};\n\nconst expandHome = (input: string): string => {\n if (!input.startsWith(\"~\")) return input;\n return join(homedir(), input.slice(1));\n};\n\nconst resolvePath = (input: string): string => resolve(expandHome(input));\n\nconst getConfigPath = (): string => {\n const override = process.env.MYCROFT_CONFIG;\n if (override) return resolvePath(override);\n return resolvePath(\"~/.config/mycroft/config.json\");\n};\n\nconst normalizeModels = (models?: Partial<ConfigModels>): ConfigModels => ({\n embedding: models?.embedding || DEFAULT_CONFIG.models.embedding,\n summary: models?.summary || DEFAULT_CONFIG.models.summary,\n chat: models?.chat || DEFAULT_CONFIG.models.chat,\n});\n\ntype ConfigOverrides = {\n dataDir?: string;\n};\n\nlet overrides: ConfigOverrides = {};\n\nexport const setConfigOverrides = (next: ConfigOverrides) => {\n overrides = { ...overrides, ...next };\n};\n\nconst normalizeConfig = (input: Partial<AppConfig> | null): AppConfig => {\n const dataDirEnv = process.env.MYCROFT_DATA_DIR;\n const dataDir = overrides.dataDir || dataDirEnv || input?.dataDir || DEFAULT_CONFIG.dataDir;\n return {\n dataDir,\n askEnabled: input?.askEnabled ?? 
DEFAULT_CONFIG.askEnabled,\n models: normalizeModels(input?.models),\n };\n};\n\nconst readConfigFile = async (path: string): Promise<Partial<AppConfig> | null> => {\n try {\n const contents = await readFile(path, \"utf-8\");\n return JSON.parse(contents) as Partial<AppConfig>;\n } catch {\n return null;\n }\n};\n\nexport const loadConfig = async (): Promise<AppConfig> => {\n const configPath = getConfigPath();\n const data = await readConfigFile(configPath);\n const normalized = normalizeConfig(data);\n return {\n ...normalized,\n dataDir: resolvePath(normalized.dataDir),\n };\n};\n\nexport const ensureConfigDirs = async (configPath?: string) => {\n const path = configPath || getConfigPath();\n await mkdir(dirname(path), { recursive: true });\n};\n\nexport const configPath = () => getConfigPath();\n","import chalk from \"chalk\";\n\nconst isTTY = () => Boolean(process.stdout.isTTY);\nexport const isInteractive = () => Boolean(process.stdin.isTTY && process.stdout.isTTY);\n\nexport const formatError = (text: string) => (isTTY() ? chalk.red(text) : text);\nexport const formatWarn = (text: string) => (isTTY() ? chalk.yellow(text) : text);\n\nexport const stdout = (message: string) => {\n process.stdout.write(message.endsWith(\"\\n\") ? message : `${message}\\n`);\n};\n\nexport const stderr = (message: string) => {\n process.stderr.write(message.endsWith(\"\\n\") ? 
message : `${message}\\n`);\n};\n\nexport const printError = (message: string) => {\n stderr(formatError(`Error: ${message}`));\n};\n\nexport const logInfo = (message: string) => {\n stderr(message);\n};\n\nexport const logWarn = (message: string) => {\n stderr(formatWarn(message));\n};\n\nexport const handleSigint = (onCancel?: () => void) => {\n const handler = () => {\n if (onCancel) onCancel();\n stderr(\"\\nCancelled.\");\n process.exit(130);\n };\n process.once(\"SIGINT\", handler);\n return () => process.off(\"SIGINT\", handler);\n};\n","import { mkdir } from \"node:fs/promises\";\nimport { loadConfig, type AppConfig } from \"../config.js\";\nimport { logInfo, logWarn } from \"../commands/io.js\";\n\nexport const CHUNK_SIZE: number = 1000;\nexport const CHUNK_OVERLAP: number = 100;\nexport const SEPARATORS = [\"\\n\\n\", \"\\n\", \". \", \" \", \"\"] as const;\n\nexport const SUMMARY_MAX_TOKENS = 30000;\nexport const SUMMARY_CONCURRENCY = 3;\nexport const SUMMARY_TARGET_WORDS = 250;\n\nlet cachedConfig: AppConfig | null = null;\n\nconst getCachedConfig = async (): Promise<AppConfig> => {\n if (!cachedConfig) {\n cachedConfig = await loadConfig();\n }\n return cachedConfig;\n};\n\nexport type ResolvedPaths = {\n dataDir: string;\n booksDir: string;\n vectorsDir: string;\n ingestDir: string;\n dbPath: string;\n};\n\nexport const resolvePaths = async (): Promise<ResolvedPaths> => {\n const config = await getCachedConfig();\n const dataDir = config.dataDir;\n return {\n dataDir,\n booksDir: `${dataDir}/books`,\n vectorsDir: `${dataDir}/vectors`,\n ingestDir: `${dataDir}/ingest`,\n dbPath: `${dataDir}/metadata.db`,\n };\n};\n\nexport const ensureDataDirs = async () => {\n const paths = await resolvePaths();\n await mkdir(paths.dataDir, { recursive: true });\n await mkdir(paths.booksDir, { recursive: true });\n await mkdir(paths.vectorsDir, { recursive: true });\n await mkdir(paths.ingestDir, { recursive: true });\n return paths;\n};\n\nexport const getModels = 
async () => {\n const config = await getCachedConfig();\n return config.models;\n};\n\nexport const isAskEnabled = async () => {\n const config = await getCachedConfig();\n return config.askEnabled;\n};\n\nexport const requireOpenAIKey = () => {\n if (!process.env.OPENAI_API_KEY) {\n throw new Error(\"OPENAI_API_KEY is not set. Export it to use embeddings and chat.\");\n }\n};\n\nexport { logInfo, logWarn };\n"],"mappings":";AAAA,SAAS,OAAO,gBAAgB;AAChC,SAAS,eAAe;AACxB,SAAS,SAAS,MAAM,eAAe;AAcvC,IAAM,iBAA4B;AAAA,EAChC,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,QAAQ;AAAA,IACN,WAAW;AAAA,IACX,SAAS;AAAA,IACT,MAAM;AAAA,EACR;AACF;AAEA,IAAM,aAAa,CAAC,UAA0B;AAC5C,MAAI,CAAC,MAAM,WAAW,GAAG,EAAG,QAAO;AACnC,SAAO,KAAK,QAAQ,GAAG,MAAM,MAAM,CAAC,CAAC;AACvC;AAEA,IAAM,cAAc,CAAC,UAA0B,QAAQ,WAAW,KAAK,CAAC;AAExE,IAAM,gBAAgB,MAAc;AAClC,QAAM,WAAW,QAAQ,IAAI;AAC7B,MAAI,SAAU,QAAO,YAAY,QAAQ;AACzC,SAAO,YAAY,+BAA+B;AACpD;AAEA,IAAM,kBAAkB,CAAC,YAAkD;AAAA,EACzE,WAAW,QAAQ,aAAa,eAAe,OAAO;AAAA,EACtD,SAAS,QAAQ,WAAW,eAAe,OAAO;AAAA,EAClD,MAAM,QAAQ,QAAQ,eAAe,OAAO;AAC9C;AAMA,IAAI,YAA6B,CAAC;AAE3B,IAAM,qBAAqB,CAAC,SAA0B;AAC3D,cAAY,EAAE,GAAG,WAAW,GAAG,KAAK;AACtC;AAEA,IAAM,kBAAkB,CAAC,UAAgD;AACvE,QAAM,aAAa,QAAQ,IAAI;AAC/B,QAAM,UAAU,UAAU,WAAW,cAAc,OAAO,WAAW,eAAe;AACpF,SAAO;AAAA,IACL;AAAA,IACA,YAAY,OAAO,cAAc,eAAe;AAAA,IAChD,QAAQ,gBAAgB,OAAO,MAAM;AAAA,EACvC;AACF;AAEA,IAAM,iBAAiB,OAAO,SAAqD;AACjF,MAAI;AACF,UAAM,WAAW,MAAM,SAAS,MAAM,OAAO;AAC7C,WAAO,KAAK,MAAM,QAAQ;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,IAAM,aAAa,YAAgC;AACxD,QAAMA,cAAa,cAAc;AACjC,QAAM,OAAO,MAAM,eAAeA,WAAU;AAC5C,QAAM,aAAa,gBAAgB,IAAI;AACvC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,SAAS,YAAY,WAAW,OAAO;AAAA,EACzC;AACF;AAEO,IAAM,mBAAmB,OAAOA,gBAAwB;AAC7D,QAAM,OAAOA,eAAc,cAAc;AACzC,QAAM,MAAM,QAAQ,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AAChD;AAEO,IAAM,aAAa,MAAM,cAAc;;;ACzF9C,OAAO,WAAW;AAElB,IAAM,QAAQ,MAAM,QAAQ,QAAQ,OAAO,KAAK;AACzC,IAAM,gBAAgB,MAAM,QAAQ,QAAQ,MAAM,SAAS,QAAQ,OAAO,KAAK;AAE/E,IAAM,cAAc,CAAC,SAAkB,MAAM,IAAI,MAAM,IAAI,IAAI,IAAI;AACnE,IAAM,aAAa,CAAC,SAAkB,MAAM,IAAI,MAAM,OAA
O,IAAI,IAAI;AAErE,IAAM,SAAS,CAAC,YAAoB;AACzC,UAAQ,OAAO,MAAM,QAAQ,SAAS,IAAI,IAAI,UAAU,GAAG,OAAO;AAAA,CAAI;AACxE;AAEO,IAAM,SAAS,CAAC,YAAoB;AACzC,UAAQ,OAAO,MAAM,QAAQ,SAAS,IAAI,IAAI,UAAU,GAAG,OAAO;AAAA,CAAI;AACxE;AAEO,IAAM,aAAa,CAAC,YAAoB;AAC7C,SAAO,YAAY,UAAU,OAAO,EAAE,CAAC;AACzC;AAEO,IAAM,UAAU,CAAC,YAAoB;AAC1C,SAAO,OAAO;AAChB;AAEO,IAAM,UAAU,CAAC,YAAoB;AAC1C,SAAO,WAAW,OAAO,CAAC;AAC5B;AAEO,IAAM,eAAe,CAAC,aAA0B;AACrD,QAAM,UAAU,MAAM;AACpB,QAAI,SAAU,UAAS;AACvB,WAAO,cAAc;AACrB,YAAQ,KAAK,GAAG;AAAA,EAClB;AACA,UAAQ,KAAK,UAAU,OAAO;AAC9B,SAAO,MAAM,QAAQ,IAAI,UAAU,OAAO;AAC5C;;;ACpCA,SAAS,SAAAC,cAAa;AAIf,IAAM,aAAqB;AAC3B,IAAM,gBAAwB;AAC9B,IAAM,aAAa,CAAC,QAAQ,MAAM,MAAM,KAAK,EAAE;AAE/C,IAAM,qBAAqB;AAC3B,IAAM,sBAAsB;AAC5B,IAAM,uBAAuB;AAEpC,IAAI,eAAiC;AAErC,IAAM,kBAAkB,YAAgC;AACtD,MAAI,CAAC,cAAc;AACjB,mBAAe,MAAM,WAAW;AAAA,EAClC;AACA,SAAO;AACT;AAUO,IAAM,eAAe,YAAoC;AAC9D,QAAM,SAAS,MAAM,gBAAgB;AACrC,QAAM,UAAU,OAAO;AACvB,SAAO;AAAA,IACL;AAAA,IACA,UAAU,GAAG,OAAO;AAAA,IACpB,YAAY,GAAG,OAAO;AAAA,IACtB,WAAW,GAAG,OAAO;AAAA,IACrB,QAAQ,GAAG,OAAO;AAAA,EACpB;AACF;AAEO,IAAM,iBAAiB,YAAY;AACxC,QAAM,QAAQ,MAAM,aAAa;AACjC,QAAMC,OAAM,MAAM,SAAS,EAAE,WAAW,KAAK,CAAC;AAC9C,QAAMA,OAAM,MAAM,UAAU,EAAE,WAAW,KAAK,CAAC;AAC/C,QAAMA,OAAM,MAAM,YAAY,EAAE,WAAW,KAAK,CAAC;AACjD,QAAMA,OAAM,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAChD,SAAO;AACT;AAEO,IAAM,YAAY,YAAY;AACnC,QAAM,SAAS,MAAM,gBAAgB;AACrC,SAAO,OAAO;AAChB;AAEO,IAAM,eAAe,YAAY;AACtC,QAAM,SAAS,MAAM,gBAAgB;AACrC,SAAO,OAAO;AAChB;AAEO,IAAM,mBAAmB,MAAM;AACpC,MAAI,CAAC,QAAQ,IAAI,gBAAgB;AAC/B,UAAM,IAAI,MAAM,kEAAkE;AAAA,EACpF;AACF;","names":["configPath","mkdir","mkdir"]}
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
import {
|
|
2
|
+
SUMMARY_MAX_TOKENS,
|
|
3
|
+
SUMMARY_TARGET_WORDS,
|
|
4
|
+
getModels,
|
|
5
|
+
logInfo,
|
|
6
|
+
logWarn
|
|
7
|
+
} from "./chunk-LV52FEMB.js";
|
|
8
|
+
|
|
9
|
+
// src/services/batch-summarizer.ts
|
|
10
|
+
import OpenAI from "openai";
|
|
11
|
+
|
|
12
|
+
// src/shared/summary.ts
|
|
13
|
+
var CHARS_PER_TOKEN = 4;
|
|
14
|
+
var SUMMARY_PROMPT = (title, chapterNum, content, targetWords) => `You are analyzing a chapter from a book (fiction or nonfiction). Extract key information to help readers understand the chapter's content.
|
|
15
|
+
|
|
16
|
+
Chapter Title: ${title}
|
|
17
|
+
Chapter Number: ${chapterNum}
|
|
18
|
+
|
|
19
|
+
---
|
|
20
|
+
${content}
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
Extract the following information and respond ONLY with valid JSON (no markdown, no code blocks):
|
|
24
|
+
|
|
25
|
+
{
|
|
26
|
+
"characters": ["Name - brief description (role, traits, first appearance)", ...],
|
|
27
|
+
"events": "What happens in this chapter? (2-3 sentences)",
|
|
28
|
+
"setting": "Where does this chapter take place?",
|
|
29
|
+
"revelations": "Any important information revealed? (secrets, backstory, foreshadowing)"
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
Keep the total response around ${targetWords} words.`;
|
|
33
|
+
var parseStructuredSummary = (text, chapterIndex, title) => {
|
|
34
|
+
try {
|
|
35
|
+
let jsonText = text.trim();
|
|
36
|
+
if (jsonText.startsWith("```json")) {
|
|
37
|
+
jsonText = jsonText.slice(7, -3).trim();
|
|
38
|
+
} else if (jsonText.startsWith("```")) {
|
|
39
|
+
jsonText = jsonText.slice(3, -3).trim();
|
|
40
|
+
}
|
|
41
|
+
const parsed = JSON.parse(jsonText);
|
|
42
|
+
const fullSummary = `Chapter ${chapterIndex + 1}: ${title}
|
|
43
|
+
|
|
44
|
+
Characters: ${parsed.characters.join(", ")}
|
|
45
|
+
|
|
46
|
+
Events: ${parsed.events}
|
|
47
|
+
|
|
48
|
+
Setting: ${parsed.setting}
|
|
49
|
+
|
|
50
|
+
Revelations: ${parsed.revelations}`;
|
|
51
|
+
return {
|
|
52
|
+
chapterIndex,
|
|
53
|
+
chapterTitle: title,
|
|
54
|
+
characters: parsed.characters,
|
|
55
|
+
events: parsed.events,
|
|
56
|
+
setting: parsed.setting,
|
|
57
|
+
revelations: parsed.revelations,
|
|
58
|
+
fullSummary
|
|
59
|
+
};
|
|
60
|
+
} catch (error) {
|
|
61
|
+
logWarn(`[Summary] Failed to parse summary JSON for "${title}": ${error instanceof Error ? error.message : String(error)}`);
|
|
62
|
+
return null;
|
|
63
|
+
}
|
|
64
|
+
};
|
|
65
|
+
var splitIntoSections = (text, maxTokens) => {
|
|
66
|
+
const estimatedTokens = Math.ceil(text.length / CHARS_PER_TOKEN);
|
|
67
|
+
if (estimatedTokens <= maxTokens) {
|
|
68
|
+
return [text];
|
|
69
|
+
}
|
|
70
|
+
const numSections = Math.ceil(estimatedTokens / maxTokens);
|
|
71
|
+
const charsPerSection = Math.floor(text.length / numSections);
|
|
72
|
+
const sections = [];
|
|
73
|
+
for (let i = 0; i < numSections; i++) {
|
|
74
|
+
const start = i * charsPerSection;
|
|
75
|
+
const end = i === numSections - 1 ? text.length : (i + 1) * charsPerSection;
|
|
76
|
+
sections.push(text.slice(start, end));
|
|
77
|
+
}
|
|
78
|
+
return sections;
|
|
79
|
+
};
|
|
80
|
+
|
|
81
|
+
// src/services/batch-summarizer.ts
|
|
82
|
+
var estimateTokens = (text) => Math.ceil(text.length / CHARS_PER_TOKEN);
|
|
83
|
+
var buildJsonl = (chapters, model) => {
|
|
84
|
+
const lines = [];
|
|
85
|
+
const metadata = [];
|
|
86
|
+
for (let i = 0; i < chapters.length; i++) {
|
|
87
|
+
const chapter = chapters[i];
|
|
88
|
+
const tokens = estimateTokens(chapter.content);
|
|
89
|
+
if (tokens <= SUMMARY_MAX_TOKENS) {
|
|
90
|
+
const line = {
|
|
91
|
+
custom_id: `summary-${i}`,
|
|
92
|
+
method: "POST",
|
|
93
|
+
url: "/v1/chat/completions",
|
|
94
|
+
body: {
|
|
95
|
+
model,
|
|
96
|
+
messages: [{ role: "user", content: SUMMARY_PROMPT(chapter.title, i + 1, chapter.content, SUMMARY_TARGET_WORDS) }]
|
|
97
|
+
}
|
|
98
|
+
};
|
|
99
|
+
lines.push(JSON.stringify(line));
|
|
100
|
+
metadata.push({ chapterIndex: i, title: chapter.title, needsTwoPass: false, sectionCount: 1 });
|
|
101
|
+
} else {
|
|
102
|
+
const sections = splitIntoSections(chapter.content, SUMMARY_MAX_TOKENS);
|
|
103
|
+
for (let s = 0; s < sections.length; s++) {
|
|
104
|
+
const line = {
|
|
105
|
+
custom_id: `section-${i}-${s}`,
|
|
106
|
+
method: "POST",
|
|
107
|
+
url: "/v1/chat/completions",
|
|
108
|
+
body: {
|
|
109
|
+
model,
|
|
110
|
+
messages: [{
|
|
111
|
+
role: "user",
|
|
112
|
+
content: `Summarize this section from chapter "${chapter.title}" (Part ${s + 1}). Focus on key events, characters, and revelations. Keep it concise (100-150 words):
|
|
113
|
+
|
|
114
|
+
${sections[s]}`
|
|
115
|
+
}]
|
|
116
|
+
}
|
|
117
|
+
};
|
|
118
|
+
lines.push(JSON.stringify(line));
|
|
119
|
+
}
|
|
120
|
+
metadata.push({ chapterIndex: i, title: chapter.title, needsTwoPass: true, sectionCount: sections.length });
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
return { jsonl: lines.join("\n"), metadata };
|
|
124
|
+
};
|
|
125
|
+
var submitBatchSummaries = async (chapters) => {
|
|
126
|
+
const models = await getModels();
|
|
127
|
+
const client = new OpenAI();
|
|
128
|
+
logInfo(`[BatchSummarizer] Preparing batch request for ${chapters.length} chapters`);
|
|
129
|
+
const { jsonl, metadata } = buildJsonl(chapters, models.summary);
|
|
130
|
+
const blob = new Blob([jsonl], { type: "application/jsonl" });
|
|
131
|
+
const file = await client.files.create({
|
|
132
|
+
file: new File([blob], "summaries.jsonl", { type: "application/jsonl" }),
|
|
133
|
+
purpose: "batch"
|
|
134
|
+
});
|
|
135
|
+
logInfo(`[BatchSummarizer] Uploaded input file ${file.id}`);
|
|
136
|
+
const batch = await client.batches.create({
|
|
137
|
+
input_file_id: file.id,
|
|
138
|
+
endpoint: "/v1/chat/completions",
|
|
139
|
+
completion_window: "24h"
|
|
140
|
+
});
|
|
141
|
+
logInfo(`[BatchSummarizer] Created batch ${batch.id} \u2014 status: ${batch.status}`);
|
|
142
|
+
return { batchId: batch.id, inputFileId: file.id, metadata };
|
|
143
|
+
};
|
|
144
|
+
var downloadBatchSummaryResults = async (outputFileId, chapters, metadata) => {
|
|
145
|
+
const client = new OpenAI();
|
|
146
|
+
logInfo(`[BatchSummarizer] Downloading results from ${outputFileId}`);
|
|
147
|
+
const response = await client.files.content(outputFileId);
|
|
148
|
+
const text = await response.text();
|
|
149
|
+
const lines = text.trim().split("\n");
|
|
150
|
+
const results = /* @__PURE__ */ new Map();
|
|
151
|
+
for (const line of lines) {
|
|
152
|
+
let result;
|
|
153
|
+
try {
|
|
154
|
+
result = JSON.parse(line);
|
|
155
|
+
} catch {
|
|
156
|
+
logWarn(`[BatchSummarizer] Skipping malformed JSONL line`);
|
|
157
|
+
continue;
|
|
158
|
+
}
|
|
159
|
+
if (result.response?.status_code === 200) {
|
|
160
|
+
const content = result.response.body?.choices?.[0]?.message?.content;
|
|
161
|
+
if (content) {
|
|
162
|
+
results.set(result.custom_id, content);
|
|
163
|
+
}
|
|
164
|
+
} else {
|
|
165
|
+
logWarn(`[BatchSummarizer] Request ${result.custom_id} failed: ${JSON.stringify(result.response?.body?.error ?? result.error)}`);
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
const summaries = [];
|
|
169
|
+
const needsMergePass = [];
|
|
170
|
+
for (const meta of metadata) {
|
|
171
|
+
if (!meta.needsTwoPass) {
|
|
172
|
+
const content = results.get(`summary-${meta.chapterIndex}`);
|
|
173
|
+
if (content) {
|
|
174
|
+
const summary = parseStructuredSummary(content, meta.chapterIndex, meta.title);
|
|
175
|
+
if (summary) summaries.push(summary);
|
|
176
|
+
}
|
|
177
|
+
} else {
|
|
178
|
+
const sectionSummaries = [];
|
|
179
|
+
let allPresent = true;
|
|
180
|
+
for (let s = 0; s < meta.sectionCount; s++) {
|
|
181
|
+
const content = results.get(`section-${meta.chapterIndex}-${s}`);
|
|
182
|
+
if (content) {
|
|
183
|
+
sectionSummaries.push(content);
|
|
184
|
+
} else {
|
|
185
|
+
allPresent = false;
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
if (allPresent && sectionSummaries.length > 0) {
|
|
189
|
+
needsMergePass.push({ chapterIndex: meta.chapterIndex, title: meta.title, sectionSummaries });
|
|
190
|
+
} else {
|
|
191
|
+
logWarn(`[BatchSummarizer] Missing section results for chapter ${meta.chapterIndex + 1} "${meta.title}"`);
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
}
|
|
195
|
+
logInfo(`[BatchSummarizer] Parsed ${summaries.length} direct summaries, ${needsMergePass.length} chapters need merge pass`);
|
|
196
|
+
return { summaries, needsMergePass };
|
|
197
|
+
};
|
|
198
|
+
var submitMergePass = async (mergeChapters) => {
|
|
199
|
+
const models = await getModels();
|
|
200
|
+
const client = new OpenAI();
|
|
201
|
+
const lines = [];
|
|
202
|
+
const metadata = [];
|
|
203
|
+
for (const ch of mergeChapters) {
|
|
204
|
+
const combined = ch.sectionSummaries.join("\n\n");
|
|
205
|
+
const line = {
|
|
206
|
+
custom_id: `summary-${ch.chapterIndex}`,
|
|
207
|
+
method: "POST",
|
|
208
|
+
url: "/v1/chat/completions",
|
|
209
|
+
body: {
|
|
210
|
+
model: models.summary,
|
|
211
|
+
messages: [{ role: "user", content: SUMMARY_PROMPT(ch.title, ch.chapterIndex + 1, combined, SUMMARY_TARGET_WORDS) }]
|
|
212
|
+
}
|
|
213
|
+
};
|
|
214
|
+
lines.push(JSON.stringify(line));
|
|
215
|
+
metadata.push({ chapterIndex: ch.chapterIndex, title: ch.title, needsTwoPass: false, sectionCount: 1 });
|
|
216
|
+
}
|
|
217
|
+
const jsonl = lines.join("\n");
|
|
218
|
+
const blob = new Blob([jsonl], { type: "application/jsonl" });
|
|
219
|
+
const file = await client.files.create({
|
|
220
|
+
file: new File([blob], "summaries-merge.jsonl", { type: "application/jsonl" }),
|
|
221
|
+
purpose: "batch"
|
|
222
|
+
});
|
|
223
|
+
logInfo(`[BatchSummarizer] Uploaded merge input file ${file.id} (${mergeChapters.length} chapters)`);
|
|
224
|
+
const batch = await client.batches.create({
|
|
225
|
+
input_file_id: file.id,
|
|
226
|
+
endpoint: "/v1/chat/completions",
|
|
227
|
+
completion_window: "24h"
|
|
228
|
+
});
|
|
229
|
+
logInfo(`[BatchSummarizer] Created merge batch ${batch.id} \u2014 status: ${batch.status}`);
|
|
230
|
+
return { batchId: batch.id, inputFileId: file.id, metadata };
|
|
231
|
+
};
|
|
232
|
+
// Download the merge-pass batch output file and parse each successful row back
// into a structured chapter summary. Rows that are malformed, failed, empty, or
// refer to an unknown chapter are logged/skipped rather than raised.
var downloadMergeResults = async (outputFileId, mergeChapters) => {
  const openai = new OpenAI();
  logInfo(`[BatchSummarizer] Downloading merge results from ${outputFileId}`);

  const output = await openai.files.content(outputFileId);
  const raw = await output.text();

  const summaries = [];
  for (const row of raw.trim().split("\n")) {
    let record;
    try {
      record = JSON.parse(row);
    } catch {
      logWarn(`[BatchSummarizer] Skipping malformed JSONL line in merge results`);
      continue;
    }

    // Non-200 rows carry an error payload; log it and move on.
    if (record.response?.status_code !== 200) {
      logWarn(`[BatchSummarizer] Merge request ${record.custom_id} failed: ${JSON.stringify(record.response?.body?.error ?? record.error)}`);
      continue;
    }

    const completion = record.response.body?.choices?.[0]?.message?.content;
    if (!completion) continue;

    // custom_id is "summary-{chapterIndex}"; recover the index to look up the title.
    const chapterIndex = Number(record.custom_id.replace("summary-", ""));
    const chapter = mergeChapters.find((c) => c.chapterIndex === chapterIndex);
    if (!chapter) continue;

    const parsed = parseStructuredSummary(completion, chapterIndex, chapter.title);
    if (parsed) summaries.push(parsed);
  }

  logInfo(`[BatchSummarizer] Parsed ${summaries.length} merged summaries`);
  return summaries;
};
|
|
264
|
+
|
|
265
|
+
export {
|
|
266
|
+
CHARS_PER_TOKEN,
|
|
267
|
+
SUMMARY_PROMPT,
|
|
268
|
+
parseStructuredSummary,
|
|
269
|
+
splitIntoSections,
|
|
270
|
+
submitBatchSummaries,
|
|
271
|
+
downloadBatchSummaryResults,
|
|
272
|
+
submitMergePass,
|
|
273
|
+
downloadMergeResults
|
|
274
|
+
};
|
|
275
|
+
//# sourceMappingURL=chunk-T6X7DRBN.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/services/batch-summarizer.ts","../src/shared/summary.ts"],"sourcesContent":["import OpenAI from \"openai\";\nimport type { Chapter, ChapterSummary } from \"../shared/types.js\";\nimport { SUMMARY_MAX_TOKENS, SUMMARY_TARGET_WORDS, getModels, logInfo, logWarn } from \"./constants.js\";\nimport { CHARS_PER_TOKEN, SUMMARY_PROMPT, parseStructuredSummary, splitIntoSections } from \"../shared/summary.js\";\n\nconst estimateTokens = (text: string): number => Math.ceil(text.length / CHARS_PER_TOKEN);\n\ntype BatchRequestLine = {\n custom_id: string;\n method: \"POST\";\n url: \"/v1/chat/completions\";\n body: {\n model: string;\n messages: { role: \"user\"; content: string }[];\n };\n};\n\nexport type SummaryBatchChapter = {\n chapterIndex: number;\n title: string;\n needsTwoPass: boolean;\n sectionCount: number;\n};\n\nconst buildJsonl = (chapters: Chapter[], model: string): { jsonl: string; metadata: SummaryBatchChapter[] } => {\n const lines: string[] = [];\n const metadata: SummaryBatchChapter[] = [];\n\n for (let i = 0; i < chapters.length; i++) {\n const chapter = chapters[i]!;\n const tokens = estimateTokens(chapter.content);\n\n if (tokens <= SUMMARY_MAX_TOKENS) {\n // Single-pass: one request for the structured summary\n const line: BatchRequestLine = {\n custom_id: `summary-${i}`,\n method: \"POST\",\n url: \"/v1/chat/completions\",\n body: {\n model,\n messages: [{ role: \"user\", content: SUMMARY_PROMPT(chapter.title, i + 1, chapter.content, SUMMARY_TARGET_WORDS) }],\n },\n };\n lines.push(JSON.stringify(line));\n metadata.push({ chapterIndex: i, title: chapter.title, needsTwoPass: false, sectionCount: 1 });\n } else {\n // Two-pass: first submit section summary requests, then a merge request\n const sections = splitIntoSections(chapter.content, SUMMARY_MAX_TOKENS);\n\n for (let s = 0; s < sections.length; s++) {\n const line: BatchRequestLine = {\n custom_id: `section-${i}-${s}`,\n method: \"POST\",\n url: 
\"/v1/chat/completions\",\n body: {\n model,\n messages: [{\n role: \"user\",\n content: `Summarize this section from chapter \"${chapter.title}\" (Part ${s + 1}). Focus on key events, characters, and revelations. Keep it concise (100-150 words):\\n\\n${sections[s]}`,\n }],\n },\n };\n lines.push(JSON.stringify(line));\n }\n\n metadata.push({ chapterIndex: i, title: chapter.title, needsTwoPass: true, sectionCount: sections.length });\n }\n }\n\n return { jsonl: lines.join(\"\\n\"), metadata };\n};\n\nexport type BatchSubmitResult = {\n batchId: string;\n inputFileId: string;\n metadata: SummaryBatchChapter[];\n};\n\nexport const submitBatchSummaries = async (chapters: Chapter[]): Promise<BatchSubmitResult> => {\n const models = await getModels();\n const client = new OpenAI();\n\n logInfo(`[BatchSummarizer] Preparing batch request for ${chapters.length} chapters`);\n\n const { jsonl, metadata } = buildJsonl(chapters, models.summary);\n const blob = new Blob([jsonl], { type: \"application/jsonl\" });\n const file = await client.files.create({\n file: new File([blob], \"summaries.jsonl\", { type: \"application/jsonl\" }),\n purpose: \"batch\",\n });\n logInfo(`[BatchSummarizer] Uploaded input file ${file.id}`);\n\n const batch = await client.batches.create({\n input_file_id: file.id,\n endpoint: \"/v1/chat/completions\",\n completion_window: \"24h\",\n });\n logInfo(`[BatchSummarizer] Created batch ${batch.id} — status: ${batch.status}`);\n\n return { batchId: batch.id, inputFileId: file.id, metadata };\n};\n\nexport const downloadBatchSummaryResults = async (\n outputFileId: string,\n chapters: Chapter[],\n metadata: SummaryBatchChapter[],\n): Promise<{ summaries: ChapterSummary[]; needsMergePass: { chapterIndex: number; title: string; sectionSummaries: string[] }[] }> => {\n const client = new OpenAI();\n\n logInfo(`[BatchSummarizer] Downloading results from ${outputFileId}`);\n const response = await client.files.content(outputFileId);\n const text = await 
response.text();\n const lines = text.trim().split(\"\\n\");\n\n const results = new Map<string, string>();\n for (const line of lines) {\n let result: any;\n try {\n result = JSON.parse(line);\n } catch {\n logWarn(`[BatchSummarizer] Skipping malformed JSONL line`);\n continue;\n }\n if (result.response?.status_code === 200) {\n const content = result.response.body?.choices?.[0]?.message?.content;\n if (content) {\n results.set(result.custom_id, content);\n }\n } else {\n logWarn(`[BatchSummarizer] Request ${result.custom_id} failed: ${JSON.stringify(result.response?.body?.error ?? result.error)}`);\n }\n }\n\n const summaries: ChapterSummary[] = [];\n const needsMergePass: { chapterIndex: number; title: string; sectionSummaries: string[] }[] = [];\n\n for (const meta of metadata) {\n if (!meta.needsTwoPass) {\n // Single-pass chapter: parse the structured summary directly\n const content = results.get(`summary-${meta.chapterIndex}`);\n if (content) {\n const summary = parseStructuredSummary(content, meta.chapterIndex, meta.title);\n if (summary) summaries.push(summary);\n }\n } else {\n // Two-pass chapter: collect section summaries, need a merge pass\n const sectionSummaries: string[] = [];\n let allPresent = true;\n for (let s = 0; s < meta.sectionCount; s++) {\n const content = results.get(`section-${meta.chapterIndex}-${s}`);\n if (content) {\n sectionSummaries.push(content);\n } else {\n allPresent = false;\n }\n }\n if (allPresent && sectionSummaries.length > 0) {\n needsMergePass.push({ chapterIndex: meta.chapterIndex, title: meta.title, sectionSummaries });\n } else {\n logWarn(`[BatchSummarizer] Missing section results for chapter ${meta.chapterIndex + 1} \"${meta.title}\"`);\n }\n }\n }\n\n logInfo(`[BatchSummarizer] Parsed ${summaries.length} direct summaries, ${needsMergePass.length} chapters need merge pass`);\n return { summaries, needsMergePass };\n};\n\n/**\n * Submit a second batch for the merge pass: combine section summaries into structured 
summaries.\n * Returns a new batch ID for the merge requests.\n */\nexport const submitMergePass = async (\n mergeChapters: { chapterIndex: number; title: string; sectionSummaries: string[] }[],\n): Promise<BatchSubmitResult> => {\n const models = await getModels();\n const client = new OpenAI();\n\n const lines: string[] = [];\n const metadata: SummaryBatchChapter[] = [];\n\n for (const ch of mergeChapters) {\n const combined = ch.sectionSummaries.join(\"\\n\\n\");\n const line: BatchRequestLine = {\n custom_id: `summary-${ch.chapterIndex}`,\n method: \"POST\",\n url: \"/v1/chat/completions\",\n body: {\n model: models.summary,\n messages: [{ role: \"user\", content: SUMMARY_PROMPT(ch.title, ch.chapterIndex + 1, combined, SUMMARY_TARGET_WORDS) }],\n },\n };\n lines.push(JSON.stringify(line));\n metadata.push({ chapterIndex: ch.chapterIndex, title: ch.title, needsTwoPass: false, sectionCount: 1 });\n }\n\n const jsonl = lines.join(\"\\n\");\n const blob = new Blob([jsonl], { type: \"application/jsonl\" });\n const file = await client.files.create({\n file: new File([blob], \"summaries-merge.jsonl\", { type: \"application/jsonl\" }),\n purpose: \"batch\",\n });\n\n logInfo(`[BatchSummarizer] Uploaded merge input file ${file.id} (${mergeChapters.length} chapters)`);\n\n const batch = await client.batches.create({\n input_file_id: file.id,\n endpoint: \"/v1/chat/completions\",\n completion_window: \"24h\",\n });\n logInfo(`[BatchSummarizer] Created merge batch ${batch.id} — status: ${batch.status}`);\n\n return { batchId: batch.id, inputFileId: file.id, metadata };\n};\n\nexport const downloadMergeResults = async (\n outputFileId: string,\n mergeChapters: { chapterIndex: number; title: string }[],\n): Promise<ChapterSummary[]> => {\n const client = new OpenAI();\n\n logInfo(`[BatchSummarizer] Downloading merge results from ${outputFileId}`);\n const response = await client.files.content(outputFileId);\n const text = await response.text();\n const lines = 
text.trim().split(\"\\n\");\n\n const summaries: ChapterSummary[] = [];\n for (const line of lines) {\n let result: any;\n try {\n result = JSON.parse(line);\n } catch {\n logWarn(`[BatchSummarizer] Skipping malformed JSONL line in merge results`);\n continue;\n }\n if (result.response?.status_code === 200) {\n const content = result.response.body?.choices?.[0]?.message?.content;\n if (content) {\n // Extract chapter index from custom_id: \"summary-{idx}\"\n const idx = Number(result.custom_id.replace(\"summary-\", \"\"));\n const meta = mergeChapters.find((ch) => ch.chapterIndex === idx);\n if (meta) {\n const summary = parseStructuredSummary(content, idx, meta.title);\n if (summary) summaries.push(summary);\n }\n }\n } else {\n logWarn(`[BatchSummarizer] Merge request ${result.custom_id} failed: ${JSON.stringify(result.response?.body?.error ?? result.error)}`);\n }\n }\n\n logInfo(`[BatchSummarizer] Parsed ${summaries.length} merged summaries`);\n return summaries;\n};\n","import type { ChapterSummary } from \"./types.js\";\nimport { logWarn } from \"../commands/io.js\";\n\nexport const CHARS_PER_TOKEN = 4;\n\nexport type SummaryJSON = {\n characters: string[];\n events: string;\n setting: string;\n revelations: string;\n};\n\nexport const SUMMARY_PROMPT = (title: string, chapterNum: number, content: string, targetWords: number) => `You are analyzing a chapter from a book (fiction or nonfiction). Extract key information to help readers understand the chapter's content.\n\nChapter Title: ${title}\nChapter Number: ${chapterNum}\n\n---\n${content}\n---\n\nExtract the following information and respond ONLY with valid JSON (no markdown, no code blocks):\n\n{\n \"characters\": [\"Name - brief description (role, traits, first appearance)\", ...],\n \"events\": \"What happens in this chapter? (2-3 sentences)\",\n \"setting\": \"Where does this chapter take place?\",\n \"revelations\": \"Any important information revealed? 
(secrets, backstory, foreshadowing)\"\n}\n\nKeep the total response around ${targetWords} words.`;\n\nexport const parseStructuredSummary = (text: string, chapterIndex: number, title: string): ChapterSummary | null => {\n try {\n let jsonText = text.trim();\n if (jsonText.startsWith(\"```json\")) {\n jsonText = jsonText.slice(7, -3).trim();\n } else if (jsonText.startsWith(\"```\")) {\n jsonText = jsonText.slice(3, -3).trim();\n }\n\n const parsed: SummaryJSON = JSON.parse(jsonText);\n\n const fullSummary = `Chapter ${chapterIndex + 1}: ${title}\n\nCharacters: ${parsed.characters.join(\", \")}\n\nEvents: ${parsed.events}\n\nSetting: ${parsed.setting}\n\nRevelations: ${parsed.revelations}`;\n\n return {\n chapterIndex,\n chapterTitle: title,\n characters: parsed.characters,\n events: parsed.events,\n setting: parsed.setting,\n revelations: parsed.revelations,\n fullSummary,\n };\n } catch (error) {\n logWarn(`[Summary] Failed to parse summary JSON for \"${title}\": ${error instanceof Error ? error.message : String(error)}`);\n return null;\n }\n};\n\nexport const splitIntoSections = (text: string, maxTokens: number): string[] => {\n const estimatedTokens = Math.ceil(text.length / CHARS_PER_TOKEN);\n\n if (estimatedTokens <= maxTokens) {\n return [text];\n }\n\n const numSections = Math.ceil(estimatedTokens / maxTokens);\n const charsPerSection = Math.floor(text.length / numSections);\n const sections: string[] = [];\n\n for (let i = 0; i < numSections; i++) {\n const start = i * charsPerSection;\n const end = i === numSections - 1 ? 
text.length : (i + 1) * charsPerSection;\n sections.push(text.slice(start, end));\n }\n\n return sections;\n};\n"],"mappings":";;;;;;;;;AAAA,OAAO,YAAY;;;ACGZ,IAAM,kBAAkB;AASxB,IAAM,iBAAiB,CAAC,OAAe,YAAoB,SAAiB,gBAAwB;AAAA;AAAA,iBAE1F,KAAK;AAAA,kBACJ,UAAU;AAAA;AAAA;AAAA,EAG1B,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iCAYwB,WAAW;AAErC,IAAM,yBAAyB,CAAC,MAAc,cAAsB,UAAyC;AAClH,MAAI;AACF,QAAI,WAAW,KAAK,KAAK;AACzB,QAAI,SAAS,WAAW,SAAS,GAAG;AAClC,iBAAW,SAAS,MAAM,GAAG,EAAE,EAAE,KAAK;AAAA,IACxC,WAAW,SAAS,WAAW,KAAK,GAAG;AACrC,iBAAW,SAAS,MAAM,GAAG,EAAE,EAAE,KAAK;AAAA,IACxC;AAEA,UAAM,SAAsB,KAAK,MAAM,QAAQ;AAE/C,UAAM,cAAc,WAAW,eAAe,CAAC,KAAK,KAAK;AAAA;AAAA,cAE/C,OAAO,WAAW,KAAK,IAAI,CAAC;AAAA;AAAA,UAEhC,OAAO,MAAM;AAAA;AAAA,WAEZ,OAAO,OAAO;AAAA;AAAA,eAEV,OAAO,WAAW;AAE7B,WAAO;AAAA,MACL;AAAA,MACA,cAAc;AAAA,MACd,YAAY,OAAO;AAAA,MACnB,QAAQ,OAAO;AAAA,MACf,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,MACpB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,+CAA+C,KAAK,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAC1H,WAAO;AAAA,EACT;AACF;AAEO,IAAM,oBAAoB,CAAC,MAAc,cAAgC;AAC9E,QAAM,kBAAkB,KAAK,KAAK,KAAK,SAAS,eAAe;AAE/D,MAAI,mBAAmB,WAAW;AAChC,WAAO,CAAC,IAAI;AAAA,EACd;AAEA,QAAM,cAAc,KAAK,KAAK,kBAAkB,SAAS;AACzD,QAAM,kBAAkB,KAAK,MAAM,KAAK,SAAS,WAAW;AAC5D,QAAM,WAAqB,CAAC;AAE5B,WAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,UAAM,QAAQ,IAAI;AAClB,UAAM,MAAM,MAAM,cAAc,IAAI,KAAK,UAAU,IAAI,KAAK;AAC5D,aAAS,KAAK,KAAK,MAAM,OAAO,GAAG,CAAC;AAAA,EACtC;AAEA,SAAO;AACT;;;ADjFA,IAAM,iBAAiB,CAAC,SAAyB,KAAK,KAAK,KAAK,SAAS,eAAe;AAmBxF,IAAM,aAAa,CAAC,UAAqB,UAAsE;AAC7G,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAkC,CAAC;AAEzC,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,UAAM,UAAU,SAAS,CAAC;AAC1B,UAAM,SAAS,eAAe,QAAQ,OAAO;AAE7C,QAAI,UAAU,oBAAoB;AAEhC,YAAM,OAAyB;AAAA,QAC7B,WAAW,WAAW,CAAC;AAAA,QACvB,QAAQ;AAAA,QACR,KAAK;AAAA,QACL,MAAM;AAAA,UACJ;AAAA,UACA,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,eAAe,QAAQ,OAAO,IAAI,GAAG,QAAQ,SAAS,oBAAoB,EAAE,CAAC;AAAA,QACnH;AAAA,MACF;AACA,YAAM,KAAK,KAAK,UAAU,IAAI,CAAC;AAC/B,eAAS,KAAK,EAAE,cAAc,GAAG,OAAO,QAAQ,OAAO,cAAc,OAAO,c
AAc,EAAE,CAAC;AAAA,IAC/F,OAAO;AAEL,YAAM,WAAW,kBAAkB,QAAQ,SAAS,kBAAkB;AAEtE,eAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,cAAM,OAAyB;AAAA,UAC7B,WAAW,WAAW,CAAC,IAAI,CAAC;AAAA,UAC5B,QAAQ;AAAA,UACR,KAAK;AAAA,UACH,MAAM;AAAA,YACJ;AAAA,YACA,UAAU,CAAC;AAAA,cACT,MAAM;AAAA,cACN,SAAS,wCAAwC,QAAQ,KAAK,WAAW,IAAI,CAAC;AAAA;AAAA,EAA4F,SAAS,CAAC,CAAC;AAAA,YACvL,CAAC;AAAA,UACH;AAAA,QACJ;AACA,cAAM,KAAK,KAAK,UAAU,IAAI,CAAC;AAAA,MACjC;AAEA,eAAS,KAAK,EAAE,cAAc,GAAG,OAAO,QAAQ,OAAO,cAAc,MAAM,cAAc,SAAS,OAAO,CAAC;AAAA,IAC5G;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,MAAM,KAAK,IAAI,GAAG,SAAS;AAC7C;AAQO,IAAM,uBAAuB,OAAO,aAAoD;AAC7F,QAAM,SAAS,MAAM,UAAU;AAC/B,QAAM,SAAS,IAAI,OAAO;AAE1B,UAAQ,iDAAiD,SAAS,MAAM,WAAW;AAEnF,QAAM,EAAE,OAAO,SAAS,IAAI,WAAW,UAAU,OAAO,OAAO;AAC/D,QAAM,OAAO,IAAI,KAAK,CAAC,KAAK,GAAG,EAAE,MAAM,oBAAoB,CAAC;AAC5D,QAAM,OAAO,MAAM,OAAO,MAAM,OAAO;AAAA,IACrC,MAAM,IAAI,KAAK,CAAC,IAAI,GAAG,mBAAmB,EAAE,MAAM,oBAAoB,CAAC;AAAA,IACvE,SAAS;AAAA,EACX,CAAC;AACD,UAAQ,yCAAyC,KAAK,EAAE,EAAE;AAE1D,QAAM,QAAQ,MAAM,OAAO,QAAQ,OAAO;AAAA,IACxC,eAAe,KAAK;AAAA,IACpB,UAAU;AAAA,IACV,mBAAmB;AAAA,EACrB,CAAC;AACD,UAAQ,mCAAmC,MAAM,EAAE,mBAAc,MAAM,MAAM,EAAE;AAE/E,SAAO,EAAE,SAAS,MAAM,IAAI,aAAa,KAAK,IAAI,SAAS;AAC7D;AAEO,IAAM,8BAA8B,OACzC,cACA,UACA,aACoI;AACpI,QAAM,SAAS,IAAI,OAAO;AAE1B,UAAQ,8CAA8C,YAAY,EAAE;AACpE,QAAM,WAAW,MAAM,OAAO,MAAM,QAAQ,YAAY;AACxD,QAAM,OAAO,MAAM,SAAS,KAAK;AACjC,QAAM,QAAQ,KAAK,KAAK,EAAE,MAAM,IAAI;AAEpC,QAAM,UAAU,oBAAI,IAAoB;AACxC,aAAW,QAAQ,OAAO;AACxB,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,IAAI;AAAA,IAC1B,QAAQ;AACN,cAAQ,iDAAiD;AACzD;AAAA,IACF;AACA,QAAI,OAAO,UAAU,gBAAgB,KAAK;AACxC,YAAM,UAAU,OAAO,SAAS,MAAM,UAAU,CAAC,GAAG,SAAS;AAC7D,UAAI,SAAS;AACX,gBAAQ,IAAI,OAAO,WAAW,OAAO;AAAA,MACvC;AAAA,IACF,OAAO;AACL,cAAQ,6BAA6B,OAAO,SAAS,YAAY,KAAK,UAAU,OAAO,UAAU,MAAM,SAAS,OAAO,KAAK,CAAC,EAAE;AAAA,IACjI;AAAA,EACF;AAEA,QAAM,YAA8B,CAAC;AACrC,QAAM,iBAAwF,CAAC;AAE/F,aAAW,QAAQ,UAAU;AAC3B,QAAI,CAAC,KAAK,cAAc;AAEtB,YAAM,UAAU,QAAQ,IAAI,WAAW,KAAK,YAAY,EAAE;AAC1D,UAAI,SAAS;AACX,cAAM,UAAU,uBAAuB,SAAS,KAAK,cAAc,KAAK,KAAK;AAC7E,YAAI,QAAS,WAAU,KA
AK,OAAO;AAAA,MACrC;AAAA,IACF,OAAO;AAEL,YAAM,mBAA6B,CAAC;AACpC,UAAI,aAAa;AACjB,eAAS,IAAI,GAAG,IAAI,KAAK,cAAc,KAAK;AAC1C,cAAM,UAAU,QAAQ,IAAI,WAAW,KAAK,YAAY,IAAI,CAAC,EAAE;AAC/D,YAAI,SAAS;AACX,2BAAiB,KAAK,OAAO;AAAA,QAC/B,OAAO;AACL,uBAAa;AAAA,QACf;AAAA,MACF;AACA,UAAI,cAAc,iBAAiB,SAAS,GAAG;AAC7C,uBAAe,KAAK,EAAE,cAAc,KAAK,cAAc,OAAO,KAAK,OAAO,iBAAiB,CAAC;AAAA,MAC9F,OAAO;AACL,gBAAQ,yDAAyD,KAAK,eAAe,CAAC,KAAK,KAAK,KAAK,GAAG;AAAA,MAC1G;AAAA,IACF;AAAA,EACF;AAEA,UAAQ,4BAA4B,UAAU,MAAM,sBAAsB,eAAe,MAAM,2BAA2B;AAC1H,SAAO,EAAE,WAAW,eAAe;AACrC;AAMO,IAAM,kBAAkB,OAC7B,kBAC+B;AAC/B,QAAM,SAAS,MAAM,UAAU;AAC/B,QAAM,SAAS,IAAI,OAAO;AAE1B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAkC,CAAC;AAEzC,aAAW,MAAM,eAAe;AAC9B,UAAM,WAAW,GAAG,iBAAiB,KAAK,MAAM;AAChD,UAAM,OAAyB;AAAA,MAC7B,WAAW,WAAW,GAAG,YAAY;AAAA,MACrC,QAAQ;AAAA,MACR,KAAK;AAAA,MACL,MAAM;AAAA,QACJ,OAAO,OAAO;AAAA,QACd,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,eAAe,GAAG,OAAO,GAAG,eAAe,GAAG,UAAU,oBAAoB,EAAE,CAAC;AAAA,MACrH;AAAA,IACF;AACA,UAAM,KAAK,KAAK,UAAU,IAAI,CAAC;AAC/B,aAAS,KAAK,EAAE,cAAc,GAAG,cAAc,OAAO,GAAG,OAAO,cAAc,OAAO,cAAc,EAAE,CAAC;AAAA,EACxG;AAEA,QAAM,QAAQ,MAAM,KAAK,IAAI;AAC7B,QAAM,OAAO,IAAI,KAAK,CAAC,KAAK,GAAG,EAAE,MAAM,oBAAoB,CAAC;AAC5D,QAAM,OAAO,MAAM,OAAO,MAAM,OAAO;AAAA,IACrC,MAAM,IAAI,KAAK,CAAC,IAAI,GAAG,yBAAyB,EAAE,MAAM,oBAAoB,CAAC;AAAA,IAC7E,SAAS;AAAA,EACX,CAAC;AAED,UAAQ,+CAA+C,KAAK,EAAE,KAAK,cAAc,MAAM,YAAY;AAEnG,QAAM,QAAQ,MAAM,OAAO,QAAQ,OAAO;AAAA,IACxC,eAAe,KAAK;AAAA,IACpB,UAAU;AAAA,IACV,mBAAmB;AAAA,EACrB,CAAC;AACD,UAAQ,yCAAyC,MAAM,EAAE,mBAAc,MAAM,MAAM,EAAE;AAErF,SAAO,EAAE,SAAS,MAAM,IAAI,aAAa,KAAK,IAAI,SAAS;AAC7D;AAEO,IAAM,uBAAuB,OAClC,cACA,kBAC8B;AAC9B,QAAM,SAAS,IAAI,OAAO;AAE1B,UAAQ,oDAAoD,YAAY,EAAE;AAC1E,QAAM,WAAW,MAAM,OAAO,MAAM,QAAQ,YAAY;AACxD,QAAM,OAAO,MAAM,SAAS,KAAK;AACjC,QAAM,QAAQ,KAAK,KAAK,EAAE,MAAM,IAAI;AAEpC,QAAM,YAA8B,CAAC;AACrC,aAAW,QAAQ,OAAO;AACxB,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,IAAI;AAAA,IAC1B,QAAQ;AACN,cAAQ,kEAAkE;AAC1E;AAAA,IACF;AACA,QAAI,OAAO,UAAU,gBAAgB,KAAK;AACxC,YAAM,UAAU,OAAO,SAAS,MAAM,UAAU,CAAC,GAAG,SAAS;AAC7D,UAAI,SAAS;AAEX,c
AAM,MAAM,OAAO,OAAO,UAAU,QAAQ,YAAY,EAAE,CAAC;AAC3D,cAAM,OAAO,cAAc,KAAK,CAAC,OAAO,GAAG,iBAAiB,GAAG;AAC/D,YAAI,MAAM;AACR,gBAAM,UAAU,uBAAuB,SAAS,KAAK,KAAK,KAAK;AAC/D,cAAI,QAAS,WAAU,KAAK,OAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF,OAAO;AACL,cAAQ,mCAAmC,OAAO,SAAS,YAAY,KAAK,UAAU,OAAO,UAAU,MAAM,SAAS,OAAO,KAAK,CAAC,EAAE;AAAA,IACvI;AAAA,EACF;AAEA,UAAQ,4BAA4B,UAAU,MAAM,mBAAmB;AACvE,SAAO;AACT;","names":[]}
|