@bonvoy/plugin-ai 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +37 -0
- package/dist/index.mjs +161 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +1 -1
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { BonvoyPlugin } from "@bonvoy/core";

//#region src/providers/index.d.ts
/** Minimal contract a text-generation backend must implement. */
interface AiProvider {
  /** Generates text for `prompt`, capped at `maxTokens` output tokens. */
  generateText(prompt: string, maxTokens: number): Promise<string>;
}
/** Identifiers of the supported AI backends. */
type ProviderName = "openai" | "anthropic" | "gemini";
/** Configuration resolved for a single provider instance. */
interface ProviderConfig {
  /** Which backend to use. */
  provider: ProviderName;
  /** Model identifier; each provider falls back to its own default when omitted. */
  model?: string;
  /** Explicit API key; when omitted the provider's conventional env var is used. */
  apiKey?: string;
  /** Maximum output tokens for a single generation request. */
  maxTokens?: number;
}
//#endregion
//#region src/ai.d.ts
/** User-facing plugin configuration. */
interface AiPluginConfig {
  /** Which backend to use. */
  provider: ProviderName;
  /** Model identifier; defaults per provider when omitted. */
  model?: string;
  /** Explicit API key; when omitted the provider's conventional env var is used. */
  apiKey?: string;
  /** Custom prompt template with `{packageName}`, `{version}`, `{commitList}` placeholders. */
  promptTemplate?: string;
  /** Maximum output tokens for the summary request. */
  maxTokens?: number;
}
/**
 * Bonvoy plugin that asks an AI provider for a short release summary and
 * inserts it into each package's generated changelog.
 */
declare class AiPlugin implements BonvoyPlugin {
  name: string;
  private provider;
  private config;
  constructor(config: AiPluginConfig);
  /** Registers an `afterChangelog` hook on the host Bonvoy instance. */
  apply(bonvoy: {
    hooks: {
      afterChangelog: any;
    };
  }): void;
  private createProvider;
}
//#endregion
export { type AiPluginConfig, type AiProvider, type ProviderConfig, type ProviderName, AiPlugin as default };
//# sourceMappingURL=index.d.mts.map
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
//#region src/prompt.ts
const DEFAULT_PROMPT = `You are a release notes writer. Given a list of commits for a software package, write a 2-3 sentence summary of what changed in this release.

Be concise and specific. Focus on what matters to users, not implementation details.
Do not use bullet points. Do not repeat commit messages verbatim.
Write in present tense ("adds", "fixes", "improves").

Package: {packageName}
Version: {version}

Commits:
{commitList}`;
// Cap the prompt size; commits beyond this are silently dropped.
const MAX_COMMITS = 50;
/**
 * Build the LLM prompt for a release summary.
 *
 * @param commits - commit objects with a `message` property (first MAX_COMMITS used)
 * @param packageName - substituted for every `{packageName}` placeholder
 * @param version - substituted for every `{version}` placeholder
 * @param template - optional custom template; defaults to DEFAULT_PROMPT
 * @returns the fully substituted prompt string
 */
function buildPrompt(commits, packageName, version, template) {
	const commitList = commits.slice(0, MAX_COMMITS).map((c) => `- ${c.message}`).join("\n");
	// Use split/join rather than String.replace so that:
	//  1. every occurrence of a placeholder is substituted (replace with a
	//     string pattern only hits the first — broken for custom templates), and
	//  2. "$" sequences in values (e.g. "$&" in a commit message) are not
	//     interpreted as special replacement patterns.
	return (template ?? DEFAULT_PROMPT)
		.split("{packageName}").join(packageName)
		.split("{version}").join(version)
		.split("{commitList}").join(commitList);
}
|
|
18
|
+
/**
 * Insert an AI-generated summary into a changelog entry as a Markdown
 * blockquote, placed directly beneath the first line (the version header).
 *
 * @param changelog - the generated changelog entry for one package
 * @param summary - the AI summary; may span multiple lines
 * @returns the changelog with the quoted summary inserted
 */
function insertSummary(changelog, summary) {
	const quoted = summary
		.split("\n")
		.map((line) => `> ${line}`)
		.join("\n");
	const headerEnd = changelog.indexOf("\n");
	// Single-line changelog: just append the blockquote below it.
	if (headerEnd === -1) {
		return `${changelog}\n\n${quoted}`;
	}
	const header = changelog.slice(0, headerEnd);
	const body = changelog.slice(headerEnd + 1);
	return `${header}\n\n${quoted}\n${body}`;
}
|
|
24
|
+
|
|
25
|
+
//#endregion
|
|
26
|
+
//#region src/providers/anthropic.ts
const DEFAULT_MODEL$2 = "claude-sonnet-4-20250514";
const ENDPOINT$2 = "https://api.anthropic.com/v1/messages";
/** Text generation backed by Anthropic's Messages API. */
var AnthropicProvider = class {
	apiKey;
	model;
	constructor(apiKey, model) {
		this.apiKey = apiKey;
		// Fall back to the pinned default model when none is configured.
		this.model = model ?? DEFAULT_MODEL$2;
	}
	/** POSTs the prompt to the Messages endpoint and returns the trimmed reply text. */
	async generateText(prompt, maxTokens) {
		const payload = {
			model: this.model,
			messages: [{ role: "user", content: prompt }],
			max_tokens: maxTokens
		};
		const response = await fetch(ENDPOINT$2, {
			method: "POST",
			headers: {
				"Content-Type": "application/json",
				"x-api-key": this.apiKey,
				"anthropic-version": "2023-06-01"
			},
			body: JSON.stringify(payload)
		});
		if (!response.ok) {
			throw new Error(`Anthropic API error: ${response.status} ${response.statusText}`);
		}
		const data = await response.json();
		// Response shape: { content: [{ text: string }] }
		return data.content[0].text.trim();
	}
};
|
|
57
|
+
|
|
58
|
+
//#endregion
|
|
59
|
+
//#region src/providers/gemini.ts
const DEFAULT_MODEL$1 = "gemini-2.0-flash";
const ENDPOINT$1 = "https://generativelanguage.googleapis.com/v1beta/models";
/** Text generation backed by Google's Gemini generateContent API. */
var GeminiProvider = class {
	apiKey;
	model;
	constructor(apiKey, model) {
		this.apiKey = apiKey;
		// Fall back to the pinned default model when none is configured.
		this.model = model ?? DEFAULT_MODEL$1;
	}
	/** POSTs the prompt to the model's generateContent endpoint and returns the trimmed reply. */
	async generateText(prompt, maxTokens) {
		// Gemini authenticates via a `key` query parameter rather than a header.
		const url = `${ENDPOINT$1}/${this.model}:generateContent?key=${this.apiKey}`;
		const payload = {
			contents: [{ parts: [{ text: prompt }] }],
			generationConfig: { maxOutputTokens: maxTokens }
		};
		const response = await fetch(url, {
			method: "POST",
			headers: { "Content-Type": "application/json" },
			body: JSON.stringify(payload)
		});
		if (!response.ok) {
			throw new Error(`Gemini API error: ${response.status} ${response.statusText}`);
		}
		const data = await response.json();
		// Response shape: { candidates: [{ content: { parts: [{ text: string }] } }] }
		return data.candidates[0].content.parts[0].text.trim();
	}
};
|
|
82
|
+
|
|
83
|
+
//#endregion
|
|
84
|
+
//#region src/providers/openai.ts
const DEFAULT_MODEL = "gpt-4o-mini";
const ENDPOINT = "https://api.openai.com/v1/chat/completions";
/** Text generation backed by OpenAI's Chat Completions API. */
var OpenAiProvider = class {
	apiKey;
	model;
	constructor(apiKey, model) {
		this.apiKey = apiKey;
		// Fall back to the pinned default model when none is configured.
		this.model = model ?? DEFAULT_MODEL;
	}
	/** POSTs the prompt to the Chat Completions endpoint and returns the trimmed reply. */
	async generateText(prompt, maxTokens) {
		const payload = {
			model: this.model,
			messages: [{ role: "user", content: prompt }],
			max_tokens: maxTokens
		};
		const response = await fetch(ENDPOINT, {
			method: "POST",
			headers: {
				"Content-Type": "application/json",
				Authorization: `Bearer ${this.apiKey}`
			},
			body: JSON.stringify(payload)
		});
		if (!response.ok) {
			throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`);
		}
		const data = await response.json();
		// Response shape: { choices: [{ message: { content: string } }] }
		return data.choices[0].message.content.trim();
	}
};
|
|
114
|
+
|
|
115
|
+
//#endregion
|
|
116
|
+
//#region src/ai.ts
// Environment variable consulted per provider when no apiKey is configured.
const ENV_KEYS = {
	openai: "OPENAI_API_KEY",
	anthropic: "ANTHROPIC_API_KEY",
	gemini: "GEMINI_API_KEY"
};
const DEFAULT_MAX_TOKENS = 200;
/**
 * Bonvoy plugin that asks an AI provider for a short release summary and
 * inserts it (as a blockquote) into each package's generated changelog via
 * the `afterChangelog` hook. AI failures are logged and never fail a release.
 */
var AiPlugin = class {
	name = "ai";
	provider;
	config;
	/**
	 * @param config - { provider, model?, apiKey?, promptTemplate?, maxTokens? }
	 * @throws Error when no API key is available or the provider name is unknown
	 */
	constructor(config) {
		this.config = config;
		// Explicit config wins; otherwise read the provider's conventional env var.
		const apiKey = config.apiKey ?? process.env[ENV_KEYS[config.provider]];
		if (!apiKey) throw new Error(`@bonvoy/plugin-ai: Missing API key. Set ${ENV_KEYS[config.provider]} or pass apiKey in config.`);
		this.provider = this.createProvider(config.provider, apiKey, config.model);
	}
	/** Registers the summary-injection hook on the host Bonvoy instance. */
	apply(bonvoy) {
		bonvoy.hooks.afterChangelog.tapPromise(this.name, async (context) => {
			const pkg = context.currentPackage;
			// Skip dry runs: no external API calls should happen then.
			if (!pkg || context.isDryRun) return;
			const changelog = context.changelogs[pkg.name];
			if (!changelog) return;
			// Only summarize commits that touched this package.
			const commits = context.commits?.filter((c) => c.packages.includes(pkg.name)) ?? [];
			if (!commits.length) return;
			try {
				const prompt = buildPrompt(commits, pkg.name, pkg.version, this.config.promptTemplate);
				const summary = await this.provider.generateText(prompt, this.config.maxTokens ?? DEFAULT_MAX_TOKENS);
				if (summary) context.changelogs[pkg.name] = insertSummary(changelog, summary);
			} catch (error) {
				// A failed AI call must never fail the release; warn and continue.
				context.logger.warn(` ⚠️ AI summary failed for ${pkg.name}: ${error}`);
			}
		});
	}
	/** Maps a provider name to a concrete AiProvider instance. */
	createProvider(name, apiKey, model) {
		switch (name) {
			case "openai": return new OpenAiProvider(apiKey, model);
			case "anthropic": return new AnthropicProvider(apiKey, model);
			case "gemini": return new GeminiProvider(apiKey, model);
			// TypeScript guarantees exhaustiveness at compile time, but this is
			// emitted JS: without a default, an unrecognized provider name would
			// return undefined and crash later with an opaque TypeError.
			default: throw new Error(`@bonvoy/plugin-ai: Unknown provider "${name}".`);
		}
	}
};
|
|
158
|
+
|
|
159
|
+
//#endregion
|
|
160
|
+
export { AiPlugin as default };
|
|
161
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.mjs","names":["DEFAULT_MODEL","ENDPOINT","DEFAULT_MODEL","ENDPOINT"],"sources":["../src/prompt.ts","../src/providers/anthropic.ts","../src/providers/gemini.ts","../src/providers/openai.ts","../src/ai.ts"],"sourcesContent":["import type { CommitInfo } from '@bonvoy/core';\n\nconst DEFAULT_PROMPT = `You are a release notes writer. Given a list of commits for a software package, write a 2-3 sentence summary of what changed in this release.\n\nBe concise and specific. Focus on what matters to users, not implementation details.\nDo not use bullet points. Do not repeat commit messages verbatim.\nWrite in present tense (\"adds\", \"fixes\", \"improves\").\n\nPackage: {packageName}\nVersion: {version}\n\nCommits:\n{commitList}`;\n\nconst MAX_COMMITS = 50;\n\nexport function buildPrompt(\n commits: CommitInfo[],\n packageName: string,\n version: string,\n template?: string,\n): string {\n const truncated = commits.slice(0, MAX_COMMITS);\n const commitList = truncated.map((c) => `- ${c.message}`).join('\\n');\n\n return (template ?? 
DEFAULT_PROMPT)\n .replace('{packageName}', packageName)\n .replace('{version}', version)\n .replace('{commitList}', commitList);\n}\n\nexport function insertSummary(changelog: string, summary: string): string {\n const blockquote = summary\n .split('\\n')\n .map((line) => `> ${line}`)\n .join('\\n');\n\n // Insert after the first line (version header)\n const firstNewline = changelog.indexOf('\\n');\n if (firstNewline === -1) {\n return `${changelog}\\n\\n${blockquote}`;\n }\n\n const header = changelog.slice(0, firstNewline);\n const rest = changelog.slice(firstNewline + 1);\n return `${header}\\n\\n${blockquote}\\n${rest}`;\n}\n","import type { AiProvider } from './index.js';\n\nconst DEFAULT_MODEL = 'claude-sonnet-4-20250514';\nconst ENDPOINT = 'https://api.anthropic.com/v1/messages';\n\nexport class AnthropicProvider implements AiProvider {\n private apiKey: string;\n private model: string;\n\n constructor(apiKey: string, model?: string) {\n this.apiKey = apiKey;\n this.model = model ?? 
DEFAULT_MODEL;\n }\n\n async generateText(prompt: string, maxTokens: number): Promise<string> {\n const response = await fetch(ENDPOINT, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'x-api-key': this.apiKey,\n 'anthropic-version': '2023-06-01',\n },\n body: JSON.stringify({\n model: this.model,\n messages: [{ role: 'user', content: prompt }],\n max_tokens: maxTokens,\n }),\n });\n\n if (!response.ok) {\n throw new Error(`Anthropic API error: ${response.status} ${response.statusText}`);\n }\n\n const data = (await response.json()) as { content: [{ text: string }] };\n return data.content[0].text.trim();\n }\n}\n","import type { AiProvider } from './index.js';\n\nconst DEFAULT_MODEL = 'gemini-2.0-flash';\nconst ENDPOINT = 'https://generativelanguage.googleapis.com/v1beta/models';\n\nexport class GeminiProvider implements AiProvider {\n private apiKey: string;\n private model: string;\n\n constructor(apiKey: string, model?: string) {\n this.apiKey = apiKey;\n this.model = model ?? 
DEFAULT_MODEL;\n }\n\n async generateText(prompt: string, maxTokens: number): Promise<string> {\n const response = await fetch(`${ENDPOINT}/${this.model}:generateContent?key=${this.apiKey}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n contents: [{ parts: [{ text: prompt }] }],\n generationConfig: { maxOutputTokens: maxTokens },\n }),\n });\n\n if (!response.ok) {\n throw new Error(`Gemini API error: ${response.status} ${response.statusText}`);\n }\n\n const data = (await response.json()) as {\n candidates: [{ content: { parts: [{ text: string }] } }];\n };\n return data.candidates[0].content.parts[0].text.trim();\n }\n}\n","import type { AiProvider } from './index.js';\n\nconst DEFAULT_MODEL = 'gpt-4o-mini';\nconst ENDPOINT = 'https://api.openai.com/v1/chat/completions';\n\nexport class OpenAiProvider implements AiProvider {\n private apiKey: string;\n private model: string;\n\n constructor(apiKey: string, model?: string) {\n this.apiKey = apiKey;\n this.model = model ?? 
DEFAULT_MODEL;\n }\n\n async generateText(prompt: string, maxTokens: number): Promise<string> {\n const response = await fetch(ENDPOINT, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${this.apiKey}`,\n },\n body: JSON.stringify({\n model: this.model,\n messages: [{ role: 'user', content: prompt }],\n max_tokens: maxTokens,\n }),\n });\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`);\n }\n\n const data = (await response.json()) as { choices: [{ message: { content: string } }] };\n return data.choices[0].message.content.trim();\n }\n}\n","import type { BonvoyPlugin, ChangelogContext } from '@bonvoy/core';\n\nimport { buildPrompt, insertSummary } from './prompt.js';\nimport { AnthropicProvider } from './providers/anthropic.js';\nimport { GeminiProvider } from './providers/gemini.js';\nimport type { AiProvider, ProviderName } from './providers/index.js';\nimport { OpenAiProvider } from './providers/openai.js';\n\nexport interface AiPluginConfig {\n provider: ProviderName;\n model?: string;\n apiKey?: string;\n promptTemplate?: string;\n maxTokens?: number;\n}\n\nconst ENV_KEYS: Record<ProviderName, string> = {\n openai: 'OPENAI_API_KEY',\n anthropic: 'ANTHROPIC_API_KEY',\n gemini: 'GEMINI_API_KEY',\n};\n\nconst DEFAULT_MAX_TOKENS = 200;\n\nexport default class AiPlugin implements BonvoyPlugin {\n name = 'ai';\n private provider: AiProvider;\n private config: AiPluginConfig;\n\n constructor(config: AiPluginConfig) {\n this.config = config;\n const apiKey = config.apiKey ?? process.env[ENV_KEYS[config.provider]];\n if (!apiKey) {\n throw new Error(\n `@bonvoy/plugin-ai: Missing API key. 
Set ${ENV_KEYS[config.provider]} or pass apiKey in config.`,\n );\n }\n this.provider = this.createProvider(config.provider, apiKey, config.model);\n }\n\n // biome-ignore lint/suspicious/noExplicitAny: Hook types are complex and vary by implementation\n apply(bonvoy: { hooks: { afterChangelog: any } }): void {\n bonvoy.hooks.afterChangelog.tapPromise(this.name, async (context: ChangelogContext) => {\n const pkg = context.currentPackage;\n if (!pkg || context.isDryRun) return;\n\n const changelog = context.changelogs[pkg.name];\n if (!changelog) return;\n\n const commits = context.commits?.filter((c) => c.packages.includes(pkg.name)) ?? [];\n if (!commits.length) return;\n\n try {\n const prompt = buildPrompt(commits, pkg.name, pkg.version, this.config.promptTemplate);\n const summary = await this.provider.generateText(\n prompt,\n this.config.maxTokens ?? DEFAULT_MAX_TOKENS,\n );\n if (summary) {\n context.changelogs[pkg.name] = insertSummary(changelog, summary);\n }\n } catch (error) {\n context.logger.warn(` ⚠️ AI summary failed for ${pkg.name}: ${error}`);\n }\n });\n }\n\n private createProvider(name: ProviderName, apiKey: string, model?: string): AiProvider {\n switch (name) {\n case 'openai':\n return new OpenAiProvider(apiKey, model);\n case 'anthropic':\n return new AnthropicProvider(apiKey, model);\n case 'gemini':\n return new GeminiProvider(apiKey, model);\n }\n 
}\n}\n"],"mappings":";AAEA,MAAM,iBAAiB;;;;;;;;;;;AAYvB,MAAM,cAAc;AAEpB,SAAgB,YACd,SACA,aACA,SACA,UACQ;CAER,MAAM,aADY,QAAQ,MAAM,GAAG,YAAY,CAClB,KAAK,MAAM,KAAK,EAAE,UAAU,CAAC,KAAK,KAAK;AAEpE,SAAQ,YAAY,gBACjB,QAAQ,iBAAiB,YAAY,CACrC,QAAQ,aAAa,QAAQ,CAC7B,QAAQ,gBAAgB,WAAW;;AAGxC,SAAgB,cAAc,WAAmB,SAAyB;CACxE,MAAM,aAAa,QAChB,MAAM,KAAK,CACX,KAAK,SAAS,KAAK,OAAO,CAC1B,KAAK,KAAK;CAGb,MAAM,eAAe,UAAU,QAAQ,KAAK;AAC5C,KAAI,iBAAiB,GACnB,QAAO,GAAG,UAAU,MAAM;AAK5B,QAAO,GAFQ,UAAU,MAAM,GAAG,aAAa,CAE9B,MAAM,WAAW,IADrB,UAAU,MAAM,eAAe,EAAE;;;;;AC1ChD,MAAMA,kBAAgB;AACtB,MAAMC,aAAW;AAEjB,IAAa,oBAAb,MAAqD;CACnD,AAAQ;CACR,AAAQ;CAER,YAAY,QAAgB,OAAgB;AAC1C,OAAK,SAAS;AACd,OAAK,QAAQ,SAASD;;CAGxB,MAAM,aAAa,QAAgB,WAAoC;EACrE,MAAM,WAAW,MAAM,MAAMC,YAAU;GACrC,QAAQ;GACR,SAAS;IACP,gBAAgB;IAChB,aAAa,KAAK;IAClB,qBAAqB;IACtB;GACD,MAAM,KAAK,UAAU;IACnB,OAAO,KAAK;IACZ,UAAU,CAAC;KAAE,MAAM;KAAQ,SAAS;KAAQ,CAAC;IAC7C,YAAY;IACb,CAAC;GACH,CAAC;AAEF,MAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,wBAAwB,SAAS,OAAO,GAAG,SAAS,aAAa;AAInF,UADc,MAAM,SAAS,MAAM,EACvB,QAAQ,GAAG,KAAK,MAAM;;;;;;AChCtC,MAAMC,kBAAgB;AACtB,MAAMC,aAAW;AAEjB,IAAa,iBAAb,MAAkD;CAChD,AAAQ;CACR,AAAQ;CAER,YAAY,QAAgB,OAAgB;AAC1C,OAAK,SAAS;AACd,OAAK,QAAQ,SAASD;;CAGxB,MAAM,aAAa,QAAgB,WAAoC;EACrE,MAAM,WAAW,MAAM,MAAM,GAAGC,WAAS,GAAG,KAAK,MAAM,uBAAuB,KAAK,UAAU;GAC3F,QAAQ;GACR,SAAS,EAAE,gBAAgB,oBAAoB;GAC/C,MAAM,KAAK,UAAU;IACnB,UAAU,CAAC,EAAE,OAAO,CAAC,EAAE,MAAM,QAAQ,CAAC,EAAE,CAAC;IACzC,kBAAkB,EAAE,iBAAiB,WAAW;IACjD,CAAC;GACH,CAAC;AAEF,MAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,qBAAqB,SAAS,OAAO,GAAG,SAAS,aAAa;AAMhF,UAHc,MAAM,SAAS,MAAM,EAGvB,WAAW,GAAG,QAAQ,MAAM,GAAG,KAAK,MAAM;;;;;;AC7B1D,MAAM,gBAAgB;AACtB,MAAM,WAAW;AAEjB,IAAa,iBAAb,MAAkD;CAChD,AAAQ;CACR,AAAQ;CAER,YAAY,QAAgB,OAAgB;AAC1C,OAAK,SAAS;AACd,OAAK,QAAQ,SAAS;;CAGxB,MAAM,aAAa,QAAgB,WAAoC;EACrE,MAAM,WAAW,MAAM,MAAM,UAAU;GACrC,QAAQ;GACR,SAAS;IACP,gBAAgB;IAChB,eAAe,UAAU,KAAK;IAC/B;GACD,MAAM,KAAK,UAAU;IACnB,OAAO,KAAK;IACZ,UAAU,CAAC;KAAE,MAAM;KAAQ,SAAS;KAAQ,CAAC;IAC7C,YAAY;IACb,CAAC;GACH,CAAC;AAEF,MAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,
qBAAqB,SAAS,OAAO,GAAG,SAAS,aAAa;AAIhF,UADc,MAAM,SAAS,MAAM,EACvB,QAAQ,GAAG,QAAQ,QAAQ,MAAM;;;;;;ACjBjD,MAAM,WAAyC;CAC7C,QAAQ;CACR,WAAW;CACX,QAAQ;CACT;AAED,MAAM,qBAAqB;AAE3B,IAAqB,WAArB,MAAsD;CACpD,OAAO;CACP,AAAQ;CACR,AAAQ;CAER,YAAY,QAAwB;AAClC,OAAK,SAAS;EACd,MAAM,SAAS,OAAO,UAAU,QAAQ,IAAI,SAAS,OAAO;AAC5D,MAAI,CAAC,OACH,OAAM,IAAI,MACR,2CAA2C,SAAS,OAAO,UAAU,4BACtE;AAEH,OAAK,WAAW,KAAK,eAAe,OAAO,UAAU,QAAQ,OAAO,MAAM;;CAI5E,MAAM,QAAkD;AACtD,SAAO,MAAM,eAAe,WAAW,KAAK,MAAM,OAAO,YAA8B;GACrF,MAAM,MAAM,QAAQ;AACpB,OAAI,CAAC,OAAO,QAAQ,SAAU;GAE9B,MAAM,YAAY,QAAQ,WAAW,IAAI;AACzC,OAAI,CAAC,UAAW;GAEhB,MAAM,UAAU,QAAQ,SAAS,QAAQ,MAAM,EAAE,SAAS,SAAS,IAAI,KAAK,CAAC,IAAI,EAAE;AACnF,OAAI,CAAC,QAAQ,OAAQ;AAErB,OAAI;IACF,MAAM,SAAS,YAAY,SAAS,IAAI,MAAM,IAAI,SAAS,KAAK,OAAO,eAAe;IACtF,MAAM,UAAU,MAAM,KAAK,SAAS,aAClC,QACA,KAAK,OAAO,aAAa,mBAC1B;AACD,QAAI,QACF,SAAQ,WAAW,IAAI,QAAQ,cAAc,WAAW,QAAQ;YAE3D,OAAO;AACd,YAAQ,OAAO,KAAK,8BAA8B,IAAI,KAAK,IAAI,QAAQ;;IAEzE;;CAGJ,AAAQ,eAAe,MAAoB,QAAgB,OAA4B;AACrF,UAAQ,MAAR;GACE,KAAK,SACH,QAAO,IAAI,eAAe,QAAQ,MAAM;GAC1C,KAAK,YACH,QAAO,IAAI,kBAAkB,QAAQ,MAAM;GAC7C,KAAK,SACH,QAAO,IAAI,eAAe,QAAQ,MAAM"}
|