@lyy0709/contextweaver 1.0.0

This diff shows the content of publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in the public registry.
@@ -0,0 +1,30 @@
+ import {
+   logger
+ } from "./chunk-YVLGQTLG.js";
+ import "./chunk-V2USKRIC.js";
+
+ // src/enhancer/browser.ts
+ import { exec } from "child_process";
+ import { platform } from "os";
+ function execCommand(command) {
+   exec(command, (err) => {
+     if (err) {
+       logger.warn({ error: err.message, command }, "浏览器自动打开失败");
+     }
+   });
+ }
+ function openBrowser(url) {
+   const os = platform();
+   if (os === "darwin") {
+     execCommand(`open "${url}"`);
+     return;
+   }
+   if (os === "win32") {
+     execCommand(`cmd /c start "" "${url}"`);
+     return;
+   }
+   execCommand(`xdg-open "${url}"`);
+ }
+ export {
+   openBrowser
+ };
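
A minimal usage sketch of the openBrowser helper above. The import path is an assumption: the chunk filename is an internal build artifact, and the sketch presumes the package entry point re-exports openBrowser.

// Usage sketch (hypothetical import path — assumes the package entry re-exports openBrowser).
import { openBrowser } from "@lyy0709/contextweaver";

// Fire-and-forget: openBrowser returns nothing; a failed launch is only logged
// via logger.warn with the failing command and the error message.
openBrowser("http://localhost:3000");
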
@@ -0,0 +1,186 @@
+ import {
+   logger
+ } from "./chunk-YVLGQTLG.js";
+ import {
+   checkEnhancerEnv,
+   getEnhancerConfig
+ } from "./chunk-V2USKRIC.js";
+
+ // src/enhancer/detect.ts
+ function detectLanguage(text) {
+   const matches = text.match(/[\u4e00-\u9fff]/g);
+   const count = matches?.length ?? 0;
+   return count >= 3 ? "zh" : "en";
+ }
+
+ // src/enhancer/llmClient.ts
+ var clientCache = /* @__PURE__ */ new Map();
+ async function createLlmClient(config) {
+   const endpoint = config.endpoint;
+   const baseUrl = config.baseUrl;
+   const model = config.model || "";
+   const cacheKey = `${endpoint}:${baseUrl}:${model}`;
+   const cached = clientCache.get(cacheKey);
+   if (cached) {
+     return cached;
+   }
+   if (!config.model) {
+     throw new Error("Prompt enhancer model is required");
+   }
+   const clientConfig = {
+     baseUrl: config.baseUrl,
+     apiKey: config.apiKey,
+     model: config.model
+   };
+   let client;
+   if (endpoint === "openai") {
+     const { OpenAiAdapter } = await import("./openai-MOPZNA5I.js");
+     client = new OpenAiAdapter(clientConfig);
+   } else if (endpoint === "claude") {
+     const { ClaudeAdapter } = await import("./claude-IKIA62JA.js");
+     client = new ClaudeAdapter(clientConfig);
+   } else {
+     const { GeminiAdapter } = await import("./gemini-Q37K5XA5.js");
+     client = new GeminiAdapter(clientConfig);
+   }
+   clientCache.set(cacheKey, client);
+   return client;
+ }
+
+ // src/enhancer/parser.ts
+ function parseEnhancedPrompt(response) {
+   const patterns = [
+     /<enhanced-prompt>([\s\S]*?)<\/enhanced-prompt>/,
+     /<augment-enhanced-prompt(?:\s+[^>]*)?>\s*([\s\S]*?)\s*<\/augment-enhanced-prompt\s*>/
+   ];
+   for (const pattern of patterns) {
+     const match = response.match(pattern);
+     const extracted = match?.[1]?.trim();
+     if (extracted) {
+       return extracted;
+     }
+   }
+   return response.trim();
+ }
+
+ // src/enhancer/template.ts
+ import { promises as fs } from "fs";
+ var DEFAULT_TEMPLATE = `你是一个提示词优化专家。
+
+ 你的任务：将用户的原始提示词进行增强，使其更清晰、更具体、更可执行。
+
+ 增强要求：
+ - 保留原始意图，不要改变需求方向
+ - 补全缺失的上下文、约束、验收标准与边界条件
+ - 给出结构化、可直接执行的指令（适当使用列表/步骤）
+ - 如果原始提示词包含三引号代码块，请尽量保持代码块内容不变
+
+ {{language_instruction}}
+
+ 对话历史（如为空可忽略）：
+ {{conversation_history}}
+
+ 请只输出增强后的提示词，并用以下 XML 标签包裹：
+ <enhanced-prompt>...</enhanced-prompt>
+
+ 原始提示词：
+ {{original_prompt}}
+ `;
+ function getTemplatePathOrInline(templateValue) {
+   if (!templateValue) return void 0;
+   const trimmed = templateValue.trim();
+   if (!trimmed) return void 0;
+   if (trimmed.includes("\n") || trimmed.includes("{{")) {
+     return { kind: "inline", value: templateValue };
+   }
+   return { kind: "path", value: trimmed };
+ }
+ async function loadTemplate(templatePath) {
+   const templateValue = templatePath;
+   const resolved = getTemplatePathOrInline(templateValue);
+   if (!resolved) return DEFAULT_TEMPLATE;
+   if (resolved.kind === "inline") {
+     return resolved.value;
+   }
+   try {
+     return await fs.readFile(resolved.value, "utf-8");
+   } catch (err) {
+     const error = err;
+     logger.warn(
+       { templatePath: resolved.value, error: error.message },
+       "读取自定义模板失败，已回退到默认模板"
+     );
+     return DEFAULT_TEMPLATE;
+   }
+ }
+ function renderPrompt(template, vars) {
+   const mapping = {
+     original_prompt: vars.originalPrompt,
+     conversation_history: vars.conversationHistory ?? "",
+     language_instruction: vars.languageInstruction
+   };
+   return template.replace(
+     /\{\{\s*(original_prompt|conversation_history|language_instruction)\s*\}\}/g,
+     (match, key) => mapping[key] ?? match
+   );
+ }
+
+ // src/enhancer/index.ts
+ async function enhancePrompt(options) {
+   const envCheck = checkEnhancerEnv();
+   if (!envCheck.isValid) {
+     throw new Error(
+       `Prompt Enhancer 环境变量未配置: ${envCheck.missingVars.join(", ")}`
+     );
+   }
+   const config = getEnhancerConfig();
+   const endpoint = options.endpointOverride ?? config.endpoint;
+   const defaultBaseUrlByEndpoint = {
+     openai: "https://api.openai.com/v1/chat/completions",
+     claude: "https://api.anthropic.com/v1/messages",
+     gemini: "https://generativelanguage.googleapis.com/v1beta"
+   };
+   const defaultModelByEndpoint = {
+     openai: "gpt-4o-mini",
+     claude: "claude-sonnet-4-20250514",
+     gemini: "gemini-2.0-flash"
+   };
+   const baseUrl = process.env.PROMPT_ENHANCER_BASE_URL || defaultBaseUrlByEndpoint[endpoint];
+   const model = process.env.PROMPT_ENHANCER_MODEL || defaultModelByEndpoint[endpoint];
+   const configWithOverride = {
+     ...config,
+     endpoint,
+     baseUrl,
+     model
+   };
+   const language = detectLanguage(options.prompt);
+   const languageInstruction = language === "zh" ? "请用中文输出增强后的提示词。" : "Please output the enhanced prompt in English.";
+   const template = await loadTemplate(configWithOverride.templatePath);
+   const rendered = renderPrompt(template, {
+     originalPrompt: options.prompt,
+     conversationHistory: options.conversationHistory,
+     languageInstruction
+   });
+   const client = await createLlmClient(configWithOverride);
+   const raw = await client.chat([
+     {
+       role: "system",
+       content: "You are a helpful prompt enhancement assistant."
+     },
+     {
+       role: "user",
+       content: rendered
+     }
+   ]);
+   const enhanced = parseEnhancedPrompt(raw);
+   return {
+     original: options.prompt,
+     enhanced,
+     endpoint: configWithOverride.endpoint,
+     model: configWithOverride.model || ""
+   };
+ }
+
+ export {
+   enhancePrompt
+ };
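
A hedged usage sketch of enhancePrompt, based only on the code above. The import path is an assumption (the chunk filenames are internal build artifacts), and the exact environment variables required by checkEnhancerEnv are not visible in this diff; only the two PROMPT_ENHANCER_* overrides appear in the code.

// Usage sketch (hypothetical import path — assumes the package entry re-exports enhancePrompt).
// checkEnhancerEnv() must pass first, so whatever variables it requires (API key, endpoint, ...)
// need to be set; that check lives in chunk-V2USKRIC.js and is not shown here.
// Optional overrides visible above:
//   PROMPT_ENHANCER_BASE_URL — replaces the per-endpoint default base URL
//   PROMPT_ENHANCER_MODEL    — replaces gpt-4o-mini / claude-sonnet-4-20250514 / gemini-2.0-flash
import { enhancePrompt } from "@lyy0709/contextweaver";

const result = await enhancePrompt({
  prompt: "帮我写一个登录页面",   // 3+ CJK characters, so detectLanguage returns "zh"
  conversationHistory: "",        // optional; substituted into {{conversation_history}}
  endpointOverride: "openai"      // optional; otherwise getEnhancerConfig().endpoint is used
});

console.log(result.enhanced);  // text extracted from <enhanced-prompt>...</enhanced-prompt>, or the raw reply
console.log(result.original, result.endpoint, result.model);
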