@akanjs/devkit 0.0.143 → 0.0.145

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,15 +1,23 @@
 import { Logger } from "@akanjs/common";
 import { input, select } from "@inquirer/prompts";
-import { AIMessage, HumanMessage } from "@langchain/core/messages";
+import {
+  AIMessage,
+  HumanMessage,
+  mapChatMessagesToStoredMessages,
+  mapStoredMessagesToChatMessages
+} from "@langchain/core/messages";
+import { ChatDeepSeek } from "@langchain/deepseek";
 import { ChatOpenAI } from "@langchain/openai";
 import chalk from "chalk";
+import fs from "fs";
 import { getAkanGlobalConfig, setAkanGlobalConfig } from "./auth";
 import { Spinner } from "./spinner";
 const MAX_ASK_TRY = 300;
 const supportedLlmModels = ["deepseek-chat", "deepseek-reasoner"];
 class AiSession {
+  static #cacheDir = "node_modules/.cache/akan/aiSession";
   static #chat = null;
-  static async init({ temperature = 0.7, useExisting = true } = {}) {
+  static async init({ temperature = 0, useExisting = true } = {}) {
     if (useExisting) {
       const llmConfig2 = this.getLlmConfig();
       if (llmConfig2) {
@@ -17,18 +25,20 @@ class AiSession {
         Logger.rawLog(chalk.dim(`\u{1F916}akan editor uses existing LLM config (${llmConfig2.model})`));
         return this;
       }
-    }
+    } else
+      Logger.rawLog(chalk.yellow("\u{1F916}akan-editor is not initialized. LLM configuration should be set first."));
     const llmConfig = await this.#requestLlmConfig();
     const { model, apiKey } = llmConfig;
     await this.#validateApiKey(model, apiKey);
     return this.#setChatModel(model, apiKey, { temperature }).setLlmConfig({ model, apiKey });
   }
-  static #setChatModel(model, apiKey, { temperature = 0.7 } = {}) {
-    this.#chat = new ChatOpenAI({
+  static #setChatModel(model, apiKey, { temperature = 0 } = {}) {
+    this.#chat = new ChatDeepSeek({
       modelName: model,
       temperature,
       streaming: true,
-      configuration: { baseURL: "https://api.deepseek.com/v1", apiKey }
+      apiKey
+      // configuration: { baseURL: "https://api.deepseek.com/v1", apiKey },
     });
     return this;
   }
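In the hunk above, the chat client moves from ChatOpenAI pointed at a DeepSeek-compatible baseURL to the dedicated ChatDeepSeek client, with the default temperature dropping from 0.7 to 0. A minimal sketch of the two configurations, assuming only the option names visible in the diff (the variable names and the DEEPSEEK_API_KEY environment variable are illustrative):

    import { ChatOpenAI } from "@langchain/openai";
    import { ChatDeepSeek } from "@langchain/deepseek";

    // Before: OpenAI-compatible client routed to DeepSeek's endpoint.
    const legacyChat = new ChatOpenAI({
      modelName: "deepseek-chat",
      temperature: 0,
      streaming: true,
      configuration: { baseURL: "https://api.deepseek.com/v1", apiKey: process.env.DEEPSEEK_API_KEY },
    });

    // After: dedicated DeepSeek client; the endpoint is resolved by @langchain/deepseek.
    const deepseekChat = new ChatDeepSeek({
      modelName: "deepseek-chat",
      temperature: 0,
      streaming: true,
      apiKey: process.env.DEEPSEEK_API_KEY,
    });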
@@ -67,19 +77,44 @@ class AiSession {
       throw error;
     }
   }
+  static clearCache(workspaceRoot) {
+    const cacheDir = `${workspaceRoot}/${this.#cacheDir}`;
+    fs.rmSync(cacheDir, { recursive: true, force: true });
+  }
   messageHistory = [];
-  constructor(messageHistory = []) {
-    this.messageHistory = messageHistory;
+  sessionKey;
+  isCacheLoaded;
+  workspace;
+  constructor(type, { workspace, cacheKey, isContinued }) {
+    this.workspace = workspace;
+    this.sessionKey = `${type}${cacheKey ? `-${cacheKey}` : ""}`;
+    if (isContinued)
+      this.#loadCache();
+  }
+  #loadCache() {
+    const cacheFile = `${AiSession.#cacheDir}/${this.sessionKey}.json`;
+    const isCacheExists = this.workspace.exists(cacheFile);
+    if (isCacheExists)
+      this.messageHistory = mapStoredMessagesToChatMessages(this.workspace.readJson(cacheFile));
+    else
+      this.messageHistory = [];
+    this.isCacheLoaded = isCacheExists;
+    return isCacheExists;
+  }
+  #saveCache() {
+    const cacheFilePath = `${AiSession.#cacheDir}/${this.sessionKey}.json`;
+    this.workspace.writeJson(cacheFilePath, mapChatMessagesToStoredMessages(this.messageHistory));
   }
   async ask(question, {
+    onReasoning = (reasoning) => {
+      Logger.raw(chalk.dim(reasoning));
+    },
     onChunk = (chunk) => {
       Logger.raw(chunk);
     }
   } = {}) {
-    if (!AiSession.#chat) {
-      Logger.rawLog(chalk.yellow("\u{1F916}akan-editor is not initialized. LLM configuration should be set first."));
+    if (!AiSession.#chat)
       await AiSession.init();
-    }
     if (!AiSession.#chat)
       throw new Error("Failed to initialize the AI session");
     const loader = new Spinner(`${AiSession.#chat.model} is thinking...`, {
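The new fields above persist the chat history per session key under node_modules/.cache/akan/aiSession. A usage sketch, assuming AiSession is importable from the devkit build and that the workspace object implements the exists/readJson/writeJson calls used by #loadCache/#saveCache; the "scalar"/"money" values are illustrative:

    import {
      mapChatMessagesToStoredMessages,
      mapStoredMessagesToChatMessages,
    } from "@langchain/core/messages";
    import { AiSession } from "@akanjs/devkit"; // assumed import path

    // Assumed minimal shape of the devkit workspace helper.
    declare const workspace: {
      exists(path: string): boolean;
      readJson(path: string): any;
      writeJson(path: string, data: unknown): void;
    };

    // sessionKey becomes "scalar-money"; with isContinued the history is restored from
    // node_modules/.cache/akan/aiSession/scalar-money.json when that file exists.
    const session = new AiSession("scalar", { workspace, cacheKey: "money", isContinued: true });

    // The cache file itself is just LangChain's stored-message round trip:
    const stored = mapChatMessagesToStoredMessages(session.messageHistory); // JSON-safe records
    const restored = mapStoredMessagesToChatMessages(stored);               // BaseMessage[] again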
@@ -89,10 +124,21 @@ class AiSession {
       const humanMessage = new HumanMessage(question);
       this.messageHistory.push(humanMessage);
       const stream = await AiSession.#chat.stream(this.messageHistory);
-      let fullResponse = "", tokenIdx = 0;
+      let reasoningResponse = "", fullResponse = "", tokenIdx = 0;
       for await (const chunk of stream) {
-        if (loader.isSpinning() && chunk.content.length)
+        if (loader.isSpinning())
           loader.succeed(`${AiSession.#chat.model} responded`);
+        if (!fullResponse.length) {
+          const reasoningContent = chunk.additional_kwargs.reasoning_content ?? "";
+          if (reasoningContent.length) {
+            reasoningResponse += reasoningContent;
+            onReasoning(reasoningContent);
+            continue;
+          } else if (chunk.content.length) {
+            reasoningResponse += "\n";
+            onReasoning(reasoningResponse);
+          }
+        }
         const content = chunk.content;
         if (typeof content === "string") {
           fullResponse += content;
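With deepseek-reasoner, stream chunks carry additional_kwargs.reasoning_content before any answer tokens; the loop above forwards those deltas to the new onReasoning callback and the regular tokens to onChunk. A caller-side sketch (the session and question are illustrative; ask() resolves to a message object whose content holds the full reply, as edit() uses it):

    const response = await session.ask("Summarize the generated scalar", {
      // chain-of-thought deltas taken from chunk.additional_kwargs.reasoning_content
      onReasoning: (reasoning) => process.stderr.write(reasoning),
      // regular answer tokens, streamed once the reasoning phase ends
      onChunk: (chunk) => process.stdout.write(chunk),
    });
    console.log(response.content);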
@@ -109,18 +155,25 @@ class AiSession {
       throw new Error("Failed to stream response");
     }
   }
-  async edit(question, { onChunk, maxTry = MAX_ASK_TRY } = {}) {
+  async edit(question, { onChunk, onReasoning, maxTry = MAX_ASK_TRY, validate, approve } = {}) {
     for (let tryCount = 0; tryCount < maxTry; tryCount++) {
-      const response = await this.ask(question, { onChunk });
-      const isConfirmed = await select({
+      let response = await this.ask(question, { onChunk, onReasoning });
+      if (validate?.length && tryCount === 0) {
+        const validateQuestion = `Double check if the response meets the requirements and conditions, and follow the instructions. If not, rewrite it.
+${validate.map((v) => `- ${v}`).join("\n")}`;
+        response = await this.ask(validateQuestion, { onChunk, onReasoning });
+      }
+      const isConfirmed = approve ? true : await select({
         message: "Do you want to edit the response?",
         choices: [
           { name: "\u2705 Yes, confirm and apply this result", value: true },
           { name: "\u{1F504} No, I want to edit it more", value: false }
         ]
       });
-      if (isConfirmed)
+      if (isConfirmed) {
+        this.#saveCache();
         return response.content;
+      }
       question = await input({ message: "What do you want to change?" });
       tryCount++;
     }
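edit() gains two options here: validate, a checklist folded into a one-time self-review prompt after the first answer, and approve, which skips the interactive confirmation and saves the session cache immediately. A usage sketch (the request and checklist strings are illustrative):

    const code = await session.edit("Write a Money scalar with currency and amount fields", {
      validate: [
        "every field has an explicit type",
        "no usage of any",
      ],             // appended to the "Double check..." follow-up on the first try only
      approve: true, // bypass the "Do you want to edit the response?" prompt and call #saveCache()
    });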
@@ -131,9 +184,97 @@ class AiSession {
     return this.#getTypescriptCode(content);
   }
   #getTypescriptCode(content) {
+    //! will be deprecated
     const code = /```(typescript|tsx)([\s\S]*?)```/.exec(content);
     return code ? code[2] : content;
   }
+  addToolMessgaes(messages) {
+    const toolMessages = messages.map((message) => new HumanMessage(message.content));
+    this.messageHistory.push(...toolMessages);
+    return this;
+  }
+  async writeTypescripts(question, executor, options = {}) {
+    const content = await this.edit(question, options);
+    const writes = this.#getTypescriptCodes(content);
+    for (const write of writes)
+      executor.writeFile(write.filePath, write.content);
+    return await this.#tryFixTypescripts(writes, executor, options);
+  }
+  async #editTypescripts(question, options = {}) {
+    const content = await this.edit(question, options);
+    return this.#getTypescriptCodes(content);
+  }
+  async #tryFixTypescripts(writes, executor, options = {}) {
+    const MAX_EDIT_TRY = 5;
+    for (let tryCount = 0; tryCount < MAX_EDIT_TRY; tryCount++) {
+      const loader = new Spinner(`Type checking and linting...`, { prefix: `\u{1F916}akan-editor` }).start();
+      const fileChecks = await Promise.all(
+        writes.map(async ({ filePath }) => {
+          const typeCheckResult = executor.typeCheck(filePath);
+          const lintResult = await executor.lint(filePath);
+          const needFix2 = !!typeCheckResult.errors.length || !!lintResult.errors.length;
+          return { filePath, typeCheckResult, lintResult, needFix: needFix2 };
+        })
+      );
+      const needFix = fileChecks.some((fileCheck) => fileCheck.needFix);
+      if (needFix) {
+        loader.fail("Type checking and linting has some errors, try to fix them");
+        fileChecks.forEach((fileCheck) => {
+          Logger.rawLog(
+            `TypeCheck Result
+${fileCheck.typeCheckResult.message}
+Lint Result
+${fileCheck.lintResult.message}`
+          );
+          this.addToolMessgaes([
+            { type: "typescript", content: fileCheck.typeCheckResult.message },
+            { type: "eslint", content: fileCheck.lintResult.message }
+          ]);
+        });
+        writes = await this.#editTypescripts("Fix the typescript and eslint errors", {
+          ...options,
+          validate: void 0,
+          approve: true
+        });
+        for (const write of writes)
+          executor.writeFile(write.filePath, write.content);
+      } else {
+        loader.succeed("Type checking and linting has no errors");
+        return writes;
+      }
+    }
+    throw new Error("Failed to create scalar");
+  }
+  #getTypescriptCodes(text) {
+    const codes = text.match(/```typescript([\s\S]*?)```/g);
+    if (!codes)
+      return [];
+    const result = codes.map((code) => {
+      const content = /```(typescript|tsx)([\s\S]*?)```/.exec(code)?.[2];
+      if (!content)
+        return null;
+      const filePath = /\/\/ File: (.*?)(?:\n|$)/.exec(content)?.[1]?.trim();
+      if (!filePath)
+        return null;
+      const contentWithoutFilepath = content.replace(`// File: ${filePath}
+`, "").trim();
+      return { filePath, content: contentWithoutFilepath };
+    });
+    return result.filter((code) => code !== null);
+  }
+  async editMarkdown(request, options = {}) {
+    const content = await this.edit(request, options);
+    return this.#getMarkdownContent(content);
+  }
+  #getMarkdownContent(text) {
+    const searchText = "```markdown";
+    const firstIndex = text.indexOf("```markdown");
+    const lastIndex = text.lastIndexOf("```");
+    if (firstIndex === -1)
+      return text;
+    else
+      return text.slice(firstIndex + searchText.length, lastIndex).trim();
+  }
 }
 export {
   AiSession,
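The multi-file helpers added above rely on a simple convention: each generated file comes back as a fenced typescript block whose first line is a // File: <path> marker. #getTypescriptCodes() extracts { filePath, content } pairs, writeTypescripts() writes each one through the executor, and #tryFixTypescripts() feeds type-check and lint messages back to the model for up to five repair rounds before giving up. A sketch of a response in the expected shape (the path and snippet are illustrative):

    const modelResponse = [
      "```typescript",
      "// File: libs/shared/money.constant.ts",
      "export const Money = { currency: String, amount: Number };",
      "```",
    ].join("\n");
    // Parses to:
    // [{ filePath: "libs/shared/money.constant.ts",
    //    content: 'export const Money = { currency: String, amount: Number };' }]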
@@ -17,7 +17,6 @@ class Builder {
     return {
       entryPoints: [
         ...bundle ? [`${this.#executor.cwdPath}/index.ts`] : [`${this.#executor.cwdPath}/**/*.ts`, `${this.#executor.cwdPath}/**/*.tsx`],
-        `${this.#executor.cwdPath}/**/*.template`,
         ...additionalEntryPoints
       ],
       bundle,
@@ -27,7 +26,7 @@ class Builder {
       format,
       outdir: `${this.#distExecutor.cwdPath}/${format}`,
       logLevel: "error",
-      loader: { ".template": "copy" }
+      loader: { ".template": "copy", ".md": "copy" }
     };
   }
   #getAssetBuildOptions() {
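One builder hunk drops the .template glob from the entry points; the other adds ".md" to esbuild's copy loader so Markdown files reach the dist output verbatim. A minimal esbuild sketch of that option shape (the entry paths are illustrative):

    import { build } from "esbuild";

    await build({
      entryPoints: ["src/index.ts", "src/guides/setup.md"],
      outdir: "dist/esm",
      format: "esm",
      logLevel: "error",
      // "copy" emits the matched files unchanged instead of parsing them as JS/TS.
      loader: { ".template": "copy", ".md": "copy" },
    });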
@@ -179,7 +179,6 @@ const getInternalArgumentValue = async (argMeta, value, workspace) => {
 };
 const runCommands = async (...commands) => {
   process.on("unhandledRejection", (error) => {
-    console.error(chalk.red("[fatal]"), error);
     process.exit(1);
   });
   const hasPackageJson = fs.existsSync(`${__dirname}/../package.json`);