ide-assi 0.86.0 → 0.88.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.cjs.js +29 -11
- package/dist/bundle.esm.js +29 -11
- package/dist/components/ideAi.js +6 -3
- package/dist/components/ideUtils.js +47 -0
- package/package.json +1 -1
- package/src/components/ideAi.js +6 -3
- package/src/components/ideUtils.js +47 -0
- package/src/components/ideAiPrompt.js +0 -30
package/dist/bundle.cjs.js
CHANGED
|
@@ -193257,7 +193257,7 @@ window.jQuery = window.$ = $$1;
|
|
|
193257
193257
|
|
|
193258
193258
|
//export default new ninegrid();
|
|
193259
193259
|
|
|
193260
|
-
class
|
|
193260
|
+
class IdeUtils
|
|
193261
193261
|
{
|
|
193262
193262
|
constructor() {
|
|
193263
193263
|
}
|
|
@@ -193272,16 +193272,31 @@ class IdeAiPrompt
|
|
|
193272
193272
|
}).format(params);
|
|
193273
193273
|
};
|
|
193274
193274
|
|
|
193275
|
-
static
|
|
193275
|
+
static extractResponse = (response, gptServer) => {
|
|
193276
193276
|
|
|
193277
|
-
const
|
|
193277
|
+
const extractJsonSnippet = (text) => {
|
|
193278
|
+
const match = text.match(/```json([\s\S]*?)```/);
|
|
193279
|
+
return match ? match[1].trim() : text;
|
|
193280
|
+
};
|
|
193278
193281
|
|
|
193279
|
-
|
|
193280
|
-
|
|
193281
|
-
|
|
193282
|
-
|
|
193283
|
-
|
|
193284
|
-
|
|
193282
|
+
let r;
|
|
193283
|
+
switch (gptServer) {
|
|
193284
|
+
case "openai":
|
|
193285
|
+
r = extractJsonSnippet(response.content.trim());
|
|
193286
|
+
break;
|
|
193287
|
+
case "gemini":
|
|
193288
|
+
r = extractJsonSnippet(response.content.trim());
|
|
193289
|
+
break;
|
|
193290
|
+
case "ollama":
|
|
193291
|
+
r = extractJsonSnippet(response);
|
|
193292
|
+
break;
|
|
193293
|
+
}
|
|
193294
|
+
|
|
193295
|
+
try {
|
|
193296
|
+
return JSON.parse(r);
|
|
193297
|
+
} catch (error) {
|
|
193298
|
+
return r;
|
|
193299
|
+
}
|
|
193285
193300
|
};
|
|
193286
193301
|
}
|
|
193287
193302
|
|
|
@@ -193458,14 +193473,17 @@ class IdeAi
|
|
|
193458
193473
|
this.#createModel();
|
|
193459
193474
|
|
|
193460
193475
|
const systemMessage = "You are a helpful assistant.";
|
|
193461
|
-
const prompt = await
|
|
193476
|
+
const prompt = await IdeUtils.generatePrompt('/prompts/meta/개발액션분류.txt', { "userPrompt": userPrompt });
|
|
193462
193477
|
|
|
193463
193478
|
const response = await this.#model.invoke([
|
|
193464
193479
|
new SystemMessage(systemMessage),
|
|
193465
193480
|
new HumanMessage(prompt),
|
|
193466
193481
|
]);
|
|
193467
193482
|
|
|
193468
|
-
|
|
193483
|
+
const a = IdeUtils.extractResponse(response, this.#parent.settings.server);
|
|
193484
|
+
console.log(a);
|
|
193485
|
+
|
|
193486
|
+
|
|
193469
193487
|
}
|
|
193470
193488
|
}
|
|
193471
193489
|
|
package/dist/bundle.esm.js
CHANGED
|
@@ -193253,7 +193253,7 @@ window.jQuery = window.$ = $$1;
|
|
|
193253
193253
|
|
|
193254
193254
|
//export default new ninegrid();
|
|
193255
193255
|
|
|
193256
|
-
class
|
|
193256
|
+
class IdeUtils
|
|
193257
193257
|
{
|
|
193258
193258
|
constructor() {
|
|
193259
193259
|
}
|
|
@@ -193268,16 +193268,31 @@ class IdeAiPrompt
|
|
|
193268
193268
|
}).format(params);
|
|
193269
193269
|
};
|
|
193270
193270
|
|
|
193271
|
-
static
|
|
193271
|
+
static extractResponse = (response, gptServer) => {
|
|
193272
193272
|
|
|
193273
|
-
const
|
|
193273
|
+
const extractJsonSnippet = (text) => {
|
|
193274
|
+
const match = text.match(/```json([\s\S]*?)```/);
|
|
193275
|
+
return match ? match[1].trim() : text;
|
|
193276
|
+
};
|
|
193274
193277
|
|
|
193275
|
-
|
|
193276
|
-
|
|
193277
|
-
|
|
193278
|
-
|
|
193279
|
-
|
|
193280
|
-
|
|
193278
|
+
let r;
|
|
193279
|
+
switch (gptServer) {
|
|
193280
|
+
case "openai":
|
|
193281
|
+
r = extractJsonSnippet(response.content.trim());
|
|
193282
|
+
break;
|
|
193283
|
+
case "gemini":
|
|
193284
|
+
r = extractJsonSnippet(response.content.trim());
|
|
193285
|
+
break;
|
|
193286
|
+
case "ollama":
|
|
193287
|
+
r = extractJsonSnippet(response);
|
|
193288
|
+
break;
|
|
193289
|
+
}
|
|
193290
|
+
|
|
193291
|
+
try {
|
|
193292
|
+
return JSON.parse(r);
|
|
193293
|
+
} catch (error) {
|
|
193294
|
+
return r;
|
|
193295
|
+
}
|
|
193281
193296
|
};
|
|
193282
193297
|
}
|
|
193283
193298
|
|
|
@@ -193454,14 +193469,17 @@ class IdeAi
|
|
|
193454
193469
|
this.#createModel();
|
|
193455
193470
|
|
|
193456
193471
|
const systemMessage = "You are a helpful assistant.";
|
|
193457
|
-
const prompt = await
|
|
193472
|
+
const prompt = await IdeUtils.generatePrompt('/prompts/meta/개발액션분류.txt', { "userPrompt": userPrompt });
|
|
193458
193473
|
|
|
193459
193474
|
const response = await this.#model.invoke([
|
|
193460
193475
|
new SystemMessage(systemMessage),
|
|
193461
193476
|
new HumanMessage(prompt),
|
|
193462
193477
|
]);
|
|
193463
193478
|
|
|
193464
|
-
|
|
193479
|
+
const a = IdeUtils.extractResponse(response, this.#parent.settings.server);
|
|
193480
|
+
console.log(a);
|
|
193481
|
+
|
|
193482
|
+
|
|
193465
193483
|
}
|
|
193466
193484
|
}
|
|
193467
193485
|
|
package/dist/components/ideAi.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import ninegrid from "ninegrid2";
|
|
2
|
-
import {
|
|
2
|
+
import {IdeUtils} from "./ideUtils.js";
|
|
3
3
|
import { HumanMessage, SystemMessage } from '@langchain/core/messages';
|
|
4
4
|
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
|
|
5
5
|
import { Ollama } from "@langchain/ollama";
|
|
@@ -178,14 +178,17 @@ export class IdeAi
|
|
|
178
178
|
this.#createModel();
|
|
179
179
|
|
|
180
180
|
const systemMessage = "You are a helpful assistant.";
|
|
181
|
-
const prompt = await
|
|
181
|
+
const prompt = await IdeUtils.generatePrompt('/prompts/meta/개발액션분류.txt', { "userPrompt": userPrompt });
|
|
182
182
|
|
|
183
183
|
const response = await this.#model.invoke([
|
|
184
184
|
new SystemMessage(systemMessage),
|
|
185
185
|
new HumanMessage(prompt),
|
|
186
186
|
]);
|
|
187
187
|
|
|
188
|
-
|
|
188
|
+
const a = IdeUtils.extractResponse(response, this.#parent.settings.server);
|
|
189
|
+
console.log(a);
|
|
190
|
+
|
|
191
|
+
|
|
189
192
|
}
|
|
190
193
|
}
|
|
191
194
|
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { PromptTemplate } from '@langchain/core/prompts';
|
|
2
|
+
|
|
3
|
+
/**
 * Static helpers shared by the IDE-assistant components:
 * prompt-template loading and LLM-response normalization.
 */
export class IdeUtils
{
    constructor() {
    }

    /**
     * Fetch a prompt template file and interpolate `params` into it.
     * @param {string} path - URL/path of the template text file.
     * @param {Object<string,string>} params - Values for the template's variables;
     *   the object's keys become the template's inputVariables.
     * @returns {Promise<string>} The fully formatted prompt.
     * @throws {Error} When the template cannot be fetched (non-2xx response).
     */
    static generatePrompt = async (path, params) => {

        // fetch() resolves even on 404/500 — without this check a server error
        // page would silently be used as the template.
        const res = await fetch(path);
        if (!res.ok) {
            throw new Error(`Failed to load prompt template ${path}: HTTP ${res.status}`);
        }
        const template = await res.text();

        return await new PromptTemplate({
            template,
            inputVariables: Object.keys(params),
        }).format(params);
    };

    /**
     * Normalize an LLM reply into a JS value.
     * @param {object|string} response - Chat-model message (`{content}`) for openai/gemini, raw string for ollama.
     * @param {string} gptServer - Backend id: "openai" | "gemini" | "ollama".
     * @returns {*} Parsed JSON when the reply is (fenced) JSON, otherwise the plain text.
     */
    static extractResponse = (response, gptServer) => {

        // Strip a ```json ... ``` markdown fence if present; otherwise pass text through.
        const extractJsonSnippet = (text) => {
            const match = text.match(/```json([\s\S]*?)```/);
            return match ? match[1].trim() : text;
        };

        let r;
        switch (gptServer) {
            // openai and gemini both return a chat message whose text lives in `.content`
            // (the two branches were identical duplicates — merged via fall-through).
            case "openai":
            case "gemini":
                r = extractJsonSnippet(response.content.trim());
                break;
            case "ollama":
                // the Ollama LLM wrapper returns the raw string itself
                r = extractJsonSnippet(response);
                break;
            default:
                // Previously `default: break;` left `r` undefined, so an unknown
                // server silently returned undefined. Best-effort instead:
                // use the string directly, or `.content` when it is a message.
                r = extractJsonSnippet(
                    typeof response === "string" ? response : String(response?.content ?? "").trim()
                );
                break;
        }

        try {
            return JSON.parse(r);
        } catch (error) {
            // Not valid JSON — return the (fence-stripped) text unchanged.
            return r;
        }
    };
}
|
|
47
|
+
|
package/package.json
CHANGED
package/src/components/ideAi.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import ninegrid from "ninegrid2";
|
|
2
|
-
import {
|
|
2
|
+
import {IdeUtils} from "./ideUtils.js";
|
|
3
3
|
import { HumanMessage, SystemMessage } from '@langchain/core/messages';
|
|
4
4
|
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
|
|
5
5
|
import { Ollama } from "@langchain/ollama";
|
|
@@ -178,14 +178,17 @@ export class IdeAi
|
|
|
178
178
|
this.#createModel();
|
|
179
179
|
|
|
180
180
|
const systemMessage = "You are a helpful assistant.";
|
|
181
|
-
const prompt = await
|
|
181
|
+
const prompt = await IdeUtils.generatePrompt('/prompts/meta/개발액션분류.txt', { "userPrompt": userPrompt });
|
|
182
182
|
|
|
183
183
|
const response = await this.#model.invoke([
|
|
184
184
|
new SystemMessage(systemMessage),
|
|
185
185
|
new HumanMessage(prompt),
|
|
186
186
|
]);
|
|
187
187
|
|
|
188
|
-
|
|
188
|
+
const a = IdeUtils.extractResponse(response, this.#parent.settings.server);
|
|
189
|
+
console.log(a);
|
|
190
|
+
|
|
191
|
+
|
|
189
192
|
}
|
|
190
193
|
}
|
|
191
194
|
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { PromptTemplate } from '@langchain/core/prompts';
|
|
2
|
+
|
|
3
|
+
/**
 * Static helpers shared by the IDE-assistant components:
 * prompt-template loading and LLM-response normalization.
 */
export class IdeUtils
{
    constructor() {
    }

    /**
     * Fetch a prompt template file and interpolate `params` into it.
     * @param {string} path - URL/path of the template text file.
     * @param {Object<string,string>} params - Values for the template's variables;
     *   the object's keys become the template's inputVariables.
     * @returns {Promise<string>} The fully formatted prompt.
     * @throws {Error} When the template cannot be fetched (non-2xx response).
     */
    static generatePrompt = async (path, params) => {

        // fetch() resolves even on 404/500 — without this check a server error
        // page would silently be used as the template.
        const res = await fetch(path);
        if (!res.ok) {
            throw new Error(`Failed to load prompt template ${path}: HTTP ${res.status}`);
        }
        const template = await res.text();

        return await new PromptTemplate({
            template,
            inputVariables: Object.keys(params),
        }).format(params);
    };

    /**
     * Normalize an LLM reply into a JS value.
     * @param {object|string} response - Chat-model message (`{content}`) for openai/gemini, raw string for ollama.
     * @param {string} gptServer - Backend id: "openai" | "gemini" | "ollama".
     * @returns {*} Parsed JSON when the reply is (fenced) JSON, otherwise the plain text.
     */
    static extractResponse = (response, gptServer) => {

        // Strip a ```json ... ``` markdown fence if present; otherwise pass text through.
        const extractJsonSnippet = (text) => {
            const match = text.match(/```json([\s\S]*?)```/);
            return match ? match[1].trim() : text;
        };

        let r;
        switch (gptServer) {
            // openai and gemini both return a chat message whose text lives in `.content`
            // (the two branches were identical duplicates — merged via fall-through).
            case "openai":
            case "gemini":
                r = extractJsonSnippet(response.content.trim());
                break;
            case "ollama":
                // the Ollama LLM wrapper returns the raw string itself
                r = extractJsonSnippet(response);
                break;
            default:
                // Previously `default: break;` left `r` undefined, so an unknown
                // server silently returned undefined. Best-effort instead:
                // use the string directly, or `.content` when it is a message.
                r = extractJsonSnippet(
                    typeof response === "string" ? response : String(response?.content ?? "").trim()
                );
                break;
        }

        try {
            return JSON.parse(r);
        } catch (error) {
            // Not valid JSON — return the (fence-stripped) text unchanged.
            return r;
        }
    };
}
|
|
47
|
+
|
|
@@ -1,30 +0,0 @@
|
|
|
1
|
-
import { PromptTemplate } from '@langchain/core/prompts';
|
|
2
|
-
|
|
3
|
-
export class IdeAiPrompt
|
|
4
|
-
{
|
|
5
|
-
constructor() {
|
|
6
|
-
}
|
|
7
|
-
|
|
8
|
-
static generatePrompt = async (path, params) => {
|
|
9
|
-
|
|
10
|
-
const template = await fetch(path).then(res => res.text());
|
|
11
|
-
|
|
12
|
-
return await new PromptTemplate({
|
|
13
|
-
template,
|
|
14
|
-
inputVariables: Object.keys(params),
|
|
15
|
-
}).format(params);
|
|
16
|
-
};
|
|
17
|
-
|
|
18
|
-
static getDevelopActionClassification = async (userPrompt) => {
|
|
19
|
-
|
|
20
|
-
const template = await fetch('/prompts/meta/개발액션분류.txt').then(res => res.text());
|
|
21
|
-
|
|
22
|
-
return await new PromptTemplate({
|
|
23
|
-
template,
|
|
24
|
-
inputVariables: ['userPrompt'],
|
|
25
|
-
}).format({
|
|
26
|
-
userPrompt: userPrompt,
|
|
27
|
-
});
|
|
28
|
-
};
|
|
29
|
-
}
|
|
30
|
-
|