@graf-research/llm-runner 0.0.15 → 0.0.17
This diff compares the published contents of two public releases of the package, as they appear in its registry; it is provided for informational purposes only.
- package/dist/index.d.ts +2 -1
- package/dist/index.js +3 -1
- package/dist/platform/deepseek.d.ts +14 -0
- package/dist/platform/deepseek.js +97 -0
- package/package.json +1 -1
package/dist/index.d.ts
CHANGED

@@ -3,6 +3,7 @@ import { ChatGPTLLM } from "./platform/chatgpt";
 import { OllamaLLM } from "./platform/ollama";
 import { AnthropicLLM } from "./platform/anthropic";
 import { GeminiLLM } from "./platform/gemini";
+import { DeepseekLLM } from "./platform/deepseek";
 import { GenericLLM } from "./base/generic-llm";
 import { MultistepTypes } from "./multistep/types";
 import { MSModule_Choose } from "./multistep/modules/choose";
@@ -11,4 +12,4 @@ import { MSModule_Normal } from "./multistep/modules/normal";
 import { MSModule_OpenListAnswer } from "./multistep/modules/open-list-answer";
 import { MSModule_Plan } from "./multistep/modules/plan";
 import { MSModule_YesNo } from "./multistep/modules/yes-no";
-export { GenericLLM, LLMRunner, ChatGPTLLM, OllamaLLM, AnthropicLLM, GeminiLLM, MultistepTypes, MSModule_Choose, MSModule_MultipleChoiceAnswer, MSModule_Normal, MSModule_OpenListAnswer, MSModule_Plan, MSModule_YesNo };
+export { GenericLLM, LLMRunner, ChatGPTLLM, OllamaLLM, AnthropicLLM, GeminiLLM, DeepseekLLM, MultistepTypes, MSModule_Choose, MSModule_MultipleChoiceAnswer, MSModule_Normal, MSModule_OpenListAnswer, MSModule_Plan, MSModule_YesNo };
package/dist/index.js
CHANGED

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.MSModule_YesNo = exports.MSModule_Plan = exports.MSModule_OpenListAnswer = exports.MSModule_Normal = exports.MSModule_MultipleChoiceAnswer = exports.MSModule_Choose = exports.GeminiLLM = exports.AnthropicLLM = exports.OllamaLLM = exports.ChatGPTLLM = exports.LLMRunner = exports.GenericLLM = void 0;
+exports.MSModule_YesNo = exports.MSModule_Plan = exports.MSModule_OpenListAnswer = exports.MSModule_Normal = exports.MSModule_MultipleChoiceAnswer = exports.MSModule_Choose = exports.DeepseekLLM = exports.GeminiLLM = exports.AnthropicLLM = exports.OllamaLLM = exports.ChatGPTLLM = exports.LLMRunner = exports.GenericLLM = void 0;
 const llm_runner_1 = require("./base/llm-runner");
 Object.defineProperty(exports, "LLMRunner", { enumerable: true, get: function () { return llm_runner_1.LLMRunner; } });
 const chatgpt_1 = require("./platform/chatgpt");
@@ -11,6 +11,8 @@ const anthropic_1 = require("./platform/anthropic");
 Object.defineProperty(exports, "AnthropicLLM", { enumerable: true, get: function () { return anthropic_1.AnthropicLLM; } });
 const gemini_1 = require("./platform/gemini");
 Object.defineProperty(exports, "GeminiLLM", { enumerable: true, get: function () { return gemini_1.GeminiLLM; } });
+const deepseek_1 = require("./platform/deepseek");
+Object.defineProperty(exports, "DeepseekLLM", { enumerable: true, get: function () { return deepseek_1.DeepseekLLM; } });
 const generic_llm_1 = require("./base/generic-llm");
 Object.defineProperty(exports, "GenericLLM", { enumerable: true, get: function () { return generic_llm_1.GenericLLM; } });
 const choose_1 = require("./multistep/modules/choose");
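Net effect of the two index changes above: DeepseekLLM is re-exported from the package root in both the type declarations and the CommonJS runtime. A minimal consumption sketch (assuming the published package name is used as-is):

    // TypeScript / ESM consumers pick the class up via index.d.ts:
    import { DeepseekLLM } from "@graf-research/llm-runner";

    // CommonJS consumers go through the getter installed in index.js:
    const { DeepseekLLM } = require("@graf-research/llm-runner");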
package/dist/platform/deepseek.d.ts
ADDED

@@ -0,0 +1,14 @@
+import { LLMRunner } from "../base/llm-runner";
+import { GenericLLM } from "../base/generic-llm";
+import { Readable } from 'node:stream';
+export type DeepseekModel = 'deepseek-reasoner' | 'deepseek-chat';
+/**
+ * Chat GPT Implementation
+ */
+export declare class DeepseekLLM extends LLMRunner.BaseLLM {
+    private cgpt;
+    private model;
+    constructor(api_key: string, model: DeepseekModel, chat_session_manager?: GenericLLM.ChatSessionManager<LLMRunner.ChatSession, LLMRunner.Message>);
+    protected streamChat(messages: string[], id_session: string | null, stream: Readable, ac: AbortController): Promise<void>;
+    protected chat(messages: string[], id_session: string | null): Promise<string>;
+}
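Both chat entry points are declared protected, so they are presumably driven through whatever public interface LLMRunner.BaseLLM exposes rather than called directly. Construction, however, is fully specified by this declaration: an API key, one of the two DeepseekModel values, and an optional session manager. A construction sketch (the DEEPSEEK_API_KEY variable name is illustrative, not something the package prescribes):

    import { DeepseekLLM } from "@graf-research/llm-runner";

    // 'deepseek-chat' and 'deepseek-reasoner' are the only values DeepseekModel permits.
    const llm = new DeepseekLLM(process.env.DEEPSEEK_API_KEY ?? "", "deepseek-chat");
    // Omitting the third argument falls back to a default LLMRunner.SessionManager
    // (see the compiled constructor in deepseek.js below).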
package/dist/platform/deepseek.js
ADDED

@@ -0,0 +1,97 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DeepseekLLM = void 0;
+const openai_1 = __importDefault(require("openai"));
+const llm_runner_1 = require("../base/llm-runner");
+/**
+ * Chat GPT Implementation
+ */
+class DeepseekLLM extends llm_runner_1.LLMRunner.BaseLLM {
+    constructor(api_key, model, chat_session_manager) {
+        super(chat_session_manager !== null && chat_session_manager !== void 0 ? chat_session_manager : new llm_runner_1.LLMRunner.SessionManager());
+        this.cgpt = new openai_1.default({ apiKey: api_key, baseURL: 'https://api.deepseek.com' });
+        this.model = model;
+    }
+    streamChat(messages, id_session, stream, ac) {
+        return __awaiter(this, void 0, void 0, function* () {
+            var _a, e_1, _b, _c;
+            var _d, _e;
+            const chat_history = id_session ? yield this.chat_session_manager.retrieveHistory(id_session) : [];
+            const chat_messages = [
+                ...chat_history.map((msg) => { var _a; return ({ role: msg.role, content: (_a = msg.content) !== null && _a !== void 0 ? _a : '' }); }),
+                ...messages.map(content => ({ role: 'user', content }))
+            ];
+            if (ac.signal.aborted) {
+                stream.push(null);
+                return;
+            }
+            const cgpt_stream = yield this.cgpt.chat.completions.create({
+                model: this.model,
+                store: false,
+                stream: true,
+                n: 1,
+                messages: chat_messages
+            });
+            ac.signal.addEventListener('abort', () => cgpt_stream.controller.abort());
+            try {
+                for (var _f = true, cgpt_stream_1 = __asyncValues(cgpt_stream), cgpt_stream_1_1; cgpt_stream_1_1 = yield cgpt_stream_1.next(), _a = cgpt_stream_1_1.done, !_a; _f = true) {
+                    _c = cgpt_stream_1_1.value;
+                    _f = false;
+                    const chunk = _c;
+                    const c = chunk;
+                    const first_choice = (_d = c.choices) === null || _d === void 0 ? void 0 : _d[0];
+                    const delta = (_e = first_choice.delta.content) !== null && _e !== void 0 ? _e : '';
+                    if (!delta) {
+                        continue;
+                    }
+                    stream.push(delta);
+                }
+            }
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (!_f && !_a && (_b = cgpt_stream_1.return)) yield _b.call(cgpt_stream_1);
+                }
+                finally { if (e_1) throw e_1.error; }
+            }
+            stream.push(null);
+        });
+    }
+    chat(messages, id_session) {
+        return __awaiter(this, void 0, void 0, function* () {
+            var _a, _b;
+            const chat_history = id_session ? yield this.chat_session_manager.retrieveHistory(id_session) : [];
+            const chat_messages = [
+                ...chat_history.map((msg) => { var _a; return ({ role: msg.role, content: (_a = msg.content) !== null && _a !== void 0 ? _a : '' }); }),
+                ...messages.map(content => ({ role: 'user', content }))
+            ];
+            const res = yield this.cgpt.chat.completions.create({
+                model: this.model,
+                store: false,
+                n: 1,
+                messages: chat_messages
+            });
+            return (_b = (_a = res.choices) === null || _a === void 0 ? void 0 : _a[0].message.content) !== null && _b !== void 0 ? _b : '';
+        });
+    }
+}
+exports.DeepseekLLM = DeepseekLLM;
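Everything above exports.DeepseekLLM = void 0; is standard tsc down-leveling: __awaiter compiles async/await, __asyncValues compiles for await loops. De-sugared into approximate pre-compilation TypeScript (a sketch inferred from the compiled output, not the package's actual source), the streaming path reads as below; the method is lifted out of the class for illustration, so client, model, and history stand in for this.cgpt, this.model, and the session manager's retrieveHistory result:

    import OpenAI from "openai";
    import { Readable } from "node:stream";

    async function streamDeepseek(
        client: OpenAI,                      // stands in for this.cgpt
        model: "deepseek-reasoner" | "deepseek-chat",
        history: { role: "user" | "assistant" | "system"; content: string | null }[],
        messages: string[],
        stream: Readable,
        ac: AbortController
    ): Promise<void> {
        // Replay stored history, then append the new user turns.
        const chat_messages: OpenAI.ChatCompletionMessageParam[] = [
            ...history.map(msg => ({ role: msg.role, content: msg.content ?? "" })),
            ...messages.map(content => ({ role: "user" as const, content }))
        ];
        if (ac.signal.aborted) {
            stream.push(null);               // cancelled before the request: just close
            return;
        }
        const cgpt_stream = await client.chat.completions.create({
            model,
            store: false,
            stream: true,
            n: 1,
            messages: chat_messages
        });
        // Propagate caller-side cancellation into the SDK stream.
        ac.signal.addEventListener("abort", () => cgpt_stream.controller.abort());
        for await (const chunk of cgpt_stream) {
            const delta = chunk.choices?.[0]?.delta.content ?? "";
            if (delta) stream.push(delta);   // forward each non-empty token delta
        }
        stream.push(null);                   // Readable end-of-stream marker
    }

The two stream.push(null) calls mirror the compiled code: the Readable is closed either immediately, when the caller aborted before the request was sent, or after the token loop drains.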