@memberjunction/ai-mistral 0.9.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +1 -0
- package/dist/index.js +18 -0
- package/dist/index.js.map +1 -0
- package/dist/models/mistral.d.ts +119 -0
- package/dist/models/mistral.js +145 -0
- package/dist/models/mistral.js.map +1 -0
- package/package.json +26 -0
- package/readme.md +2 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Barrel file: re-exports the Mistral wrapper class and all of its API types.
export * from './models/mistral';
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
__exportStar(require("./models/mistral"), exports);
|
|
18
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,mDAAiC"}
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import { BaseLLM, ChatMessage, ChatParams, ChatResult, ClassifyParams, ClassifyResult, SummarizeParams, SummarizeResult } from '@memberjunction/ai';
|
|
2
|
+
/**
 * MemberJunction wrapper around the Mistral AI REST API.
 * Extends BaseLLM and is registered with the MJ class factory under the
 * key "MistralLLM" (see the decorator in mistral.js).
 */
export declare class MistralLLM extends BaseLLM {
    // Base URL for the Mistral REST API.
    // NOTE(review): never assigned in the shipped implementation (the env-var
    // read is commented out), so requests go out with an undefined baseURL —
    // confirm against the implementation before relying on this field.
    private apiBaseURL;
    // Mirrors the MISTRAL_ENABLE_SAFE_PROMPT env var ("true" enables safe_prompt).
    private enableSafePrompt;
    constructor(apiKey: string);
    /** Runs a chat completion using params.model / params.messages. */
    ChatCompletion(params: ChatParams): Promise<ChatResult>;
    /** Not implemented — always rejects with "Method not implemented.". */
    SummarizeText(params: SummarizeParams): Promise<SummarizeResult>;
    /** Not implemented — always rejects with "Method not implemented.". */
    ClassifyText(params: ClassifyParams): Promise<ClassifyResult>;
    /**
     * Helper function to ask a simple question with the Tiny model; logs each
     * choice's content to the console rather than returning it.
     * @param prompt plain-text question to send
     */
    Ask(prompt: string): Promise<void>;
    /**
     * Returns a list of available models from GET /models.
     * @returns the raw ListModelsResponse payload
     */
    ListModels(): Promise<ListModelsResponse>;
    /** Convenience wrapper: wraps a single message and delegates to Chat. */
    ChatSingle(message: string, role?: string, model?: string, temperature?: number, maxTokens?: number, topP?: number, randomSeed?: number, safePrompt?: boolean): Promise<ChatCompletionResponse>;
    /** Low-level chat call against POST /chat/completions. */
    Chat(messages: ChatMessage[], model?: string, temperature?: number, maxTokens?: number, topP?: number, randomSeed?: number, safePrompt?: boolean): Promise<ChatCompletionResponse>;
    private MakeChatCompletionRequest;
    private createAxiosRequestConfig;
    private callApi;
    /** Builds one ChatMessage per prompt, all with the same role (default 'user'). */
    createChatMessages(prompts: string[], role?: string): ChatMessage[];
}
|
|
26
|
+
/** Role strings accepted by the Mistral chat API ('user' / 'system'). */
export declare const MistralRoles: {
    User: string;
    System: string;
};
/** Model identifiers ("mistral-tiny" / "mistral-small" / "mistral-medium"). */
export declare const MistralModels: {
    Tiny: string;
    Small: string;
    Medium: string;
};
|
|
35
|
+
/** Permission record attached to a model entry (OpenAI-style schema). */
export interface ModelPermission {
    id: string;
    object: 'model_permission';
    created: number;
    allow_create_engine: boolean;
    allow_sampling: boolean;
    allow_logprobs: boolean;
    allow_search_indices: boolean;
    allow_view: boolean;
    allow_fine_tuning: boolean;
    organization: string;
    group: string | null;
    is_blocking: boolean;
}
/** A single model entry from GET /models. */
export interface Model {
    id: string;
    object: 'model';
    created: number;
    owned_by: string;
    root: string | null;
    parent: string | null;
    permission: ModelPermission[];
}
/** Payload of GET /models. */
export interface ListModelsResponse {
    object: 'list';
    data: Model[];
}
/** Token accounting block returned on completion/embedding responses. */
export interface TokenUsage {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
}
|
|
67
|
+
/**
 * Body sent to POST /chat/completions.
 * NOTE(review): the name misspells "Completion" (extra "et"); it is kept
 * as-is because it is part of the published, exported API surface.
 */
export type ChatCompletetionRequest = {
    model: string;
    messages: ChatMessage[];
    temperature: number;
    max_tokens: number;
    top_p: number;
    random_seed: number;
    stream: boolean;
    safe_prompt: boolean;
};
|
|
77
|
+
/** One completion choice in a non-streaming chat response. */
export interface ChatCompletionResponseChoice {
    index: number;
    message: {
        role: string;
        content: string;
    };
    finish_reason: string;
}
/** One incremental choice delta in a streaming chat response. */
export interface ChatCompletionResponseChunkChoice {
    index: number;
    delta: {
        role?: string;
        content?: string;
    };
    finish_reason: string;
}
/** Full payload of a non-streaming POST /chat/completions call. */
export interface ChatCompletionResponse {
    id: string;
    object: 'chat.completion';
    created: number;
    model: string;
    choices: ChatCompletionResponseChoice[];
    usage: TokenUsage;
}
/** One server-sent chunk of a streaming chat completion. */
export interface ChatCompletionResponseChunk {
    id: string;
    object: 'chat.completion.chunk';
    created: number;
    model: string;
    choices: ChatCompletionResponseChunkChoice[];
}
/** A single embedding vector. */
export interface Embedding {
    id: string;
    object: 'embedding';
    embedding: number[];
}
/** Payload of an embeddings call. */
export interface EmbeddingResponse {
    id: string;
    object: 'list';
    data: Embedding[];
    model: string;
    usage: TokenUsage;
}
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
9
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.MistralModels = exports.MistralRoles = exports.MistralLLM = void 0;
|
|
13
|
+
const axios_1 = __importDefault(require("axios"));
|
|
14
|
+
const ai_1 = require("@memberjunction/ai");
|
|
15
|
+
const global_1 = require("@memberjunction/global");
|
|
16
|
+
/**
 * MemberJunction wrapper around the Mistral AI chat REST API.
 * All HTTP traffic goes through createAxiosRequestConfig/callApi so every
 * endpoint shares the same auth header, content type, and timeout.
 */
let MistralLLM = class MistralLLM extends ai_1.BaseLLM {
    constructor(apiKey) {
        super(apiKey);
        // BUG FIX: apiBaseURL was never assigned (the MISTRAL_API_BASE_URL env
        // read was commented out), so every request went out with an undefined
        // baseURL. Default to the documented Mistral endpoint instead.
        this.apiBaseURL = "https://api.mistral.ai/v1";
        this.enableSafePrompt = process.env.MISTRAL_ENABLE_SAFE_PROMPT === "true";
    }
    /**
     * Runs a chat completion using params.model / params.messages.
     * NOTE(review): resolves with the raw API payload (or null on failure),
     * not a ChatResult wrapper — confirm against the declared interface.
     */
    async ChatCompletion(params) {
        // Reuse the shared request builder so headers (Content-Type, auth)
        // and the timeout stay consistent with every other endpoint call;
        // the previous inline config sent no Content-Type and no timeout,
        // and left debug console.log calls in the hot path.
        const config = this.createAxiosRequestConfig("post", "chat/completions", {
            model: params.model,
            messages: params.messages,
            safe_prompt: this.enableSafePrompt
        });
        return await this.callApi(config);
    }
    /** Not implemented — always rejects. */
    async SummarizeText(params) {
        throw new Error("Method not implemented.");
    }
    /** Not implemented — always rejects. */
    async ClassifyText(params) {
        throw new Error("Method not implemented.");
    }
    /**
     * Helper function to ask a simple question with the Tiny model; logs the
     * response and each choice's content to the console.
     * @param prompt plain-text question to send
     */
    async Ask(prompt) {
        let response = await this.ChatSingle(prompt, undefined, exports.MistralModels.Tiny);
        console.log(response);
        // callApi resolves to null on failure; guard instead of crashing
        // with a TypeError on response.choices.
        if (!response) {
            return;
        }
        response.choices.forEach((choice) => {
            console.log(choice.message.content);
        });
    }
    /**
     * Returns a list of available models.
     * @returns the raw payload of GET /models (or null on failure)
     */
    async ListModels() {
        const request = this.createAxiosRequestConfig('get', 'models');
        return await this.callApi(request);
    }
    /** Convenience wrapper: wraps one message and delegates to Chat. */
    async ChatSingle(message, role = exports.MistralRoles.User, model = exports.MistralModels.Medium, temperature = null, maxTokens = null, topP = null, randomSeed = null, safePrompt = this.enableSafePrompt) {
        const chatMessage = [
            {
                role: role,
                content: message
            }
        ];
        return await this.Chat(chatMessage, model, temperature, maxTokens, topP, randomSeed, safePrompt);
    }
    /** Low-level chat call against POST /chat/completions. */
    async Chat(messages, model = exports.MistralModels.Medium, temperature = null, maxTokens = null, topP = null, randomSeed = null, safePrompt = this.enableSafePrompt) {
        const request = this.MakeChatCompletionRequest(model, messages, temperature, maxTokens, topP, randomSeed, false, safePrompt);
        const axiosRequest = this.createAxiosRequestConfig('post', "chat/completions", request);
        return await this.callApi(axiosRequest);
    }
    // Builds the JSON body for /chat/completions; null parameters become
    // undefined so JSON.stringify omits them and the API applies its defaults.
    MakeChatCompletionRequest(model, messages, temperature = null, maxTokens = null, topP = null, randomSeed = null, stream = null, safePrompt = null) {
        return {
            model: model,
            messages: messages,
            temperature: temperature ?? undefined,
            max_tokens: maxTokens ?? undefined,
            top_p: topP ?? undefined,
            random_seed: randomSeed ?? undefined,
            stream: stream ?? undefined,
            safe_prompt: safePrompt ?? undefined,
        };
    }
    // Shared axios request factory: bearer auth, JSON content type, and a
    // two-minute timeout on every call.
    createAxiosRequestConfig(method, path, options = null) {
        return {
            method: method,
            baseURL: this.apiBaseURL,
            url: `/${path}`,
            headers: {
                Accept: options?.stream ? 'text/event-stream' : 'application/json',
                // BUG FIX: was the bogus key `ContentType`, which axios sends
                // verbatim as a "ContentType" header; the standard header name
                // is Content-Type.
                'Content-Type': 'application/json',
                Authorization: `Bearer ${this.apiKey}`
            },
            data: method !== 'get' ? options : null,
            timeout: 120 * 1000
        };
    }
    // Issues the request and unwraps response.data. On failure, logs the
    // error with context and resolves to null (best-effort contract kept
    // for existing callers).
    async callApi(data) {
        try {
            const response = await (0, axios_1.default)(data);
            return response.data;
        }
        catch (error) {
            // BUG FIX: axios puts the server payload on error.response.data,
            // not error.data, and "occured" was misspelled.
            console.error("An error occurred when making request to", data.baseURL + data.url, ":\n", error.response?.data?.message ?? error.message);
            return null;
        }
    }
    /** Builds one ChatMessage per prompt, all with the same role. */
    createChatMessages(prompts, role = 'user') {
        let messages = [];
        prompts.forEach((prompt) => {
            messages.push({
                role: role,
                content: prompt
            });
        });
        return messages;
    }
};
|
|
132
|
+
exports.MistralLLM = MistralLLM;
// Register with the MemberJunction class factory so BaseLLM lookups by the
// key "MistralLLM" resolve to this implementation.
exports.MistralLLM = MistralLLM = __decorate([
    (0, global_1.RegisterClass)(ai_1.BaseLLM, "MistralLLM")
], MistralLLM);
|
|
136
|
+
// Role strings accepted by the Mistral chat API.
exports.MistralRoles = {
    User: 'user',
    System: 'system'
};
// Model identifiers (legacy tiny/small/medium naming).
exports.MistralModels = {
    Tiny: "mistral-tiny",
    Small: "mistral-small",
    Medium: "mistral-medium"
};
//# sourceMappingURL=mistral.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"mistral.js","sourceRoot":"","sources":["../../src/models/mistral.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,kDAAiE;AACjE,2CAAqK;AACrK,mDAAuD;AAGhD,IAAM,UAAU,GAAhB,MAAM,UAAW,SAAQ,YAAO;IAInC,YAAY,MAAc;QACtB,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,oFAAoF;QACpF,4DAA4D;QAC5D,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,0BAA0B,KAAK,MAAM,CAAC;IAC9E,CAAC;IAEM,KAAK,CAAC,cAAc,CAAC,MAAkB;QAC1C,MAAM,MAAM,GAAuB;YAC/B,MAAM,EAAE,MAAM;YACd,OAAO,EAAE,IAAI,CAAC,UAAU;YACxB,GAAG,EAAE,mBAAmB;YACxB,OAAO,EAAE;gBACL,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE;aACzC;YACD,IAAI,EAAE;gBACF,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,QAAQ,EAAE,MAAM,CAAC,QAAQ;gBACzB,WAAW,EAAE,IAAI,CAAC,gBAAgB;aACrC;SACJ,CAAC;QACF,IAAI,MAAM,GAAQ,MAAM,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC7C,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QACpB,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAC5B,OAAO,MAAM,CAAC;IAClB,CAAC;IAEM,KAAK,CAAC,aAAa,CAAC,MAAuB;QAC9C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IAEM,KAAK,CAAC,YAAY,CAAC,MAAsB;QAC5C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,GAAG,CAAC,MAAc;QAC3B,IAAI,QAAQ,GAA2B,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,qBAAa,CAAC,IAAI,CAAC,CAAC;QACpG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;QACtB,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,MAAoC,EAAE,EAAE;YAC9D,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QACxC,CAAC,CAAC,CAAC;IACP,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,UAAU;QACnB,MAAM,OAAO,GAAuB,IAAI,CAAC,wBAAwB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;QACnF,IAAI,QAAQ,GAAuB,MAAM,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,OAAO,QAAQ,CAAC;IACpB,CAAC;IAEM,KAAK,CAAC,UAAU,CACnB,OAAe,EACf,OAAe,oBAAY,CAAC,IAAI,EAChC,QAAgB,qBAAa,CAAC,MAAM,EACpC,cAAsB,IAAI,EAC1B,YAAoB,IAAI,EACxB,OAAe,IAAI,EACnB,aAAqB,IAAI,EACzB,aAAsB,IAAI,CAAC,gBAAgB;QAEvC,MAAM,WAAW,GAAkB;YAC/B;gBACI,IAAI,EAAmB,IAAI;gBAC3B,OAAO,EAAE,OAAO;aACnB;SACJ,CAAC;QACF,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC;IACzG,CAAC;IAEM,KAAK,CAAC,IAAI,CACb,QAAuB,EACvB,QAAgB,qBAAa,CAAC,MAAM,
EACpC,cAAsB,IAAI,EAC1B,YAAoB,IAAI,EACxB,OAAe,IAAI,EACnB,aAAqB,IAAI,EACzB,aAAsB,IAAI,CAAC,gBAAgB;QAG3C,MAAM,OAAO,GAA4B,IAAI,CAAC,yBAAyB,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;QACtJ,MAAM,YAAY,GAAuB,IAAI,CAAC,wBAAwB,CAAC,MAAM,EAAE,kBAAkB,EAAE,OAAO,CAAC,CAAC;QAC5G,OAAO,MAAM,IAAI,CAAC,OAAO,CAAyB,YAAY,CAAC,CAAC;IACpE,CAAC;IAEO,yBAAyB,CAC7B,KAAa,EACb,QAAuB,EACvB,cAAsB,IAAI,EAC1B,YAAoB,IAAI,EACxB,OAAe,IAAI,EACnB,aAAqB,IAAI,EACzB,SAAkB,IAAI,EACtB,aAAsB,IAAI;QAE1B,OAAO;YACL,KAAK,EAAE,KAAK;YACZ,QAAQ,EAAE,QAAQ;YAClB,WAAW,EAAE,WAAW,IAAI,SAAS;YACrC,UAAU,EAAE,SAAS,IAAI,SAAS;YAClC,KAAK,EAAE,IAAI,IAAI,SAAS;YACxB,WAAW,EAAE,UAAU,IAAI,SAAS;YACpC,MAAM,EAAE,MAAM,IAAI,SAAS;YAC3B,WAAW,EAAE,UAAU,IAAI,SAAS;SACrC,CAAC;IACN,CAAC;IAAA,CAAC;IAEM,wBAAwB,CAAC,MAAc,EAAE,IAAY,EAAE,UAAmC,IAAI;QAClG,OAAO;YACH,MAAM,EAAE,MAAM;YACd,OAAO,EAAE,IAAI,CAAC,UAAU;YACxB,GAAG,EAAE,IAAI,IAAI,EAAE;YACf,OAAO,EAAE;gBACL,MAAM,EAAE,OAAO,EAAE,MAAM,CAAA,CAAC,CAAC,mBAAmB,CAAA,CAAC,CAAC,kBAAkB;gBAChE,WAAW,EAAE,kBAAkB;gBAC/B,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE;aACzC;YACD,IAAI,EAAE,MAAM,KAAK,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI;YACvC,OAAO,EAAE,GAAG,GAAG,IAAI;SACtB,CAAA;IACL,CAAC;IAEO,KAAK,CAAC,OAAO,CAAI,IAAwB;QAC7C,IAAI,CAAC;YACH,MAAM,QAAQ,GAAkB,MAAM,IAAA,eAAK,EAAC,IAAI,CAAC,CAAC;YAClD,OAAO,QAAQ,CAAC,IAAI,CAAC;QACvB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YACnB,OAAO,CAAC,KAAK,CAAC,yCAAyC,EAAE,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,KAAK,EAAE,KAAK,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAC9G,OAAO,IAAI,CAAC;QAChB,CAAC;IACL,CAAC;IAAA,CAAC;IAEK,kBAAkB,CAAC,OAAiB,EAAE,OAAe,MAAM;QAC9D,IAAI,QAAQ,GAAkB,EAAE,CAAC;QACjC,OAAO,CAAC,OAAO,CAAC,CAAC,MAAc,EAAE,EAAE;YAC/B,QAAQ,CAAC,IAAI,CAAC;gBACV,IAAI,EAAmB,IAAI;gBAC3B,OAAO,EAAE,MAAM;aAClB,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;QAEH,OAAO,QAAQ,CAAC;IACpB,CAAC;CACJ,CAAA;AA1JY,gCAAU;qBAAV,UAAU;IADtB,IAAA,sBAAa,EAAC,YAAO,EAAE,YAAY,CAAC;GACxB,UAAU,CA0JtB;AAEY,QAAA,YAAY,GAAG;IACxB,IAAI,EAAE,MAAM;IACZ,MAAM,EAAE,QAAQ;CACnB,CAAA;A
AEY,QAAA,aAAa,GAAG;IACzB,IAAI,EAAE,cAAc;IACpB,KAAK,EAAE,eAAe;IACtB,MAAM,EAAE,gBAAgB;CAC3B,CAAA"}
|
package/package.json
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@memberjunction/ai-mistral",
|
|
3
|
+
"version": "0.9.9",
|
|
4
|
+
"description": "MemberJunction Wrapper for Mistral AI's AI Models",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"types": "dist/index.d.ts",
|
|
7
|
+
"files": [
|
|
8
|
+
"/dist"
|
|
9
|
+
],
|
|
10
|
+
"scripts": {
|
|
11
|
+
"start": "ts-node-dev src/index.ts",
|
|
12
|
+
"build": "tsc",
|
|
13
|
+
"test": "echo \"Error: no test specified\" && exit 1"
|
|
14
|
+
},
|
|
15
|
+
"author": "MemberJunction.com",
|
|
16
|
+
"license": "ISC",
|
|
17
|
+
"devDependencies": {
|
|
18
|
+
"ts-node-dev": "^2.0.0",
|
|
19
|
+
"typescript": "^5.3.3",
|
|
20
|
+
"openai": "^3.2.1"
|
|
21
|
+
},
|
|
22
|
+
"dependencies": {
|
|
23
|
+
"@memberjunction/ai": "^0.9.152",
|
|
24
|
+
"@memberjunction/global": "^0.9.146"
|
|
25
|
+
}
|
|
26
|
+
}
|
package/readme.md
ADDED