@memberjunction/ai-mistral 2.29.2 → 2.30.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/models/mistral.d.ts CHANGED
@@ -1,21 +1,12 @@
  import { BaseLLM, ChatParams, ChatResult, ClassifyParams, ClassifyResult, SummarizeParams, SummarizeResult } from '@memberjunction/ai';
- import { MistralClient } from './mistralClient';
- import { ListModelsResponse } from '../generic/mistral.types';
+ import { Mistral } from "@mistralai/mistralai";
  export declare class MistralLLM extends BaseLLM {
  private _client;
  constructor(apiKey: string);
- get client(): MistralClient;
- ChatCompletion(params: MistralChatParams): Promise<ChatResult>;
+ get Client(): Mistral;
+ ChatCompletion(params: ChatParams): Promise<ChatResult>;
  SummarizeText(params: SummarizeParams): Promise<SummarizeResult>;
  ClassifyText(params: ClassifyParams): Promise<ClassifyResult>;
- /**
- * Returns a list of available models
- * @returns {Promise<AvailableModelInfo>}
- */
- listModels(): Promise<ListModelsResponse>;
- }
- export declare class MistralChatParams extends ChatParams {
- model: string;
  }
  export declare function LoadMistralLLM(): void;
  //# sourceMappingURL=mistral.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"mistral.d.ts","sourceRoot":"","sources":["../../src/models/mistral.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,UAAU,EAAoB,cAAc,EAAE,cAAc,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAEzJ,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAgC,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AAE5F,qBACa,UAAW,SAAQ,OAAO;IACnC,OAAO,CAAC,OAAO,CAAgB;gBACnB,MAAM,EAAE,MAAM;IAK1B,IAAW,MAAM,IAAI,aAAa,CAAuB;IAE5C,cAAc,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAAC,UAAU,CAAC;IA0C9D,aAAa,CAAC,MAAM,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIhE,YAAY,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;IAI1E;;;OAGG;IACU,UAAU,IAAI,OAAO,CAAC,kBAAkB,CAAC;CAIzD;AAED,qBAAa,iBAAkB,SAAQ,UAAU;IAC7C,KAAK,EAAE,MAAM,CAAC;CACjB;AAED,wBAAgB,cAAc,SAE7B"}
+ {"version":3,"file":"mistral.d.ts","sourceRoot":"","sources":["../../src/models/mistral.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,UAAU,EAAoB,cAAc,EAAE,cAAc,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAEzJ,OAAO,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAG/C,qBACa,UAAW,SAAQ,OAAO;IACnC,OAAO,CAAC,OAAO,CAAU;gBAEb,MAAM,EAAE,MAAM;IAO1B,IAAW,MAAM,IAAI,OAAO,CAAuB;IAEtC,cAAc,CAAC,MAAM,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IA4DvD,aAAa,CAAC,MAAM,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIhE,YAAY,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;CAG7E;AAED,wBAAgB,cAAc,SAE7B"}
package/dist/models/mistral.js CHANGED
@@ -6,32 +6,47 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key,
  return c > 3 && r && Object.defineProperty(target, key, r), r;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.LoadMistralLLM = exports.MistralChatParams = exports.MistralLLM = void 0;
+ exports.LoadMistralLLM = exports.MistralLLM = void 0;
  const ai_1 = require("@memberjunction/ai");
  const global_1 = require("@memberjunction/global");
- const mistralClient_1 = require("./mistralClient");
+ const mistralai_1 = require("@mistralai/mistralai");
  let MistralLLM = class MistralLLM extends ai_1.BaseLLM {
  constructor(apiKey) {
  super(apiKey);
- this._client = new mistralClient_1.MistralClient({ apiKey });
+ this._client = new mistralai_1.Mistral({
+ apiKey: apiKey
+ });
  }
- get client() { return this._client; }
+ get Client() { return this._client; }
  async ChatCompletion(params) {
  const startTime = new Date();
- const chatResponse = await this.client.chat({
+ let responseFormat = undefined;
+ if (params.responseFormat) {
+ if (params.responseFormat === 'JSON') {
+ responseFormat = { type: "json_object" };
+ }
+ }
+ const chatResponse = await this.Client.chat.complete({
  model: params.model,
  messages: params.messages,
- max_tokens: params.maxOutputTokens,
- response_format: params.responseFormat
+ maxTokens: params.maxOutputTokens,
+ responseFormat: responseFormat
  });
  const endTime = new Date();
  let choices = chatResponse.choices.map((choice) => {
+ let content = "";
+ if (choice.message.content && typeof choice.message.content === 'string') {
+ content = choice.message.content;
+ }
+ else if (choice.message.content && Array.isArray(choice.message.content)) {
+ content = choice.message.content.join(' ');
+ }
  const res = {
  message: {
  role: 'assistant',
- content: choice.message.content
+ content: content
  },
- finish_reason: choice.finish_reason,
+ finish_reason: choice.finishReason,
  index: choice.index
  };
  return res;
@@ -45,9 +60,9 @@ let MistralLLM = class MistralLLM extends ai_1.BaseLLM {
  data: {
  choices: choices,
  usage: {
- totalTokens: chatResponse.usage.total_tokens,
- promptTokens: chatResponse.usage.prompt_tokens,
- completionTokens: chatResponse.usage.completion_tokens
+ totalTokens: chatResponse.usage.totalTokens,
+ promptTokens: chatResponse.usage.promptTokens,
+ completionTokens: chatResponse.usage.completionTokens
  }
  },
  errorMessage: "",
@@ -60,22 +75,11 @@ let MistralLLM = class MistralLLM extends ai_1.BaseLLM {
  async ClassifyText(params) {
  throw new Error("Method not implemented.");
  }
- /**
- * Returns a list of available models
- * @returns {Promise<AvailableModelInfo>}
- */
- async listModels() {
- const listModelsResponse = await this.client.listModels();
- return listModelsResponse;
- }
  };
  exports.MistralLLM = MistralLLM;
  exports.MistralLLM = MistralLLM = __decorate([
  (0, global_1.RegisterClass)(ai_1.BaseLLM, "MistralLLM")
  ], MistralLLM);
- class MistralChatParams extends ai_1.ChatParams {
- }
- exports.MistralChatParams = MistralChatParams;
  function LoadMistralLLM() {
  // this does nothing but prevents the class from being removed by the tree shaker
  }
@@ -1 +1 @@
- {"version":3,"file":"mistral.js","sourceRoot":"","sources":["../../src/models/mistral.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAAyJ;AACzJ,mDAAuD;AACvD,mDAAgD;AAIzC,IAAM,UAAU,GAAhB,MAAM,UAAW,SAAQ,YAAO;IAEnC,YAAY,MAAc;QACtB,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,OAAO,GAAG,IAAI,6BAAa,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACjD,CAAC;IAED,IAAW,MAAM,KAAmB,OAAO,IAAI,CAAC,OAAO,CAAC,CAAA,CAAC;IAElD,KAAK,CAAC,cAAc,CAAC,MAAyB;QACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;QAC7B,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC;YACxC,KAAK,EAAE,MAAM,CAAC,KAAK;YACnB,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,UAAU,EAAE,MAAM,CAAC,eAAe;YAClC,eAAe,EAAE,MAAM,CAAC,cAAc;SACzC,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;QAE3B,IAAI,OAAO,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAoC,EAAE,EAAE;YAChG,MAAM,GAAG,GAAqB;gBAC1B,OAAO,EAAE;oBACL,IAAI,EAAE,WAAW;oBACjB,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,OAAO;iBAClC;gBACD,aAAa,EAAE,MAAM,CAAC,aAAa;gBACnC,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB,CAAC;YACF,OAAO,GAAG,CAAC;QACf,CAAC,CAAC,CAAC;QAEH,OAAO;YACH,OAAO,EAAE,IAAI;YACb,UAAU,EAAE,IAAI;YAChB,SAAS,EAAE,SAAS;YACpB,OAAO,EAAE,OAAO;YAChB,WAAW,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE;YACpD,IAAI,EAAE;gBACF,OAAO,EAAE,OAAO;gBAChB,KAAK,EAAE;oBACH,WAAW,EAAE,YAAY,CAAC,KAAK,CAAC,YAAY;oBAC5C,YAAY,EAAE,YAAY,CAAC,KAAK,CAAC,aAAa;oBAC9C,gBAAgB,EAAE,YAAY,CAAC,KAAK,CAAC,iBAAiB;iBACzD;aACJ;YACD,YAAY,EAAE,EAAE;YAChB,SAAS,EAAE,IAAI;SAClB,CAAA;IAEL,CAAC;IAEM,KAAK,CAAC,aAAa,CAAC,MAAuB;QAC9C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IAEM,KAAK,CAAC,YAAY,CAAC,MAAsB;QAC5C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,UAAU;QACnB,MAAM,kBAAkB,GAAuB,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,CAAC;QAC9E,OAAO,kBAAkB,CAAC;IAC9B,CAAC;CACJ,CAAA;AAnEY,gCAAU;qBAAV,UAAU;IADtB,IAAA,sBAAa,EAAC,YAAO,EAAE,YAAY,CAAC;GACxB,UAAU,CAmEtB;AAED,MAAa,iBAAkB,SAAQ,eAAU;CAEhD;AAFD,8CAEC;AAED,SAAgB,cAAc;IAC1B,iFAAiF;AACrF,CAAC;AAFD,wCAEC"}
+ {"version":3,"file":"mistral.js","sourceRoot":"","sources":["../../src/models/mistral.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAAyJ;AACzJ,mDAAuD;AACvD,oDAA+C;AAIxC,IAAM,UAAU,GAAhB,MAAM,UAAW,SAAQ,YAAO;IAGnC,YAAY,MAAc;QACtB,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,OAAO,GAAG,IAAI,mBAAO,CAAC;YACvB,MAAM,EAAE,MAAM;SACjB,CAAC,CAAC;IACP,CAAC;IAED,IAAW,MAAM,KAAa,OAAO,IAAI,CAAC,OAAO,CAAC,CAAA,CAAC;IAE5C,KAAK,CAAC,cAAc,CAAC,MAAkB;QAC1C,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;QAE7B,IAAI,cAAc,GAA+B,SAAS,CAAC;QAC3D,IAAI,MAAM,CAAC,cAAc,EAAE,CAAC;YACxB,IAAG,MAAM,CAAC,cAAc,KAAK,MAAM,EAAE,CAAC;gBAClC,cAAc,GAAG,EAAE,IAAI,EAAE,aAAa,EAAE,CAAC;YAC7C,CAAC;QACL,CAAC;QAED,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC;YACjD,KAAK,EAAE,MAAM,CAAC,KAAK;YACnB,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,SAAS,EAAE,MAAM,CAAC,eAAe;YACjC,cAAc,EAAE,cAAc;SACjC,CAAC,CAAC;QAEH,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;QAE3B,IAAI,OAAO,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAA4B,EAAE,EAAE;YACxF,IAAI,OAAO,GAAW,EAAE,CAAC;YAEzB,IAAG,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,OAAO,MAAM,CAAC,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACtE,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC;YACrC,CAAC;iBACI,IAAG,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;gBACtE,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAC/C,CAAC;YAED,MAAM,GAAG,GAAqB;gBAC1B,OAAO,EAAE;oBACL,IAAI,EAAE,WAAW;oBACjB,OAAO,EAAE,OAAO;iBACnB;gBACD,aAAa,EAAE,MAAM,CAAC,YAAY;gBAClC,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB,CAAC;YACF,OAAO,GAAG,CAAC;QACf,CAAC,CAAC,CAAC;QAEH,OAAO;YACH,OAAO,EAAE,IAAI;YACb,UAAU,EAAE,IAAI;YAChB,SAAS,EAAE,SAAS;YACpB,OAAO,EAAE,OAAO;YAChB,WAAW,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE;YACpD,IAAI,EAAE;gBACF,OAAO,EAAE,OAAO;gBAChB,KAAK,EAAE;oBACH,WAAW,EAAE,YAAY,CAAC,KAAK,CAAC,WAAW;oBAC3C,YAAY,EAAE,YAAY,CAAC,KAAK,CAAC,YAAY;oBAC7C,gBAAgB,EAAE,YAAY,CAAC,KAAK,CAAC,gBAAgB;iBACxD;aACJ;YACD,YAAY,EAAE,EAAE;YAChB,SAAS,EAAE,IAAI;SAClB,CAAA;IAEL,CAAC;IAEM,KAAK,CAAC,aAAa,CAAC,MAAuB;QAC9C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IAEM,KAAK,CAAC,YAAY,CAAC,MAAsB;QAC5C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;CACJ,CAAA;AA/EY,gCAAU;qBAAV,UAAU;IADtB,IAAA,sBAAa,EAAC,YAAO,EAAE,YAAY,CAAC;GACxB,UAAU,CA+EtB;AAED,SAAgB,cAAc;IAC1B,iFAAiF;AACrF,CAAC;AAFD,wCAEC"}
package/dist/models/mistralEmbedding.d.ts CHANGED
@@ -1,9 +1,9 @@
  import { EmbedTextParams, EmbedTextsParams, Embeddings, EmbedTextResult, EmbedTextsResult } from "@memberjunction/ai";
- import { MistralClient } from './mistralClient';
+ import { Mistral } from "@mistralai/mistralai";
  export declare class MistralEmbedding extends Embeddings {
  private _client;
  constructor(apiKey: string);
- get Client(): MistralClient;
+ get Client(): Mistral;
  /**
  * Mistral AI embedding endpoint outputs vectors in 1024 dimensions
  */
@@ -1 +1 @@
- {"version":3,"file":"mistralEmbedding.d.ts","sourceRoot":"","sources":["../../src/models/mistralEmbedding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,gBAAgB,EAAE,UAAU,EAAc,eAAe,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAElI,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAIhD,qBACa,gBAAiB,SAAQ,UAAU;IAC5C,OAAO,CAAC,OAAO,CAAgB;gBAEnB,MAAM,EAAE,MAAM;IAK1B,IAAW,MAAM,IAAI,aAAa,CAAyB;IAE3D;;OAEG;IACU,SAAS,CAAC,MAAM,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAezE;;OAEG;IACU,UAAU,CAAC,MAAM,EAAE,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAW/D,kBAAkB,IAAI,OAAO,CAAC,GAAG,CAAC;CAKlD;AAED,wBAAgB,oBAAoB,SAEnC"}
+ {"version":3,"file":"mistralEmbedding.d.ts","sourceRoot":"","sources":["../../src/models/mistralEmbedding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,gBAAgB,EAAE,UAAU,EAAc,eAAe,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAElI,OAAO,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAI/C,qBACa,gBAAiB,SAAQ,UAAU;IAC5C,OAAO,CAAC,OAAO,CAAU;gBAET,MAAM,EAAE,MAAM;IAO1B,IAAW,MAAM,IAAI,OAAO,CAAuB;IAEvD;;OAEG;IACU,SAAS,CAAC,MAAM,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAoBzE;;OAEG;IACU,UAAU,CAAC,MAAM,EAAE,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAiB/D,kBAAkB,IAAI,OAAO,CAAC,GAAG,CAAC;CAIlD;AAED,wBAAgB,oBAAoB,SAEnC"}
package/dist/models/mistralEmbedding.js CHANGED
@@ -9,19 +9,25 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.LoadMistralEmbedding = exports.MistralEmbedding = void 0;
  const ai_1 = require("@memberjunction/ai");
  const global_1 = require("@memberjunction/global");
- const mistralClient_1 = require("./mistralClient");
+ const mistralai_1 = require("@mistralai/mistralai");
  let MistralEmbedding = class MistralEmbedding extends ai_1.Embeddings {
  constructor(apiKey) {
  super(apiKey);
- this._client = new mistralClient_1.MistralClient({ apiKey });
+ this._client = new mistralai_1.Mistral({
+ apiKey: apiKey
+ });
  }
  get Client() { return this._client; }
  /**
  * Mistral AI embedding endpoint outputs vectors in 1024 dimensions
  */
  async EmbedText(params) {
+ const request = {
+ inputs: params.text,
+ model: params.model || "mistral-embed",
+ };
  params.model = params.model || "mistral-embed";
- const response = await this.Client.embeddings(params.model, [params.text]);
+ const response = await this.Client.embeddings.create(request);
  let vector = [];
  if (response.data.length > 0) {
  vector = response.data[0].embedding;
@@ -29,7 +35,7 @@ let MistralEmbedding = class MistralEmbedding extends ai_1.Embeddings {
  return {
  object: response.object,
  model: response.model,
- ModelUsage: new ai_1.ModelUsage(response.usage.prompt_tokens, response.usage.completion_tokens),
+ ModelUsage: new ai_1.ModelUsage(response.usage.promptTokens, response.usage.completionTokens),
  vector: vector
  };
  }
@@ -37,18 +43,21 @@ let MistralEmbedding = class MistralEmbedding extends ai_1.Embeddings {
  * Mistral AI embedding endpoint outputs vectors in 1024 dimensions
  */
  async EmbedTexts(params) {
+ const request = {
+ inputs: params.texts,
+ model: params.model || "mistral-embed",
+ };
  params.model = params.model || "mistral-embed";
- const response = await this.Client.embeddings(params.model, params.texts);
+ const response = await this.Client.embeddings.create(request);
  return {
  object: response.object,
  model: response.model,
- ModelUsage: new ai_1.ModelUsage(response.usage.prompt_tokens, response.usage.completion_tokens),
+ ModelUsage: new ai_1.ModelUsage(response.usage.promptTokens, response.usage.completionTokens),
  vectors: response.data.map((data) => data.embedding)
  };
  }
  async GetEmbeddingModels() {
- let allModels = await this.Client.listModels();
- allModels.data = allModels.data.filter((model) => model.id.toLowerCase().includes('embed'));
+ let allModels = await this.Client.models.list();
  return allModels;
  }
  };
@@ -1 +1 @@
- {"version":3,"file":"mistralEmbedding.js","sourceRoot":"","sources":["../../src/models/mistralEmbedding.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAAkI;AAClI,mDAAuD;AACvD,mDAAgD;AAKzC,IAAM,gBAAgB,GAAtB,MAAM,gBAAiB,SAAQ,eAAU;IAG5C,YAAY,MAAc;QACtB,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,OAAO,GAAG,IAAI,6BAAa,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACjD,CAAC;IAED,IAAW,MAAM,KAAoB,OAAO,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC;IAE3D;;OAEG;IACI,KAAK,CAAC,SAAS,CAAC,MAAuB;QAC1C,MAAM,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,eAAe,CAAC;QAC/C,MAAM,QAAQ,GAAsB,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC;QAC9F,IAAI,MAAM,GAAa,EAAE,CAAC;QAC1B,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EAAC,CAAC;YAC1B,MAAM,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;QACxC,CAAC;QACD,OAAO;YACH,MAAM,EAAE,QAAQ,CAAC,MAAM;YACvB,KAAK,EAAE,QAAQ,CAAC,KAAK;YACrB,UAAU,EAAE,IAAI,eAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,aAAa,EAAE,QAAQ,CAAC,KAAK,CAAC,iBAAiB,CAAC;YAC1F,MAAM,EAAE,MAAM;SACjB,CAAC;IACN,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,UAAU,CAAC,MAAwB;QAC5C,MAAM,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,eAAe,CAAC;QAC/C,MAAM,QAAQ,GAAsB,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;QAC7F,OAAO;YACH,MAAM,EAAE,QAAQ,CAAC,MAAM;YACvB,KAAK,EAAE,QAAQ,CAAC,KAAK;YACrB,UAAU,EAAE,IAAI,eAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,aAAa,EAAE,QAAQ,CAAC,KAAK,CAAC,iBAAiB,CAAC;YAC1F,OAAO,EAAE,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC;SACvD,CAAA;IACL,CAAC;IAEM,KAAK,CAAC,kBAAkB;QAC3B,IAAI,SAAS,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,CAAC;QAC/C,SAAS,CAAC,IAAI,GAAG,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;QAC5F,OAAO,SAAS,CAAC;IACrB,CAAC;CACJ,CAAA;AA/CY,4CAAgB;2BAAhB,gBAAgB;IAD5B,IAAA,sBAAa,EAAC,eAAU,EAAE,kBAAkB,CAAC;GACjC,gBAAgB,CA+C5B;AAED,SAAgB,oBAAoB;IAChC,iFAAiF;AACrF,CAAC;AAFD,oDAEC"}
+ {"version":3,"file":"mistralEmbedding.js","sourceRoot":"","sources":["../../src/models/mistralEmbedding.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAAkI;AAClI,mDAAuD;AACvD,oDAA+C;AAKxC,IAAM,gBAAgB,GAAtB,MAAM,gBAAiB,SAAQ,eAAU;IAGxC,YAAY,MAAc;QACtB,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,OAAO,GAAG,IAAI,mBAAO,CAAC;YACvB,MAAM,EAAE,MAAM;SACjB,CAAC,CAAC;IACP,CAAC;IAED,IAAW,MAAM,KAAa,OAAO,IAAI,CAAC,OAAO,CAAC,CAAA,CAAC;IAEvD;;OAEG;IACI,KAAK,CAAC,SAAS,CAAC,MAAuB;QAC1C,MAAM,OAAO,GAAqB;YAC9B,MAAM,EAAE,MAAM,CAAC,IAAI;YACnB,KAAK,EAAE,MAAM,CAAC,KAAK,IAAI,eAAe;SACzC,CAAC;QAEF,MAAM,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,eAAe,CAAC;QAC/C,MAAM,QAAQ,GAAsB,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACjF,IAAI,MAAM,GAAa,EAAE,CAAC;QAC1B,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EAAC,CAAC;YAC1B,MAAM,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;QACxC,CAAC;QACD,OAAO;YACH,MAAM,EAAE,QAAQ,CAAC,MAA2B;YAC5C,KAAK,EAAE,QAAQ,CAAC,KAAK;YACrB,UAAU,EAAE,IAAI,eAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,YAAY,EAAE,QAAQ,CAAC,KAAK,CAAC,gBAAgB,CAAC;YACxF,MAAM,EAAE,MAAM;SACjB,CAAC;IACN,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,UAAU,CAAC,MAAwB;QAE5C,MAAM,OAAO,GAAqB;YAC9B,MAAM,EAAE,MAAM,CAAC,KAAK;YACpB,KAAK,EAAE,MAAM,CAAC,KAAK,IAAI,eAAe;SACzC,CAAC;QAEF,MAAM,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,eAAe,CAAC;QAC/C,MAAM,QAAQ,GAAsB,MAAM,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACjF,OAAO;YACH,MAAM,EAAE,QAAQ,CAAC,MAA2B;YAC5C,KAAK,EAAE,QAAQ,CAAC,KAAK;YACrB,UAAU,EAAE,IAAI,eAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,YAAY,EAAE,QAAQ,CAAC,KAAK,CAAC,gBAAgB,CAAC;YACxF,OAAO,EAAE,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC;SACvD,CAAA;IACL,CAAC;IAEM,KAAK,CAAC,kBAAkB;QAC3B,IAAI,SAAS,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;QAChD,OAAO,SAAS,CAAC;IACrB,CAAC;CACJ,CAAA;AA3DY,4CAAgB;2BAAhB,gBAAgB;IAD5B,IAAA,sBAAa,EAAC,eAAU,EAAE,kBAAkB,CAAC;GACjC,gBAAgB,CA2D5B;AAED,SAAgB,oBAAoB;IAChC,iFAAiF;AACrF,CAAC;AAFD,oDAEC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@memberjunction/ai-mistral",
- "version": "2.29.2",
+ "version": "2.30.0",
  "description": "MemberJunction Wrapper for Mistral AI's AI Models",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
@@ -19,9 +19,9 @@
  "typescript": "^5.4.5"
  },
  "dependencies": {
- "@memberjunction/ai": "2.29.2",
- "@memberjunction/global": "2.29.2",
- "@mistralai/mistralai": "1.5.0",
- "axios-retry": "^4.3.0"
+ "@memberjunction/ai": "2.30.0",
+ "@memberjunction/global": "2.30.0",
+ "@mistralai/mistralai": "^1.5.0",
+ "axios-retry": "4.3.0"
  }
  }
package/dist/config.d.ts DELETED
@@ -1,2 +0,0 @@
- export declare const mistralAPIKey: string;
- //# sourceMappingURL=config.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,aAAa,EAAE,MAAoC,CAAC"}
package/dist/config.js DELETED
@@ -1,10 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.mistralAPIKey = void 0;
- const dotenv_1 = __importDefault(require("dotenv"));
- dotenv_1.default.config();
- exports.mistralAPIKey = process.env.MISTRAL_API_KEY;
- //# sourceMappingURL=config.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":";;;;;;AAAA,oDAA4B;AAC5B,gBAAM,CAAC,MAAM,EAAE,CAAC;AAEH,QAAA,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC"}
package/dist/models/mistralClient.d.ts DELETED
@@ -1,97 +0,0 @@
- import { ListModelsResponse, ChatCompletetionRequest, ChatCompletionResponse, EmbeddingResponse } from '../generic/mistral.types';
- /**
- * A simple and lightweight client for the Mistral API
- * @param {*} apiKey can be set as an environment variable MISTRAL_API_KEY,
- * or provided in this parameter
- * @param {*} endpoint defaults to https://api.mistral.ai
- */
- export declare class MistralClient {
- private RETRY_STATUS_CODES;
- private ENDPOINT;
- endpoint: string;
- apiKey: string;
- textDecoder: TextDecoder;
- constructor(config: {
- apiKey?: string;
- endpoint?: string;
- });
- /**
- *
- * @param {*} method
- * @param {*} path
- * @param {*} request
- * @return {Promise<T>}
- */
- private request;
- /**
- * Creates a chat completion request
- * @param {*} model
- * @param {*} messages
- * @param {*} temperature
- * @param {*} maxTokens
- * @param {*} topP
- * @param {*} randomSeed
- * @param {*} stream
- * @param {*} safeMode
- * @return {Promise<Object>}
- */
- _makeChatCompletionRequest: (model: any, messages: any, temperature: any, maxTokens: any, topP: any, randomSeed: any, stream: any, safeMode: any) => {
- model: any;
- messages: any;
- temperature: any;
- max_tokens: any;
- top_p: any;
- random_seed: any;
- stream: any;
- safe_prompt: any;
- };
- /**
- * Returns a list of the available models
- * @return {Promise<ListModelsResponse>}
- */
- listModels(): Promise<ListModelsResponse>;
- /**
- * A chat endpoint without streaming
- * @param {*} model the name of the model to chat with, e.g. mistral-tiny
- * @param {*} messages an array of messages to chat with, e.g.
- * [{role: 'user', content: 'What is the best French cheese?'}]
- * @param {*} temperature the temperature to use for sampling, e.g. 0.5
- * @param {*} maxTokens the maximum number of tokens to generate, e.g. 100
- * @param {*} topP the cumulative probability of tokens to generate, e.g. 0.9
- * @param {*} randomSeed the random seed to use for sampling, e.g. 42
- * @param {*} safeMode whether to use safe mode, e.g. true
- * @return {Promise<Object>}
- */
- chat(params: ChatCompletetionRequest): Promise<ChatCompletionResponse>;
- /**
- * A chat endpoint that streams responses.
- * @param {*} model the name of the model to chat with, e.g. mistral-tiny
- * @param {*} messages an array of messages to chat with, e.g.
- * [{role: 'user', content: 'What is the best French cheese?'}]
- * @param {*} temperature the temperature to use for sampling, e.g. 0.5
- * @param {*} maxTokens the maximum number of tokens to generate, e.g. 100
- * @param {*} topP the cumulative probability of tokens to generate, e.g. 0.9
- * @param {*} randomSeed the random seed to use for sampling, e.g. 42
- * @param {*} safeMode whether to use safe mode, e.g. true
- * @return {Promise<Object>}
- */
- chatStream: ({ model, messages, temperature, maxTokens, topP, randomSeed, safeMode }: {
- model: any;
- messages: any;
- temperature: any;
- maxTokens: any;
- topP: any;
- randomSeed: any;
- safeMode: any;
- }) => AsyncGenerator<any, void, unknown>;
- /**
- * An embedddings endpoint that returns embeddings for a single,
- * or batch of inputs
- * @param {*} model The embedding model to use, e.g. mistral-embed
- * @param {*} input The input to embed,
- * e.g. ['What is the best French cheese?']
- * @return {Promise<Object>}
- */
- embeddings(model: string, input: string[]): Promise<EmbeddingResponse>;
- }
- //# sourceMappingURL=mistralClient.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"mistralClient.d.ts","sourceRoot":"","sources":["../../src/models/mistralClient.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,uBAAuB,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAMlI;;;;;GAKG;AACH,qBAAa,aAAa;IAEtB,OAAO,CAAC,kBAAkB,CAA6B;IACvD,OAAO,CAAC,QAAQ,CAA4B;IAE5C,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,EAAE,WAAW,CAAC;gBAEb,MAAM,EAAE;QAAC,MAAM,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAC;IAmB1D;;;;;;OAMG;YACW,OAAO;IAiBrB;;;;;;;;;;;OAWG;IACH,0BAA0B;;;;;;;;;MAoBxB;IAGF;;;OAGG;IACU,UAAU,IAAI,OAAO,CAAC,kBAAkB,CAAC;IAKtD;;;;;;;;;;;OAWG;IACU,IAAI,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CAAC,sBAAsB,CAAC;IAQnF;;;;;;;;;;;OAWG;IACH,UAAU;;;;;;;;6CAqCR;IAEF;;;;;;;OAOG;IACU,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,iBAAiB,CAAC;CAepF"}
package/dist/models/mistralClient.js DELETED
@@ -1,160 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.MistralClient = void 0;
- const axios_1 = __importDefault(require("axios"));
- const axios_retry_1 = __importDefault(require("axios-retry"));
- //This is a 1:1 copy of the mistralAI client library
- //but modified to work with MJ
- //see https://github.com/mistralai/client-js
- /**
- * A simple and lightweight client for the Mistral API
- * @param {*} apiKey can be set as an environment variable MISTRAL_API_KEY,
- * or provided in this parameter
- * @param {*} endpoint defaults to https://api.mistral.ai
- */
- class MistralClient {
- constructor(config) {
- this.RETRY_STATUS_CODES = [429, 500, 502, 503, 504];
- this.ENDPOINT = 'https://api.mistral.ai';
- /**
- * Creates a chat completion request
- * @param {*} model
- * @param {*} messages
- * @param {*} temperature
- * @param {*} maxTokens
- * @param {*} topP
- * @param {*} randomSeed
- * @param {*} stream
- * @param {*} safeMode
- * @return {Promise<Object>}
- */
- this._makeChatCompletionRequest = function (model, messages, temperature, maxTokens, topP, randomSeed, stream, safeMode) {
- return {
- model: model,
- messages: messages,
- temperature: temperature ?? undefined,
- max_tokens: maxTokens ?? undefined,
- top_p: topP ?? undefined,
- random_seed: randomSeed ?? undefined,
- stream: stream ?? undefined,
- safe_prompt: safeMode ?? undefined,
- };
- };
- /**
- * A chat endpoint that streams responses.
- * @param {*} model the name of the model to chat with, e.g. mistral-tiny
- * @param {*} messages an array of messages to chat with, e.g.
- * [{role: 'user', content: 'What is the best French cheese?'}]
- * @param {*} temperature the temperature to use for sampling, e.g. 0.5
- * @param {*} maxTokens the maximum number of tokens to generate, e.g. 100
- * @param {*} topP the cumulative probability of tokens to generate, e.g. 0.9
- * @param {*} randomSeed the random seed to use for sampling, e.g. 42
- * @param {*} safeMode whether to use safe mode, e.g. true
- * @return {Promise<Object>}
- */
- this.chatStream = async function* ({ model, messages, temperature, maxTokens, topP, randomSeed, safeMode }) {
- const request = this._makeChatCompletionRequest(model, messages, temperature, maxTokens, topP, randomSeed, true, safeMode);
- const response = await this._request('post', 'v1/chat/completions', request);
- for await (const chunk of response) {
- const chunkString = this.textDecoder.decode(chunk);
- // split the chunks by new line
- const chunkLines = chunkString.split('\n');
- // Iterate through the lines
- for (const chunkLine of chunkLines) {
- // If the line starts with data: then it is a chunk
- if (chunkLine.startsWith('data:')) {
- const chunkData = chunkLine.substring(6).trim();
- if (chunkData !== '[DONE]') {
- yield JSON.parse(chunkData);
- }
- }
- }
- }
- };
- this.endpoint = config.endpoint || this.ENDPOINT;
- this.apiKey = config.apiKey;
- this.textDecoder = new TextDecoder();
- (0, axios_retry_1.default)(axios_1.default, {
- retries: 3,
- retryCondition: (error) => {
- return this.RETRY_STATUS_CODES.includes(error.response.status);
- },
- retryDelay: (retryCount, error) => {
- console.debug(`retry attempt: ${retryCount}`, error);
- return retryCount * 500;
- },
- });
- }
- /**
- *
- * @param {*} method
- * @param {*} path
- * @param {*} request
- * @return {Promise<T>}
- */
- async request(method, path, request) {
- const response = await (0, axios_1.default)({
- method: method,
- url: `${this.endpoint}/${path}`,
- data: request || {},
- headers: {
- 'Authorization': `Bearer ${this.apiKey}`,
- },
- responseType: request?.stream ? 'stream' : 'json',
- }).catch((error) => {
- console.error(error);
- return error.response;
- });
- return response.data;
- }
- /**
- * Returns a list of the available models
- * @return {Promise<ListModelsResponse>}
- */
- async listModels() {
- const response = await this.request('get', 'v1/models');
- return response;
- }
- /**
- * A chat endpoint without streaming
- * @param {*} model the name of the model to chat with, e.g. mistral-tiny
- * @param {*} messages an array of messages to chat with, e.g.
- * [{role: 'user', content: 'What is the best French cheese?'}]
- * @param {*} temperature the temperature to use for sampling, e.g. 0.5
- * @param {*} maxTokens the maximum number of tokens to generate, e.g. 100
- * @param {*} topP the cumulative probability of tokens to generate, e.g. 0.9
- * @param {*} randomSeed the random seed to use for sampling, e.g. 42
- * @param {*} safeMode whether to use safe mode, e.g. true
- * @return {Promise<Object>}
- */
- async chat(params) {
- const response = await this.request('post', 'v1/chat/completions', params);
- return response;
- }
- /**
- * An embedddings endpoint that returns embeddings for a single,
- * or batch of inputs
- * @param {*} model The embedding model to use, e.g. mistral-embed
- * @param {*} input The input to embed,
- * e.g. ['What is the best French cheese?']
- * @return {Promise<Object>}
- */
- async embeddings(model, input) {
- try {
- const request = {
- model: model,
- input: input,
- };
- const response = await this.request('post', 'v1/embeddings', request);
- return response;
- }
- catch (error) {
- throw new Error(error);
- }
- }
- }
- exports.MistralClient = MistralClient;
- //# sourceMappingURL=mistralClient.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"mistralClient.js","sourceRoot":"","sources":["../../src/models/mistralClient.ts"],"names":[],"mappings":";;;;;;AAAA,kDAA0B;AAC1B,8DAAqC;AAGrC,oDAAoD;AACpD,8BAA8B;AAC9B,6CAA6C;AAE7C;;;;;GAKG;AACH,MAAa,aAAa;IAStB,YAAY,MAA4C;QAPhD,uBAAkB,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAC/C,aAAQ,GAAG,wBAAwB,CAAC;QAiD9C;;;;;;;;;;;WAWG;QACH,+BAA0B,GAAG,UAC3B,KAAK,EACL,QAAQ,EACR,WAAW,EACX,SAAS,EACT,IAAI,EACJ,UAAU,EACV,MAAM,EACN,QAAQ;YAER,OAAO;gBACL,KAAK,EAAE,KAAK;gBACZ,QAAQ,EAAE,QAAQ;gBAClB,WAAW,EAAE,WAAW,IAAI,SAAS;gBACrC,UAAU,EAAE,SAAS,IAAI,SAAS;gBAClC,KAAK,EAAE,IAAI,IAAI,SAAS;gBACxB,WAAW,EAAE,UAAU,IAAI,SAAS;gBACpC,MAAM,EAAE,MAAM,IAAI,SAAS;gBAC3B,WAAW,EAAE,QAAQ,IAAI,SAAS;aACnC,CAAC;QACJ,CAAC,CAAC;QAgCF;;;;;;;;;;;WAWG;QACH,eAAU,GAAG,KAAK,SAAS,CAAC,EAAE,EAC5B,KAAK,EACL,QAAQ,EACR,WAAW,EACX,SAAS,EACT,IAAI,EACJ,UAAU,EACV,QAAQ,EAAC;YACT,MAAM,OAAO,GAAG,IAAI,CAAC,0BAA0B,CAC7C,KAAK,EACL,QAAQ,EACR,WAAW,EACX,SAAS,EACT,IAAI,EACJ,UAAU,EACV,IAAI,EACJ,QAAQ,CACT,CAAC;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAClC,MAAM,EAAE,qBAAqB,EAAE,OAAO,CACvC,CAAC;YAEF,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,QAAQ,EAAE,CAAC;gBACnC,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBACnD,+BAA+B;gBAC/B,MAAM,UAAU,GAAG,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAC3C,4BAA4B;gBAC5B,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE,CAAC;oBACnC,mDAAmD;oBACnD,IAAI,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;wBAClC,MAAM,SAAS,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;wBAChD,IAAI,SAAS,KAAK,QAAQ,EAAE,CAAC;4BAC3B,MAAM,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;wBAC9B,CAAC;oBACH,CAAC;gBACH,CAAC;YACH,CAAC;QACH,CAAC,CAAC;QA3JI,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,CAAC;QACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAE5B,IAAI,CAAC,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;QAErC,IAAA,qBAAU,EAAC,eAAK,EAAE;YAClB,OAAO,EAAE,CAAC;YACV,cAAc,EAAE,CAAC,KAAK,EAAE,EAAE;gBACtB,OAAO,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YACnE,CAAC;YAED,UAAU,EAAE,CAAC,UAAU,EAAE,KAAK,EAAE,EAAE;gBAC9B,OAAO,CAAC,KAAK,CAAC,kBAAkB,UAAU,EAAE,EAAE,KAAK,CAAC,CAAC;gBACrD,OAAO,UAAU,GAAG,GAAG,CAAC;YAC5B,CAAC;SACA,CAAC,CAAC;IACP,CAAC;IAEH;;;;;;OAMG;IACK,KAAK,CAAC,OAAO,CAAI,MAAc,EAAE,IAAY,EAAE,OAAa;QAElE,MAAM,QAAQ,GAAG,MAAM,IAAA,eAAK,EAAC;YACzB,MAAM,EAAE,MAAM;YACd,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;YAC/B,IAAI,EAAE,OAAO,IAAI,EAAE;YACnB,OAAO,EAAE;gBACP,eAAe,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE;aACzC;YACD,YAAY,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM;SAClD,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;YACjB,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YACrB,OAAO,KAAK,CAAC,QAAQ,CAAC;QACxB,CAAC,CAAC,CAAC;QACH,OAAO,QAAQ,CAAC,IAAI,CAAC;IACzB,CAAC;IAqCD;;;OAGG;IACI,KAAK,CAAC,UAAU;QACrB,MAAM,QAAQ,GAAuB,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;QAC5E,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,IAAI,CAAC,MAA+B;QAC/C,MAAM,QAAQ,GAA2B,MAAM,IAAI,CAAC,OAAO,CACzD,MAAM,EAAE,qBAAqB,EAAE,MAAM,CACtC,CAAC;QAEF,OAAO,QAAQ,CAAC;IAClB,CAAC;IAqDD;;;;;;;OAOG;IACI,KAAK,CAAC,UAAU,CAAC,KAAa,EAAE,KAAe;QACpD,IAAG,CAAC;YACF,MAAM,OAAO,GAAG;gBACd,KAAK,EAAE,KAAK;gBACZ,KAAK,EAAE,KAAK;aACb,CAAC;YACF,MAAM,QAAQ,GAAsB,MAAM,IAAI,CAAC,OAAO,CACpD,MAAM,EAAE,eAAe,EAAE,OAAO,CACjC,CAAC;YACF,OAAO,QAAQ,CAAC;QAClB,CAAC;QACD,OAAM,KAAK,EAAC,CAAC;YACX,MAAM,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;QACzB,CAAC;IACH,CAAC;CACF;AA9LD,sCA8LC"}