@langchain/google-common 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +47 -0
- package/dist/auth.cjs +90 -0
- package/dist/auth.d.ts +39 -0
- package/dist/auth.js +83 -0
- package/dist/chat_models.cjs +153 -0
- package/dist/chat_models.d.ts +46 -0
- package/dist/chat_models.js +149 -0
- package/dist/connection.cjs +231 -0
- package/dist/connection.d.ts +47 -0
- package/dist/connection.js +224 -0
- package/dist/index.cjs +23 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +7 -0
- package/dist/llms.cjs +162 -0
- package/dist/llms.d.ts +48 -0
- package/dist/llms.js +158 -0
- package/dist/types.cjs +2 -0
- package/dist/types.d.ts +146 -0
- package/dist/types.js +1 -0
- package/dist/utils/common.cjs +48 -0
- package/dist/utils/common.d.ts +6 -0
- package/dist/utils/common.js +40 -0
- package/dist/utils/gemini.cjs +239 -0
- package/dist/utils/gemini.d.ts +20 -0
- package/dist/utils/gemini.js +219 -0
- package/dist/utils/palm.cjs +2 -0
- package/dist/utils/palm.d.ts +6 -0
- package/dist/utils/palm.js +1 -0
- package/dist/utils/stream.cjs +304 -0
- package/dist/utils/stream.d.ts +70 -0
- package/dist/utils/stream.js +296 -0
- package/index.cjs +1 -0
- package/index.d.ts +1 -0
- package/index.js +1 -0
- package/package.json +83 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+The MIT License
+
+Copyright (c) 2023 LangChain
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,47 @@
+# LangChain google-common
+
+This package contains common resources to access Google AI/ML models
+and other Google services in an auth-independent way.
+
+AI/ML models are supported using the same interface no matter if
+you are using the Google AI Studio-based version of the model or
+the Google Cloud Vertex AI version of the model.
+
+## Installation
+
+This is **not** a stand-alone package since it does not contain code to do
+authorization.
+
+Instead, you should install *one* of the following packages:
+* @langchain/google-gauth
+* @langchain/google-webauth
+
+See those packages for details about installation.
+
+This package does **not** depend on any Google library. Instead, it relies on
+REST calls to Google endpoints. This is deliberate to reduce (sometimes
+conflicting) dependencies and make it usable on platforms that do not include
+file storage.
+
+
+## Google services supported
+
+* Gemini model through LLM and Chat classes (both through Google AI Studio and
+  Google Cloud Vertex AI)
+
+
+## TODO
+
+Tasks and services still to be implemented:
+
+* Functions for Gemini
+* PaLM Vertex AI support and backwards compatibility
+* PaLM MakerSuite support and backwards compatibility
+* Semantic Retrieval / AQA model
+* PaLM embeddings
+* Gemini embeddings
+* Multimodal embeddings
+* Vertex AI Search
+* Vertex AI Model Garden
+* MakerSuite / Google Drive Hub
+* Google Cloud Vector Store
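As a rough usage sketch of the split the README describes, the following assumes that one of the companion auth packages (here @langchain/google-gauth) exports a concrete ChatGoogle subclass of the ChatGoogleBase defined in this package; the class name and option names are illustrative and are not confirmed by this diff.

// Illustrative sketch only -- the @langchain/google-gauth import and its
// ChatGoogle export are assumptions based on the README above.
import { ChatGoogle } from "@langchain/google-gauth";

// Model parameters mirror the defaults in dist/chat_models.* below.
const model = new ChatGoogle({
  model: "gemini-pro",
  temperature: 0.7,
  maxOutputTokens: 1024,
});

// Auth is resolved by the gauth package (service account / ADC), or by an
// API key via GOOGLE_API_KEY, as implemented in ChatGoogleBase.buildClient().
const response = await model.invoke("Hello, Gemini!");
console.log(response.content);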
package/dist/auth.cjs
ADDED
@@ -0,0 +1,90 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ensureAuthOptionScopes = exports.aiPlatformScope = exports.ApiKeyGoogleAuth = exports.GoogleAbstractedFetchClient = void 0;
+const stream_js_1 = require("./utils/stream.cjs");
+class GoogleAbstractedFetchClient {
+    async _request(url, opts, additionalHeaders) {
+        if (url == null)
+            throw new Error("Missing URL");
+        const fetchOptions = {
+            method: opts.method,
+            headers: {
+                "Content-Type": "application/json",
+                ...(opts.headers ?? {}),
+                ...(additionalHeaders ?? {}),
+            },
+        };
+        if (opts.data !== undefined) {
+            fetchOptions.body = JSON.stringify(opts.data);
+        }
+        const res = await fetch(url, fetchOptions);
+        if (!res.ok) {
+            const error = new Error(`Could not get access token for Google with status code: ${res.status}`);
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            error.response = res;
+            throw error;
+        }
+        return {
+            data: opts.responseType === "json"
+                ? await res.json()
+                : new stream_js_1.ReadableJsonStream(res.body),
+            config: {},
+            status: res.status,
+            statusText: res.statusText,
+            headers: res.headers,
+            request: { responseURL: res.url },
+        };
+    }
+}
+exports.GoogleAbstractedFetchClient = GoogleAbstractedFetchClient;
+class ApiKeyGoogleAuth extends GoogleAbstractedFetchClient {
+    constructor(apiKey) {
+        super();
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.apiKey = apiKey;
+    }
+    get clientType() {
+        return "apiKey";
+    }
+    getProjectId() {
+        throw new Error("APIs that require a project ID cannot use an API key");
+        // Perhaps we could implement this if needed:
+        // https://cloud.google.com/docs/authentication/api-keys#get-info
+    }
+    request(opts) {
+        const authHeader = {
+            "X-Goog-Api-Key": this.apiKey,
+        };
+        return this._request(opts.url, opts, authHeader);
+    }
+}
+exports.ApiKeyGoogleAuth = ApiKeyGoogleAuth;
+function aiPlatformScope(platform) {
+    switch (platform) {
+        case "gai":
+            return ["https://www.googleapis.com/auth/generative-language"];
+        default:
+            return ["https://www.googleapis.com/auth/cloud-platform"];
+    }
+}
+exports.aiPlatformScope = aiPlatformScope;
+function ensureAuthOptionScopes(authOption, scopeProperty, scopesOrPlatform) {
+    // If the property is already set, return it
+    if (authOption && Object.hasOwn(authOption, scopeProperty)) {
+        return authOption;
+    }
+    // Otherwise add it
+    const scopes = Array.isArray(scopesOrPlatform)
+        ? scopesOrPlatform
+        : aiPlatformScope(scopesOrPlatform ?? "gcp");
+    return {
+        [scopeProperty]: scopes,
+        ...(authOption ?? {}),
+    };
+}
+exports.ensureAuthOptionScopes = ensureAuthOptionScopes;
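To make the scope helpers above concrete, here is a minimal behavior sketch derived from the code in this diff; the example auth-option object and the assumption that these helpers are re-exported from the package root are illustrative.

// Behavior sketch for aiPlatformScope / ensureAuthOptionScopes.
// Root re-export is assumed; the functions themselves live in dist/auth.*.
import { aiPlatformScope, ensureAuthOptionScopes } from "@langchain/google-common";

// "gai" (Google AI Studio) maps to the generative-language scope;
// any other platform falls back to the broad cloud-platform scope.
aiPlatformScope("gai"); // ["https://www.googleapis.com/auth/generative-language"]
aiPlatformScope("gcp"); // ["https://www.googleapis.com/auth/cloud-platform"]

// ensureAuthOptionScopes only fills the scope property when it is missing.
// (keyFile is just a hypothetical auth option for the sake of the example.)
ensureAuthOptionScopes({ keyFile: "sa.json" }, "scopes", "gcp");
// -> { scopes: ["https://www.googleapis.com/auth/cloud-platform"], keyFile: "sa.json" }

ensureAuthOptionScopes({ scopes: ["custom"] }, "scopes", "gcp");
// -> { scopes: ["custom"] } (unchanged, because "scopes" is already set)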
package/dist/auth.d.ts
ADDED
@@ -0,0 +1,39 @@
+import { GooglePlatformType } from "./types.js";
+export type GoogleAbstractedClientOpsMethod = "GET" | "POST";
+export type GoogleAbstractedClientOpsResponseType = "json" | "stream";
+export type GoogleAbstractedClientOps = {
+    url?: string;
+    method?: GoogleAbstractedClientOpsMethod;
+    headers?: Record<string, string>;
+    data?: unknown;
+    responseType?: GoogleAbstractedClientOpsResponseType;
+};
+export interface GoogleAbstractedClient {
+    request: (opts: GoogleAbstractedClientOps) => unknown;
+    getProjectId: () => Promise<string>;
+    get clientType(): string;
+}
+export declare abstract class GoogleAbstractedFetchClient implements GoogleAbstractedClient {
+    abstract get clientType(): string;
+    abstract getProjectId(): Promise<string>;
+    abstract request(opts: GoogleAbstractedClientOps): unknown;
+    _request(url: string | undefined, opts: GoogleAbstractedClientOps, additionalHeaders: Record<string, string>): Promise<{
+        data: any;
+        config: {};
+        status: number;
+        statusText: string;
+        headers: Headers;
+        request: {
+            responseURL: string;
+        };
+    }>;
+}
+export declare class ApiKeyGoogleAuth extends GoogleAbstractedFetchClient {
+    apiKey: string;
+    constructor(apiKey: string);
+    get clientType(): string;
+    getProjectId(): Promise<string>;
+    request(opts: GoogleAbstractedClientOps): unknown;
+}
+export declare function aiPlatformScope(platform: GooglePlatformType): string[];
+export declare function ensureAuthOptionScopes<AuthOptions>(authOption: AuthOptions | undefined, scopeProperty: string, scopesOrPlatform: string[] | GooglePlatformType | undefined): AuthOptions;
package/dist/auth.js
ADDED
@@ -0,0 +1,83 @@
+import { ReadableJsonStream } from "./utils/stream.js";
+export class GoogleAbstractedFetchClient {
+    async _request(url, opts, additionalHeaders) {
+        if (url == null)
+            throw new Error("Missing URL");
+        const fetchOptions = {
+            method: opts.method,
+            headers: {
+                "Content-Type": "application/json",
+                ...(opts.headers ?? {}),
+                ...(additionalHeaders ?? {}),
+            },
+        };
+        if (opts.data !== undefined) {
+            fetchOptions.body = JSON.stringify(opts.data);
+        }
+        const res = await fetch(url, fetchOptions);
+        if (!res.ok) {
+            const error = new Error(`Could not get access token for Google with status code: ${res.status}`);
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            error.response = res;
+            throw error;
+        }
+        return {
+            data: opts.responseType === "json"
+                ? await res.json()
+                : new ReadableJsonStream(res.body),
+            config: {},
+            status: res.status,
+            statusText: res.statusText,
+            headers: res.headers,
+            request: { responseURL: res.url },
+        };
+    }
+}
+export class ApiKeyGoogleAuth extends GoogleAbstractedFetchClient {
+    constructor(apiKey) {
+        super();
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.apiKey = apiKey;
+    }
+    get clientType() {
+        return "apiKey";
+    }
+    getProjectId() {
+        throw new Error("APIs that require a project ID cannot use an API key");
+        // Perhaps we could implement this if needed:
+        // https://cloud.google.com/docs/authentication/api-keys#get-info
+    }
+    request(opts) {
+        const authHeader = {
+            "X-Goog-Api-Key": this.apiKey,
+        };
+        return this._request(opts.url, opts, authHeader);
+    }
+}
+export function aiPlatformScope(platform) {
+    switch (platform) {
+        case "gai":
+            return ["https://www.googleapis.com/auth/generative-language"];
+        default:
+            return ["https://www.googleapis.com/auth/cloud-platform"];
+    }
+}
+export function ensureAuthOptionScopes(authOption, scopeProperty, scopesOrPlatform) {
+    // If the property is already set, return it
+    if (authOption && Object.hasOwn(authOption, scopeProperty)) {
+        return authOption;
+    }
+    // Otherwise add it
+    const scopes = Array.isArray(scopesOrPlatform)
+        ? scopesOrPlatform
+        : aiPlatformScope(scopesOrPlatform ?? "gcp");
+    return {
+        [scopeProperty]: scopes,
+        ...(authOption ?? {}),
+    };
+}
package/dist/chat_models.cjs
ADDED
@@ -0,0 +1,153 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ChatGoogleBase = void 0;
+const env_1 = require("@langchain/core/utils/env");
+const chat_models_1 = require("@langchain/core/language_models/chat_models");
+const outputs_1 = require("@langchain/core/outputs");
+const messages_1 = require("@langchain/core/messages");
+const common_js_1 = require("./utils/common.cjs");
+const connection_js_1 = require("./connection.cjs");
+const gemini_js_1 = require("./utils/gemini.cjs");
+const auth_js_1 = require("./auth.cjs");
+class ChatConnection extends connection_js_1.AbstractGoogleLLMConnection {
+    formatContents(input, _parameters) {
+        return input
+            .map((msg) => (0, gemini_js_1.baseMessageToContent)(msg))
+            .reduce((acc, cur) => [...acc, ...cur]);
+    }
+}
+/**
+ * Integration with a chat model.
+ */
+class ChatGoogleBase extends chat_models_1.BaseChatModel {
+    // Used for tracing, replace with the same name as your class
+    static lc_name() {
+        return "ChatGoogle";
+    }
+    constructor(fields) {
+        super(fields ?? {});
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "gemini-pro"
+        });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0.7
+        });
+        Object.defineProperty(this, "maxOutputTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 1024
+        });
+        Object.defineProperty(this, "topP", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0.8
+        });
+        Object.defineProperty(this, "topK", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 40
+        });
+        Object.defineProperty(this, "stopSequences", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "safetySettings", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "connection", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "streamedConnection", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        (0, common_js_1.copyAndValidateModelParamsInto)(fields, this);
+        const client = this.buildClient(fields);
+        this.buildConnection(fields ?? {}, client);
+    }
+    buildApiKeyClient(apiKey) {
+        return new auth_js_1.ApiKeyGoogleAuth(apiKey);
+    }
+    buildApiKey(fields) {
+        return fields?.apiKey ?? (0, env_1.getEnvironmentVariable)("GOOGLE_API_KEY");
+    }
+    buildClient(fields) {
+        const apiKey = this.buildApiKey(fields);
+        if (apiKey) {
+            return this.buildApiKeyClient(apiKey);
+        }
+        else {
+            return this.buildAbstractedClient(fields);
+        }
+    }
+    buildConnection(fields, client) {
+        this.connection = new ChatConnection({ ...fields, ...this }, this.caller, client, false);
+        this.streamedConnection = new ChatConnection({ ...fields, ...this }, this.caller, client, true);
+    }
+    get platform() {
+        return this.connection.platform;
+    }
+    // Replace
+    _llmType() {
+        return "chat_integration";
+    }
+    async _generate(messages, options, _runManager) {
+        const parameters = (0, common_js_1.copyAIModelParams)(this);
+        const response = await this.connection.request(messages, parameters, options);
+        const ret = (0, gemini_js_1.responseToChatResult)(response);
+        return ret;
+    }
+    async *_streamResponseChunks(_messages, _options, _runManager) {
+        // Make the call as a streaming request
+        const parameters = (0, common_js_1.copyAIModelParams)(this);
+        const response = await this.streamedConnection.request(_messages, parameters, _options);
+        // Get the streaming parser of the response
+        const stream = response.data;
+        // Loop until the end of the stream
+        // During the loop, yield each time we get a chunk from the streaming parser
+        // that is either available or added to the queue
+        while (!stream.streamDone) {
+            const output = await stream.nextChunk();
+            const chunk = output !== null
+                ? (0, gemini_js_1.responseToChatGeneration)({ data: output })
+                : new outputs_1.ChatGenerationChunk({
+                    text: "",
+                    generationInfo: { finishReason: "stop" },
+                    message: new messages_1.AIMessageChunk({
+                        content: "",
+                    }),
+                });
+            yield chunk;
+        }
+    }
+    /** @ignore */
+    _combineLLMOutput() {
+        return [];
+    }
+}
+exports.ChatGoogleBase = ChatGoogleBase;
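Since ChatGoogleBase is abstract (see the .d.ts below), an auth package supplies the missing buildAbstractedClient implementation. The following is a minimal sketch of such a subclass, assuming these names are re-exported from the package root; the subclass and its auth-option type are hypothetical.

// Hypothetical subclass sketch; real implementations live in the
// @langchain/google-gauth and @langchain/google-webauth packages.
import {
  ChatGoogleBase,
  ApiKeyGoogleAuth,
  type ChatGoogleBaseInput,
  type GoogleAIBaseLLMInput,
  type GoogleAbstractedClient,
} from "@langchain/google-common";

// Hypothetical auth options for this sketch.
interface ApiKeyAuthOptions {
  apiKey?: string;
}

class ChatGoogleApiKeyOnly extends ChatGoogleBase<ApiKeyAuthOptions> {
  constructor(fields?: ChatGoogleBaseInput<ApiKeyAuthOptions>) {
    super(fields);
  }

  // Called by buildClient() only when no API key was found in fields.apiKey
  // or GOOGLE_API_KEY; a real subclass would build an OAuth-capable client here.
  buildAbstractedClient(
    fields?: GoogleAIBaseLLMInput<ApiKeyAuthOptions>
  ): GoogleAbstractedClient {
    const apiKey = fields?.apiKey;
    if (!apiKey) {
      throw new Error("This sketch only supports API key auth");
    }
    return new ApiKeyGoogleAuth(apiKey);
  }
}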
package/dist/chat_models.d.ts
ADDED
@@ -0,0 +1,46 @@
+import { type BaseMessage } from "@langchain/core/messages";
+import { type BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";
+import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
+import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
+import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
+import { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GeminiContent } from "./types.js";
+import { AbstractGoogleLLMConnection } from "./connection.js";
+import { GoogleAbstractedClient } from "./auth.js";
+import { GoogleBaseLLMInput } from "./llms.js";
+declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {
+    formatContents(input: BaseMessage[], _parameters: GoogleAIModelParams): GeminiContent[];
+}
+/**
+ * Input to chat model class.
+ */
+export interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams {
+}
+/**
+ * Integration with a chat model.
+ */
+export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<BaseLanguageModelCallOptions> implements ChatGoogleBaseInput<AuthOptions> {
+    static lc_name(): string;
+    lc_serializable: boolean;
+    model: string;
+    temperature: number;
+    maxOutputTokens: number;
+    topP: number;
+    topK: number;
+    stopSequences: string[];
+    safetySettings: GoogleAISafetySetting[];
+    protected connection: ChatConnection<AuthOptions>;
+    protected streamedConnection: ChatConnection<AuthOptions>;
+    constructor(fields?: ChatGoogleBaseInput<AuthOptions>);
+    abstract buildAbstractedClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;
+    buildApiKeyClient(apiKey: string): GoogleAbstractedClient;
+    buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined;
+    buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;
+    buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;
+    get platform(): GooglePlatformType;
+    _llmType(): string;
+    _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;
+    _streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+    /** @ignore */
+    _combineLLMOutput(): never[];
+}
+export {};
package/dist/chat_models.js
ADDED
@@ -0,0 +1,149 @@
+import { getEnvironmentVariable } from "@langchain/core/utils/env";
+import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
+import { ChatGenerationChunk } from "@langchain/core/outputs";
+import { AIMessageChunk } from "@langchain/core/messages";
+import { copyAIModelParams, copyAndValidateModelParamsInto, } from "./utils/common.js";
+import { AbstractGoogleLLMConnection } from "./connection.js";
+import { baseMessageToContent, responseToChatGeneration, responseToChatResult, } from "./utils/gemini.js";
+import { ApiKeyGoogleAuth } from "./auth.js";
+class ChatConnection extends AbstractGoogleLLMConnection {
+    formatContents(input, _parameters) {
+        return input
+            .map((msg) => baseMessageToContent(msg))
+            .reduce((acc, cur) => [...acc, ...cur]);
+    }
+}
+/**
+ * Integration with a chat model.
+ */
+export class ChatGoogleBase extends BaseChatModel {
+    // Used for tracing, replace with the same name as your class
+    static lc_name() {
+        return "ChatGoogle";
+    }
+    constructor(fields) {
+        super(fields ?? {});
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "gemini-pro"
+        });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0.7
+        });
+        Object.defineProperty(this, "maxOutputTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 1024
+        });
+        Object.defineProperty(this, "topP", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0.8
+        });
+        Object.defineProperty(this, "topK", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 40
+        });
+        Object.defineProperty(this, "stopSequences", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "safetySettings", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "connection", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "streamedConnection", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        copyAndValidateModelParamsInto(fields, this);
+        const client = this.buildClient(fields);
+        this.buildConnection(fields ?? {}, client);
+    }
+    buildApiKeyClient(apiKey) {
+        return new ApiKeyGoogleAuth(apiKey);
+    }
+    buildApiKey(fields) {
+        return fields?.apiKey ?? getEnvironmentVariable("GOOGLE_API_KEY");
+    }
+    buildClient(fields) {
+        const apiKey = this.buildApiKey(fields);
+        if (apiKey) {
+            return this.buildApiKeyClient(apiKey);
+        }
+        else {
+            return this.buildAbstractedClient(fields);
+        }
+    }
+    buildConnection(fields, client) {
+        this.connection = new ChatConnection({ ...fields, ...this }, this.caller, client, false);
+        this.streamedConnection = new ChatConnection({ ...fields, ...this }, this.caller, client, true);
+    }
+    get platform() {
+        return this.connection.platform;
+    }
+    // Replace
+    _llmType() {
+        return "chat_integration";
+    }
+    async _generate(messages, options, _runManager) {
+        const parameters = copyAIModelParams(this);
+        const response = await this.connection.request(messages, parameters, options);
+        const ret = responseToChatResult(response);
+        return ret;
+    }
+    async *_streamResponseChunks(_messages, _options, _runManager) {
+        // Make the call as a streaming request
+        const parameters = copyAIModelParams(this);
+        const response = await this.streamedConnection.request(_messages, parameters, _options);
+        // Get the streaming parser of the response
+        const stream = response.data;
+        // Loop until the end of the stream
+        // During the loop, yield each time we get a chunk from the streaming parser
+        // that is either available or added to the queue
+        while (!stream.streamDone) {
+            const output = await stream.nextChunk();
+            const chunk = output !== null
+                ? responseToChatGeneration({ data: output })
+                : new ChatGenerationChunk({
+                    text: "",
+                    generationInfo: { finishReason: "stop" },
+                    message: new AIMessageChunk({
+                        content: "",
+                    }),
+                });
+            yield chunk;
+        }
+    }
+    /** @ignore */
+    _combineLLMOutput() {
+        return [];
+    }
+}
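Finally, a short sketch of how the streaming path above is consumed through the standard LangChain interface; it again assumes a concrete ChatGoogle subclass from one of the auth packages, and the option names are illustrative.

// Assumes a concrete ChatGoogle class from @langchain/google-webauth; the
// streaming loop it drives is the _streamResponseChunks() shown above.
import { ChatGoogle } from "@langchain/google-webauth";

const model = new ChatGoogle({ apiKey: process.env.GOOGLE_API_KEY });

// BaseChatModel.stream() yields AIMessageChunk values built from the
// ReadableJsonStream that the connection returns for streamed requests.
const stream = await model.stream("Write a haiku about the ocean.");
for await (const chunk of stream) {
  if (typeof chunk.content === "string") {
    process.stdout.write(chunk.content);
  }
}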