oci-generativeaiinference 2.77.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +89 -0
- package/NOTICE.txt +1 -0
- package/README.md +22 -0
- package/THIRD_PARTY_LICENSES.txt +576 -0
- package/index.d.ts +27 -0
- package/index.js +50 -0
- package/index.js.map +1 -0
- package/lib/client.d.ts +113 -0
- package/lib/client.js +376 -0
- package/lib/client.js.map +1 -0
- package/lib/model/choice.d.ts +45 -0
- package/lib/model/choice.js +60 -0
- package/lib/model/choice.js.map +1 -0
- package/lib/model/cohere-llm-inference-request.d.ts +106 -0
- package/lib/model/cohere-llm-inference-request.js +73 -0
- package/lib/model/cohere-llm-inference-request.js.map +1 -0
- package/lib/model/cohere-llm-inference-response.d.ts +43 -0
- package/lib/model/cohere-llm-inference-response.js +73 -0
- package/lib/model/cohere-llm-inference-response.js.map +1 -0
- package/lib/model/dedicated-serving-mode.d.ts +35 -0
- package/lib/model/dedicated-serving-mode.js +59 -0
- package/lib/model/dedicated-serving-mode.js.map +1 -0
- package/lib/model/embed-text-details.d.ts +61 -0
- package/lib/model/embed-text-details.js +75 -0
- package/lib/model/embed-text-details.js.map +1 -0
- package/lib/model/embed-text-result.d.ts +48 -0
- package/lib/model/embed-text-result.js +36 -0
- package/lib/model/embed-text-result.js.map +1 -0
- package/lib/model/generate-text-details.d.ts +35 -0
- package/lib/model/generate-text-details.js +68 -0
- package/lib/model/generate-text-details.js.map +1 -0
- package/lib/model/generate-text-result.d.ts +38 -0
- package/lib/model/generate-text-result.js +64 -0
- package/lib/model/generate-text-result.js.map +1 -0
- package/lib/model/generated-text.d.ts +55 -0
- package/lib/model/generated-text.js +68 -0
- package/lib/model/generated-text.js.map +1 -0
- package/lib/model/index.d.ts +57 -0
- package/lib/model/index.js +80 -0
- package/lib/model/index.js.map +1 -0
- package/lib/model/llama-llm-inference-request.d.ts +94 -0
- package/lib/model/llama-llm-inference-request.js +61 -0
- package/lib/model/llama-llm-inference-request.js.map +1 -0
- package/lib/model/llama-llm-inference-response.d.ts +39 -0
- package/lib/model/llama-llm-inference-response.js +73 -0
- package/lib/model/llama-llm-inference-response.js.map +1 -0
- package/lib/model/llm-inference-request.d.ts +29 -0
- package/lib/model/llm-inference-request.js +79 -0
- package/lib/model/llm-inference-request.js.map +1 -0
- package/lib/model/llm-inference-response.d.ts +29 -0
- package/lib/model/llm-inference-response.js +79 -0
- package/lib/model/llm-inference-response.js.map +1 -0
- package/lib/model/logprobs.d.ts +46 -0
- package/lib/model/logprobs.js +36 -0
- package/lib/model/logprobs.js.map +1 -0
- package/lib/model/on-demand-serving-mode.d.ts +35 -0
- package/lib/model/on-demand-serving-mode.js +59 -0
- package/lib/model/on-demand-serving-mode.js.map +1 -0
- package/lib/model/serving-mode.d.ts +29 -0
- package/lib/model/serving-mode.js +79 -0
- package/lib/model/serving-mode.js.map +1 -0
- package/lib/model/summarize-text-details.d.ts +82 -0
- package/lib/model/summarize-text-details.js +82 -0
- package/lib/model/summarize-text-details.js.map +1 -0
- package/lib/model/summarize-text-result.d.ts +48 -0
- package/lib/model/summarize-text-result.js +36 -0
- package/lib/model/summarize-text-result.js.map +1 -0
- package/lib/model/token-likelihood.d.ts +39 -0
- package/lib/model/token-likelihood.js +36 -0
- package/lib/model/token-likelihood.js.map +1 -0
- package/lib/request/embed-text-request.d.ts +36 -0
- package/lib/request/embed-text-request.js +15 -0
- package/lib/request/embed-text-request.js.map +1 -0
- package/lib/request/generate-text-request.d.ts +36 -0
- package/lib/request/generate-text-request.js +15 -0
- package/lib/request/generate-text-request.js.map +1 -0
- package/lib/request/index.d.ts +25 -0
- package/lib/request/index.js +22 -0
- package/lib/request/index.js.map +1 -0
- package/lib/request/summarize-text-request.d.ts +36 -0
- package/lib/request/summarize-text-request.js +15 -0
- package/lib/request/summarize-text-request.js.map +1 -0
- package/lib/response/embed-text-response.d.ts +30 -0
- package/lib/response/embed-text-response.js +15 -0
- package/lib/response/embed-text-response.js.map +1 -0
- package/lib/response/generate-text-response.d.ts +30 -0
- package/lib/response/generate-text-response.js +15 -0
- package/lib/response/generate-text-response.js.map +1 -0
- package/lib/response/index.d.ts +25 -0
- package/lib/response/index.js +22 -0
- package/lib/response/index.js.map +1 -0
- package/lib/response/summarize-text-response.d.ts +30 -0
- package/lib/response/summarize-text-response.js +15 -0
- package/lib/response/summarize-text-response.js.map +1 -0
- package/package.json +29 -0
package/lib/client.d.ts
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
/**
 * Generative AI Service Inference API
 * OCI Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases for text generation, summarization, and text embeddings.

Use the Generative AI service inference API to access your custom model endpoints, or to try the out-of-the-box models to [generate text](#/en/generative-ai-inference/latest/GenerateTextResult/GenerateText), [summarize](#/en/generative-ai-inference/latest/SummarizeTextResult/SummarizeText), and [create text embeddings](#/en/generative-ai-inference/latest/EmbedTextResult/EmbedText).

To use a Generative AI custom model for inference, you must first create an endpoint for that model. Use the [Generative AI service management API](/#/en/generative-ai/latest/) to [create a custom model](#/en/generative-ai/latest/Model/) by fine-tuning an out-of-the-box model, or a previous version of a custom model, using your own data. Fine-tune the custom model on a [fine-tuning dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/). Then, create a [hosting dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/) with an [endpoint](#/en/generative-ai/latest/Endpoint/) to host your custom model. For resource management in the Generative AI service, use the [Generative AI service management API](/#/en/generative-ai/latest/).

To learn more about the service, see the [Generative AI documentation](/iaas/Content/generative-ai/home.htm).

 * OpenAPI spec version: 20231130
 *
 *
 * NOTE: This class is auto generated by OracleSDKGenerator.
 * Do not edit the class manually.
 *
 * Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
 * This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
 */
import common = require("oci-common");
import * as requests from "./request";
import * as responses from "./response";
// Circuit-breaker type placeholder; the runtime implementation (opossum) is untyped here.
declare const Breaker: any;
// Intentionally empty: no operations of this service require an operation-specific API key.
export declare enum GenerativeAiInferenceApiKeys {
}
/**
 * This service client uses {@link common.CircuitBreaker.DefaultConfiguration} for all the operations by default if no circuit breaker configuration is defined by the user.
 */
export declare class GenerativeAiInferenceClient {
    // Template and service name used to resolve region-based endpoints.
    protected static serviceEndpointTemplate: string;
    protected static endpointServiceName: string;
    protected "_realmSpecificEndpointTemplateEnabled": boolean;
    protected "_endpoint": string;
    protected "_defaultHeaders": any;
    protected "_clientConfiguration": common.ClientConfiguration;
    protected _circuitBreaker: typeof Breaker | null;
    protected _httpOptions: any;
    protected _bodyDuplexMode: any;
    targetService: string;
    protected _regionId: string;
    protected "_region": common.Region;
    // Tracks whether `region` or `regionId` was set last, so the endpoint can be re-resolved.
    protected _lastSetRegionOrRegionId: string;
    protected _httpClient: common.HttpClient;
    constructor(params: common.AuthParams, clientConfiguration?: common.ClientConfiguration);
    /**
     * Get the endpoint that is being used to call (ex, https://www.example.com).
     */
    get endpoint(): string;
    /**
     * Sets the endpoint to call (ex, https://www.example.com).
     * @param endpoint The endpoint of the service.
     */
    set endpoint(endpoint: string);
    get logger(): import("oci-common/lib/log").Logger;
    /**
     * Determines whether realm specific endpoint should be used or not.
     * Set realmSpecificEndpointTemplateEnabled to "true" if the user wants to enable use of realm specific endpoint template, otherwise set it to "false"
     * @param realmSpecificEndpointTemplateEnabled flag to enable the use of realm specific endpoint template
     */
    set useRealmSpecificEndpointTemplate(realmSpecificEndpointTemplateEnabled: boolean);
    /**
     * Sets the region to call (ex, Region.US_PHOENIX_1).
     * Note, this will call {@link #endpoint(String) endpoint} after resolving the endpoint.
     * @param region The region of the service.
     */
    set region(region: common.Region);
    /**
     * Sets the regionId to call (ex, 'us-phoenix-1').
     *
     * Note, this will first try to map the region ID to a known Region and call {@link #region(Region) region}.
     * If no known Region could be determined, it will create an endpoint assuming its in default Realm OC1
     * and then call {@link #endpoint(String) endpoint}.
     * @param regionId The public region ID.
     */
    set regionId(regionId: string);
    /**
     * Shutdown the circuit breaker used by the client when it is no longer needed
     */
    shutdownCircuitBreaker(): void;
    /**
     * Produces embeddings for the inputs.
     * <p>
     An embedding is numeric representation of a piece of text. This text can be a phrase, a sentence, or one or more paragraphs. The Generative AI embedding model transforms each phrase, sentence, or paragraph that you input, into an array with 1024 numbers. You can use these embeddings for finding similarity in your input text such as finding phrases that are similar in context or category. Embeddings are mostly used for semantic searches where the search function focuses on the meaning of the text that it's searching through rather than finding results based on keywords.
     *
     * This operation uses {@link common.OciSdkDefaultRetryConfiguration} by default if no retry configuration is defined by the user.
     * @param EmbedTextRequest
     * @return EmbedTextResponse
     * @throws OciError when an error occurs
     * @example Click {@link https://docs.cloud.oracle.com/en-us/iaas/tools/typescript-sdk-examples/2.77.1/generativeaiinference/EmbedText.ts.html |here} to see how to use EmbedText API.
     */
    embedText(embedTextRequest: requests.EmbedTextRequest): Promise<responses.EmbedTextResponse>;
    /**
     * Generates a text response based on the user prompt.
     *
     * This operation uses {@link common.OciSdkDefaultRetryConfiguration} by default if no retry configuration is defined by the user.
     * @param GenerateTextRequest
     * @return GenerateTextResponse
     * @throws OciError when an error occurs
     * @example Click {@link https://docs.cloud.oracle.com/en-us/iaas/tools/typescript-sdk-examples/2.77.1/generativeaiinference/GenerateText.ts.html |here} to see how to use GenerateText API.
     */
    generateText(generateTextRequest: requests.GenerateTextRequest): Promise<responses.GenerateTextResponse>;
    /**
     * Summarizes the input text.
     *
     * This operation uses {@link common.OciSdkDefaultRetryConfiguration} by default if no retry configuration is defined by the user.
     * @param SummarizeTextRequest
     * @return SummarizeTextResponse
     * @throws OciError when an error occurs
     * @example Click {@link https://docs.cloud.oracle.com/en-us/iaas/tools/typescript-sdk-examples/2.77.1/generativeaiinference/SummarizeText.ts.html |here} to see how to use SummarizeText API.
     */
    summarizeText(summarizeTextRequest: requests.SummarizeTextRequest): Promise<responses.SummarizeTextResponse>;
}
export {};
|
package/lib/client.js
ADDED
|
@@ -0,0 +1,376 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Generative AI Service Inference API
|
|
4
|
+
* OCI Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases for text generation, summarization, and text embeddings.
|
|
5
|
+
|
|
6
|
+
Use the Generative AI service inference API to access your custom model endpoints, or to try the out-of-the-box models to [generate text](#/en/generative-ai-inference/latest/GenerateTextResult/GenerateText), [summarize](#/en/generative-ai-inference/latest/SummarizeTextResult/SummarizeText), and [create text embeddings](#/en/generative-ai-inference/latest/EmbedTextResult/EmbedText).
|
|
7
|
+
|
|
8
|
+
To use a Generative AI custom model for inference, you must first create an endpoint for that model. Use the [Generative AI service management API](/#/en/generative-ai/latest/) to [create a custom model](#/en/generative-ai/latest/Model/) by fine-tuning an out-of-the-box model, or a previous version of a custom model, using your own data. Fine-tune the custom model on a [fine-tuning dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/). Then, create a [hosting dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/) with an [endpoint](#/en/generative-ai/latest/Endpoint/) to host your custom model. For resource management in the Generative AI service, use the [Generative AI service management API](/#/en/generative-ai/latest/).
|
|
9
|
+
|
|
10
|
+
To learn more about the service, see the [Generative AI documentation](/iaas/Content/generative-ai/home.htm).
|
|
11
|
+
|
|
12
|
+
* OpenAPI spec version: 20231130
|
|
13
|
+
*
|
|
14
|
+
*
|
|
15
|
+
* NOTE: This class is auto generated by OracleSDKGenerator.
|
|
16
|
+
* Do not edit the class manually.
|
|
17
|
+
*
|
|
18
|
+
* Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
|
|
19
|
+
* This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
|
|
20
|
+
*/
|
|
21
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
22
|
+
if (k2 === undefined) k2 = k;
|
|
23
|
+
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
24
|
+
}) : (function(o, m, k, k2) {
|
|
25
|
+
if (k2 === undefined) k2 = k;
|
|
26
|
+
o[k2] = m[k];
|
|
27
|
+
}));
|
|
28
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
29
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
30
|
+
}) : function(o, v) {
|
|
31
|
+
o["default"] = v;
|
|
32
|
+
});
|
|
33
|
+
var __importStar = (this && this.__importStar) || function (mod) {
|
|
34
|
+
if (mod && mod.__esModule) return mod;
|
|
35
|
+
var result = {};
|
|
36
|
+
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
37
|
+
__setModuleDefault(result, mod);
|
|
38
|
+
return result;
|
|
39
|
+
};
|
|
40
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
41
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
42
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
43
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
44
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
45
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
46
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
47
|
+
});
|
|
48
|
+
};
|
|
49
|
+
// Mark this CommonJS module as transpiled-from-ESM and pre-declare the named exports.
Object.defineProperty(exports, "__esModule", { value: true });
exports.GenerativeAiInferenceClient = exports.GenerativeAiInferenceApiKeys = void 0;
const common = require("oci-common");
const model = __importStar(require("./model"));
const oci_common_1 = require("oci-common");
// opossum circuit breaker; not invoked directly in this file — circuit instances come
// from oci-common's CircuitBreaker wrapper (see the constructor).
const Breaker = require("opossum");
// ===============================================
// This file is autogenerated - Please do not edit
// ===============================================
// Intentionally empty: no operations of this service require an operation-specific API key.
var GenerativeAiInferenceApiKeys;
(function (GenerativeAiInferenceApiKeys) {
})(GenerativeAiInferenceApiKeys = exports.GenerativeAiInferenceApiKeys || (exports.GenerativeAiInferenceApiKeys = {}));
|
|
61
|
+
/**
|
|
62
|
+
* This service client uses {@link common.CircuitBreaker.DefaultConfiguration} for all the operations by default if no circuit breaker configuration is defined by the user.
|
|
63
|
+
*/
|
|
64
|
+
class GenerativeAiInferenceClient {
|
|
65
|
+
    /**
     * Creates a client for the Generative AI inference service.
     *
     * @param params Authentication parameters; may carry an `authenticationDetailsProvider`
     *               (used for request signing and, if it is a region provider, the initial region)
     *               and an optional pre-built `httpClient`.
     * @param clientConfiguration Optional client configuration: retry configuration,
     *               circuit breaker, http options and body duplex mode.
     */
    constructor(params, clientConfiguration) {
        this["_realmSpecificEndpointTemplateEnabled"] = false;
        this["_endpoint"] = "";
        this["_defaultHeaders"] = {};
        this._circuitBreaker = null;
        this._httpOptions = undefined;
        this._bodyDuplexMode = undefined;
        this.targetService = "GenerativeAiInference";
        this._regionId = "";
        this._lastSetRegionOrRegionId = "";
        // A request signer is only possible when an auth provider is supplied.
        const requestSigner = params.authenticationDetailsProvider
            ? new common.DefaultRequestSigner(params.authenticationDetailsProvider)
            : null;
        if (clientConfiguration) {
            this._clientConfiguration = clientConfiguration;
            this._circuitBreaker = clientConfiguration.circuitBreaker
                ? clientConfiguration.circuitBreaker.circuit
                : null;
            this._httpOptions = clientConfiguration.httpOptions
                ? clientConfiguration.httpOptions
                : undefined;
            this._bodyDuplexMode = clientConfiguration.bodyDuplexMode
                ? clientConfiguration.bodyDuplexMode
                : undefined;
        }
        if (!oci_common_1.developerToolConfiguration.isServiceEnabled("generativeaiinference")) {
            let errmsg = "The developerToolConfiguration configuration disabled this service, this behavior is controlled by developerToolConfiguration.ociEnabledServiceSet variable. Please check if your local developer_tool_configuration file has configured the service you're targeting or contact the cloud provider on the availability of this service : ";
            // NOTE(review): this throws a plain string, not an Error instance (auto-generated
            // pattern) — callers checking `err instanceof Error` will not match it.
            throw errmsg.concat("generativeaiinference");
        }
        // if circuit breaker is not created, check if circuit breaker system is enabled to use default circuit breaker
        const specCircuitBreakerEnabled = true;
        if (!this._circuitBreaker &&
            common.utils.isCircuitBreakerSystemEnabled(clientConfiguration) &&
            (specCircuitBreakerEnabled || common.CircuitBreaker.DefaultCircuitBreakerOverriden)) {
            this._circuitBreaker = new common.CircuitBreaker().circuit;
        }
        // A caller-provided http client wins over the default fetch-based one.
        this._httpClient =
            params.httpClient ||
                new common.FetchHttpClient(requestSigner, this._circuitBreaker, this._httpOptions, this._bodyDuplexMode);
        // If the auth provider also knows its region, adopt it (this resolves the endpoint too).
        if (params.authenticationDetailsProvider &&
            common.isRegionProvider(params.authenticationDetailsProvider)) {
            const provider = params.authenticationDetailsProvider;
            if (provider.getRegion()) {
                this.region = provider.getRegion();
            }
        }
    }
|
|
112
|
+
/**
|
|
113
|
+
* Get the endpoint that is being used to call (ex, https://www.example.com).
|
|
114
|
+
*/
|
|
115
|
+
get endpoint() {
|
|
116
|
+
return this._endpoint;
|
|
117
|
+
}
|
|
118
|
+
/**
|
|
119
|
+
* Sets the endpoint to call (ex, https://www.example.com).
|
|
120
|
+
* @param endpoint The endpoint of the service.
|
|
121
|
+
*/
|
|
122
|
+
set endpoint(endpoint) {
|
|
123
|
+
this._endpoint = endpoint;
|
|
124
|
+
this._endpoint = this._endpoint + "/20231130";
|
|
125
|
+
if (this.logger)
|
|
126
|
+
this.logger.info(`GenerativeAiInferenceClient endpoint set to ${this._endpoint}`);
|
|
127
|
+
}
|
|
128
|
+
get logger() {
|
|
129
|
+
return common.LOG.logger;
|
|
130
|
+
}
|
|
131
|
+
    /**
     * Determines whether realm specific endpoint should be used or not.
     * Set realmSpecificEndpointTemplateEnabled to "true" if the user wants to enable use of realm specific endpoint template, otherwise set it to "false"
     * @param realmSpecificEndpointTemplateEnabled flag to enable the use of realm specific endpoint template
     */
    set useRealmSpecificEndpointTemplate(realmSpecificEndpointTemplateEnabled) {
        this._realmSpecificEndpointTemplateEnabled = realmSpecificEndpointTemplateEnabled;
        if (this.logger)
            this.logger.info(`realmSpecificEndpointTemplateEnabled set to ${this._realmSpecificEndpointTemplateEnabled}`);
        // Re-resolve the endpoint using whichever of region / regionId was set most recently;
        // if neither was ever set, the endpoint is left untouched.
        if (this._lastSetRegionOrRegionId === common.Region.REGION_STRING) {
            this.endpoint = common.EndpointBuilder.createEndpointFromRegion(GenerativeAiInferenceClient.serviceEndpointTemplate, this._region, GenerativeAiInferenceClient.endpointServiceName);
        }
        else if (this._lastSetRegionOrRegionId === common.Region.REGION_ID_STRING) {
            this.endpoint = common.EndpointBuilder.createEndpointFromRegionId(GenerativeAiInferenceClient.serviceEndpointTemplate, this._regionId, GenerativeAiInferenceClient.endpointServiceName);
        }
    }
|
|
147
|
+
    /**
     * Sets the region to call (ex, Region.US_PHOENIX_1).
     * Note, this will call {@link #endpoint(String) endpoint} after resolving the endpoint.
     * @param region The region of the service.
     */
    set region(region) {
        this._region = region;
        // Assigning `this.endpoint` runs the endpoint setter (appends API version, logs).
        this.endpoint = common.EndpointBuilder.createEndpointFromRegion(GenerativeAiInferenceClient.serviceEndpointTemplate, region, GenerativeAiInferenceClient.endpointServiceName);
        // Remember that a Region object (not a raw region ID) was set last; used by
        // the useRealmSpecificEndpointTemplate setter to re-resolve the endpoint.
        this._lastSetRegionOrRegionId = common.Region.REGION_STRING;
    }
|
|
157
|
+
    /**
     * Sets the regionId to call (ex, 'us-phoenix-1').
     *
     * Note, this will first try to map the region ID to a known Region and call {@link #region(Region) region}.
     * If no known Region could be determined, it will create an endpoint assuming its in default Realm OC1
     * and then call {@link #endpoint(String) endpoint}.
     * @param regionId The public region ID.
     */
    set regionId(regionId) {
        this._regionId = regionId;
        // Assigning `this.endpoint` runs the endpoint setter (appends API version, logs).
        this.endpoint = common.EndpointBuilder.createEndpointFromRegionId(GenerativeAiInferenceClient.serviceEndpointTemplate, regionId, GenerativeAiInferenceClient.endpointServiceName);
        // Remember that a raw region ID (not a Region object) was set last; used by
        // the useRealmSpecificEndpointTemplate setter to re-resolve the endpoint.
        this._lastSetRegionOrRegionId = common.Region.REGION_ID_STRING;
    }
|
|
170
|
+
/**
|
|
171
|
+
* Shutdown the circuit breaker used by the client when it is no longer needed
|
|
172
|
+
*/
|
|
173
|
+
shutdownCircuitBreaker() {
|
|
174
|
+
if (this._circuitBreaker) {
|
|
175
|
+
this._circuitBreaker.shutdown();
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
/**
|
|
179
|
+
* Produces embeddings for the inputs.
|
|
180
|
+
* <p>
|
|
181
|
+
An embedding is numeric representation of a piece of text. This text can be a phrase, a sentence, or one or more paragraphs. The Generative AI embedding model transforms each phrase, sentence, or paragraph that you input, into an array with 1024 numbers. You can use these embeddings for finding similarity in your input text such as finding phrases that are similar in context or category. Embeddings are mostly used for semantic searches where the search function focuses on the meaning of the text that it's searching through rather than finding results based on keywords.
|
|
182
|
+
*
|
|
183
|
+
* This operation uses {@link common.OciSdkDefaultRetryConfiguration} by default if no retry configuration is defined by the user.
|
|
184
|
+
* @param EmbedTextRequest
|
|
185
|
+
* @return EmbedTextResponse
|
|
186
|
+
* @throws OciError when an error occurs
|
|
187
|
+
* @example Click {@link https://docs.cloud.oracle.com/en-us/iaas/tools/typescript-sdk-examples/2.77.1/generativeaiinference/EmbedText.ts.html |here} to see how to use EmbedText API.
|
|
188
|
+
*/
|
|
189
|
+
embedText(embedTextRequest) {
|
|
190
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
191
|
+
if (this.logger)
|
|
192
|
+
this.logger.debug("Calling operation GenerativeAiInferenceClient#embedText.");
|
|
193
|
+
const operationName = "embedText";
|
|
194
|
+
const apiReferenceLink = "";
|
|
195
|
+
const pathParams = {};
|
|
196
|
+
const queryParams = {};
|
|
197
|
+
let headerParams = {
|
|
198
|
+
"Content-Type": common.Constants.APPLICATION_JSON,
|
|
199
|
+
"opc-retry-token": embedTextRequest.opcRetryToken,
|
|
200
|
+
"opc-request-id": embedTextRequest.opcRequestId
|
|
201
|
+
};
|
|
202
|
+
const specRetryConfiguration = common.OciSdkDefaultRetryConfiguration;
|
|
203
|
+
const retrier = oci_common_1.GenericRetrier.createPreferredRetrier(this._clientConfiguration ? this._clientConfiguration.retryConfiguration : undefined, embedTextRequest.retryConfiguration, specRetryConfiguration);
|
|
204
|
+
if (this.logger)
|
|
205
|
+
retrier.logger = this.logger;
|
|
206
|
+
const request = yield oci_common_1.composeRequest({
|
|
207
|
+
baseEndpoint: this._endpoint,
|
|
208
|
+
defaultHeaders: this._defaultHeaders,
|
|
209
|
+
path: "/actions/embedText",
|
|
210
|
+
method: "POST",
|
|
211
|
+
bodyContent: common.ObjectSerializer.serialize(embedTextRequest.embedTextDetails, "EmbedTextDetails", model.EmbedTextDetails.getJsonObj),
|
|
212
|
+
pathParams: pathParams,
|
|
213
|
+
headerParams: headerParams,
|
|
214
|
+
queryParams: queryParams
|
|
215
|
+
});
|
|
216
|
+
try {
|
|
217
|
+
const response = yield retrier.makeServiceCall(this._httpClient, request, this.targetService, operationName, apiReferenceLink);
|
|
218
|
+
const sdkResponse = oci_common_1.composeResponse({
|
|
219
|
+
responseObject: {},
|
|
220
|
+
body: yield response.json(),
|
|
221
|
+
bodyKey: "embedTextResult",
|
|
222
|
+
bodyModel: model.EmbedTextResult,
|
|
223
|
+
type: "model.EmbedTextResult",
|
|
224
|
+
responseHeaders: [
|
|
225
|
+
{
|
|
226
|
+
value: response.headers.get("etag"),
|
|
227
|
+
key: "etag",
|
|
228
|
+
dataType: "string"
|
|
229
|
+
},
|
|
230
|
+
{
|
|
231
|
+
value: response.headers.get("opc-request-id"),
|
|
232
|
+
key: "opcRequestId",
|
|
233
|
+
dataType: "string"
|
|
234
|
+
}
|
|
235
|
+
]
|
|
236
|
+
});
|
|
237
|
+
return sdkResponse;
|
|
238
|
+
}
|
|
239
|
+
catch (err) {
|
|
240
|
+
throw err;
|
|
241
|
+
}
|
|
242
|
+
});
|
|
243
|
+
}
|
|
244
|
+
/**
|
|
245
|
+
* Generates a text response based on the user prompt.
|
|
246
|
+
*
|
|
247
|
+
* This operation uses {@link common.OciSdkDefaultRetryConfiguration} by default if no retry configuration is defined by the user.
|
|
248
|
+
* @param GenerateTextRequest
|
|
249
|
+
* @return GenerateTextResponse
|
|
250
|
+
* @throws OciError when an error occurs
|
|
251
|
+
* @example Click {@link https://docs.cloud.oracle.com/en-us/iaas/tools/typescript-sdk-examples/2.77.1/generativeaiinference/GenerateText.ts.html |here} to see how to use GenerateText API.
|
|
252
|
+
*/
|
|
253
|
+
generateText(generateTextRequest) {
|
|
254
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
255
|
+
if (this.logger)
|
|
256
|
+
this.logger.debug("Calling operation GenerativeAiInferenceClient#generateText.");
|
|
257
|
+
const operationName = "generateText";
|
|
258
|
+
const apiReferenceLink = "";
|
|
259
|
+
const pathParams = {};
|
|
260
|
+
const queryParams = {};
|
|
261
|
+
let headerParams = {
|
|
262
|
+
"Content-Type": common.Constants.APPLICATION_JSON,
|
|
263
|
+
"opc-retry-token": generateTextRequest.opcRetryToken,
|
|
264
|
+
"opc-request-id": generateTextRequest.opcRequestId
|
|
265
|
+
};
|
|
266
|
+
const specRetryConfiguration = common.OciSdkDefaultRetryConfiguration;
|
|
267
|
+
const retrier = oci_common_1.GenericRetrier.createPreferredRetrier(this._clientConfiguration ? this._clientConfiguration.retryConfiguration : undefined, generateTextRequest.retryConfiguration, specRetryConfiguration);
|
|
268
|
+
if (this.logger)
|
|
269
|
+
retrier.logger = this.logger;
|
|
270
|
+
const request = yield oci_common_1.composeRequest({
|
|
271
|
+
baseEndpoint: this._endpoint,
|
|
272
|
+
defaultHeaders: this._defaultHeaders,
|
|
273
|
+
path: "/actions/generateText",
|
|
274
|
+
method: "POST",
|
|
275
|
+
bodyContent: common.ObjectSerializer.serialize(generateTextRequest.generateTextDetails, "GenerateTextDetails", model.GenerateTextDetails.getJsonObj),
|
|
276
|
+
pathParams: pathParams,
|
|
277
|
+
headerParams: headerParams,
|
|
278
|
+
queryParams: queryParams
|
|
279
|
+
});
|
|
280
|
+
try {
|
|
281
|
+
const response = yield retrier.makeServiceCall(this._httpClient, request, this.targetService, operationName, apiReferenceLink);
|
|
282
|
+
const sdkResponse = oci_common_1.composeResponse({
|
|
283
|
+
responseObject: {},
|
|
284
|
+
body: yield response.json(),
|
|
285
|
+
bodyKey: "generateTextResult",
|
|
286
|
+
bodyModel: model.GenerateTextResult,
|
|
287
|
+
type: "model.GenerateTextResult",
|
|
288
|
+
responseHeaders: [
|
|
289
|
+
{
|
|
290
|
+
value: response.headers.get("etag"),
|
|
291
|
+
key: "etag",
|
|
292
|
+
dataType: "string"
|
|
293
|
+
},
|
|
294
|
+
{
|
|
295
|
+
value: response.headers.get("opc-request-id"),
|
|
296
|
+
key: "opcRequestId",
|
|
297
|
+
dataType: "string"
|
|
298
|
+
}
|
|
299
|
+
]
|
|
300
|
+
});
|
|
301
|
+
return sdkResponse;
|
|
302
|
+
}
|
|
303
|
+
catch (err) {
|
|
304
|
+
throw err;
|
|
305
|
+
}
|
|
306
|
+
});
|
|
307
|
+
}
|
|
308
|
+
/**
|
|
309
|
+
* Summarizes the input text.
|
|
310
|
+
*
|
|
311
|
+
* This operation uses {@link common.OciSdkDefaultRetryConfiguration} by default if no retry configuration is defined by the user.
|
|
312
|
+
* @param SummarizeTextRequest
|
|
313
|
+
* @return SummarizeTextResponse
|
|
314
|
+
* @throws OciError when an error occurs
|
|
315
|
+
* @example Click {@link https://docs.cloud.oracle.com/en-us/iaas/tools/typescript-sdk-examples/2.77.1/generativeaiinference/SummarizeText.ts.html |here} to see how to use SummarizeText API.
|
|
316
|
+
*/
|
|
317
|
+
summarizeText(summarizeTextRequest) {
|
|
318
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
319
|
+
if (this.logger)
|
|
320
|
+
this.logger.debug("Calling operation GenerativeAiInferenceClient#summarizeText.");
|
|
321
|
+
const operationName = "summarizeText";
|
|
322
|
+
const apiReferenceLink = "";
|
|
323
|
+
const pathParams = {};
|
|
324
|
+
const queryParams = {};
|
|
325
|
+
let headerParams = {
|
|
326
|
+
"Content-Type": common.Constants.APPLICATION_JSON,
|
|
327
|
+
"opc-retry-token": summarizeTextRequest.opcRetryToken,
|
|
328
|
+
"opc-request-id": summarizeTextRequest.opcRequestId
|
|
329
|
+
};
|
|
330
|
+
const specRetryConfiguration = common.OciSdkDefaultRetryConfiguration;
|
|
331
|
+
const retrier = oci_common_1.GenericRetrier.createPreferredRetrier(this._clientConfiguration ? this._clientConfiguration.retryConfiguration : undefined, summarizeTextRequest.retryConfiguration, specRetryConfiguration);
|
|
332
|
+
if (this.logger)
|
|
333
|
+
retrier.logger = this.logger;
|
|
334
|
+
const request = yield oci_common_1.composeRequest({
|
|
335
|
+
baseEndpoint: this._endpoint,
|
|
336
|
+
defaultHeaders: this._defaultHeaders,
|
|
337
|
+
path: "/actions/summarizeText",
|
|
338
|
+
method: "POST",
|
|
339
|
+
bodyContent: common.ObjectSerializer.serialize(summarizeTextRequest.summarizeTextDetails, "SummarizeTextDetails", model.SummarizeTextDetails.getJsonObj),
|
|
340
|
+
pathParams: pathParams,
|
|
341
|
+
headerParams: headerParams,
|
|
342
|
+
queryParams: queryParams
|
|
343
|
+
});
|
|
344
|
+
try {
|
|
345
|
+
const response = yield retrier.makeServiceCall(this._httpClient, request, this.targetService, operationName, apiReferenceLink);
|
|
346
|
+
const sdkResponse = oci_common_1.composeResponse({
|
|
347
|
+
responseObject: {},
|
|
348
|
+
body: yield response.json(),
|
|
349
|
+
bodyKey: "summarizeTextResult",
|
|
350
|
+
bodyModel: model.SummarizeTextResult,
|
|
351
|
+
type: "model.SummarizeTextResult",
|
|
352
|
+
responseHeaders: [
|
|
353
|
+
{
|
|
354
|
+
value: response.headers.get("etag"),
|
|
355
|
+
key: "etag",
|
|
356
|
+
dataType: "string"
|
|
357
|
+
},
|
|
358
|
+
{
|
|
359
|
+
value: response.headers.get("opc-request-id"),
|
|
360
|
+
key: "opcRequestId",
|
|
361
|
+
dataType: "string"
|
|
362
|
+
}
|
|
363
|
+
]
|
|
364
|
+
});
|
|
365
|
+
return sdkResponse;
|
|
366
|
+
}
|
|
367
|
+
catch (err) {
|
|
368
|
+
throw err;
|
|
369
|
+
}
|
|
370
|
+
});
|
|
371
|
+
}
|
|
372
|
+
}
|
|
373
|
+
// Export the client so callers can construct it directly from this module.
exports.GenerativeAiInferenceClient = GenerativeAiInferenceClient;
// Endpoint template: {region} and {secondLevelDomain} are substituted at
// runtime when an endpoint is derived from a region (see the region setters).
GenerativeAiInferenceClient.serviceEndpointTemplate = "https://inference.generativeai.{region}.oci.{secondLevelDomain}";
// No dedicated endpoint service name is published for this service.
GenerativeAiInferenceClient.endpointServiceName = "";
|
|
376
|
+
//# sourceMappingURL=client.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client.js","sourceRoot":"","sources":["../../../../lib/generativeaiinference/lib/client.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;GAkBG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,qCAAsC;AAEtC,+CAAiC;AAEjC,2CAKoB;AACpB,MAAM,OAAO,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;AAEnC,kDAAkD;AAClD,kDAAkD;AAClD,kDAAkD;AAElD,IAAY,4BAA+B;AAA3C,WAAY,4BAA4B;AAAE,CAAC,EAA/B,4BAA4B,GAA5B,oCAA4B,KAA5B,oCAA4B,QAAG;AAC3C;;GAEG;AACH,MAAa,2BAA2B;IAkBtC,YAAY,MAAyB,EAAE,mBAAgD;QAd7E,6CAAuC,GAAY,KAAK,CAAC;QACzD,iBAAW,GAAW,EAAE,CAAC;QACzB,uBAAiB,GAAQ,EAAE,CAAC;QAE5B,oBAAe,GAA0B,IAAI,CAAC;QAC9C,iBAAY,GAAQ,SAAS,CAAC;QAC9B,oBAAe,GAAQ,SAAS,CAAC;QACpC,kBAAa,GAAG,uBAAuB,CAAC;QACrC,cAAS,GAAW,EAAE,CAAC;QAEvB,6BAAwB,GAAW,EAAE,CAAC;QAK9C,MAAM,aAAa,GAAG,MAAM,CAAC,6BAA6B;YACxD,CAAC,CAAC,IAAI,MAAM,CAAC,oBAAoB,CAAC,MAAM,CAAC,6BAA6B,CAAC;YACvE,CAAC,CAAC,IAAI,CAAC;QACT,IAAI,mBAAmB,EAAE;YACvB,IAAI,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;YAChD,IAAI,CAAC,eAAe,GAAG,mBAAmB,CAAC,cAAc;gBACvD,CAAC,CAAC,mBAAmB,CAAC,cAAe,CAAC,OAAO;gBAC7C,CAAC,CAAC,IAAI,CAAC;YACT,IAAI,CAAC,YAAY,GAAG,mBAAmB,CAAC,WAAW;gBACjD,CAAC,CAAC,mBAAmB,CAAC,WAAW;gBACjC,CAAC,CAAC,SAAS,CAAC;YACd,IAAI,CAAC,eAAe,GAAG,mBAAmB,CAAC,cAAc;gBACvD,CAAC,CAAC,mBAAmB,CAAC,cAAc;gBACpC,CAAC,CAAC,SAAS,CAAC;SACf;QAED,IAAI,CAAC,uCAA0B,CAAC,gBAAgB,CAAC,uBAAuB,CAAC,EAAE;YACzE,IAAI,MAAM,GACR,4UAA4U,CAAC;YAC/U,MAAM,MAAM,CAAC,MAAM,CAAC,uBAAuB,CAAC,CAAC;SAC9C;QAED,+GAA+G;QAC/G,MAAM,yBAAyB,GAAG,IAAI,CAAC;QACvC,IACE,CAAC,IAAI,CAAC,eAAe;YACrB,MAAM,CAAC,KAAK,CAAC,6BAA6B,CAAC,mBAAoB,CAAC;YAChE,CAAC,yBAAyB,IAAI,MAAM,CAAC,cAAc,CAAC,8BAA8B,CAAC,EACnF;YACA,IAAI,CAAC,eAAe,GAAG,IAAI,MAAM,CAAC,cAAc,EAAE,CAAC,OAAO,CAAC;SAC5D;QACD,IAAI,CAAC,WAAW;YACd,MAAM,CAAC,UAAU;gBACjB,IAAI,MAAM,CAAC,eAAe,CACxB,aAAa,EACb,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,YAAY,EACjB,IAAI,CAAC,eAAe,CACrB,CAAC;QAEJ,IACE,MAAM,CAAC,6BAA6B;YACpC,MAAM,CAAC,gBAAgB,CAAC,MAAM,CAAC,6BAA6B,CAAC,EAC7D;YACA,MAAM,QAAQ,GAA0B,MAAM,CAAC,6BAA6B,CAAC;YAC7E,IAAI,QAAQ,CAAC,SAAS,EAAE,EAAE;gBACxB,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,SAAS,EAAE,CAAC;aA
CpC;SACF;IACH,CAAC;IAED;;OAEG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;OAGG;IACH,IAAW,QAAQ,CAAC,QAAgB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,WAAW,CAAC;QAC9C,IAAI,IAAI,CAAC,MAAM;YACb,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,+CAA+C,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC;IACtF,CAAC;IAED,IAAW,MAAM;QACf,OAAO,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC;IAC3B,CAAC;IAED;;;;OAIG;IACH,IAAW,gCAAgC,CAAC,oCAA6C;QACvF,IAAI,CAAC,qCAAqC,GAAG,oCAAoC,CAAC;QAClF,IAAI,IAAI,CAAC,MAAM;YACb,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,+CAA+C,IAAI,CAAC,qCAAqC,EAAE,CAC5F,CAAC;QACJ,IAAI,IAAI,CAAC,wBAAwB,KAAK,MAAM,CAAC,MAAM,CAAC,aAAa,EAAE;YACjE,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,CAAC,wBAAwB,CAC7D,2BAA2B,CAAC,uBAAuB,EACnD,IAAI,CAAC,OAAO,EACZ,2BAA2B,CAAC,mBAAmB,CAChD,CAAC;SACH;aAAM,IAAI,IAAI,CAAC,wBAAwB,KAAK,MAAM,CAAC,MAAM,CAAC,gBAAgB,EAAE;YAC3E,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,CAAC,0BAA0B,CAC/D,2BAA2B,CAAC,uBAAuB,EACnD,IAAI,CAAC,SAAS,EACd,2BAA2B,CAAC,mBAAmB,CAChD,CAAC;SACH;IACH,CAAC;IAED;;;;OAIG;IACH,IAAW,MAAM,CAAC,MAAqB;QACrC,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QACtB,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,CAAC,wBAAwB,CAC7D,2BAA2B,CAAC,uBAAuB,EACnD,MAAM,EACN,2BAA2B,CAAC,mBAAmB,CAChD,CAAC;QACF,IAAI,CAAC,wBAAwB,GAAG,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC;IAC9D,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,QAAQ,CAAC,QAAgB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,CAAC,0BAA0B,CAC/D,2BAA2B,CAAC,uBAAuB,EACnD,QAAQ,EACR,2BAA2B,CAAC,mBAAmB,CAChD,CAAC;QACF,IAAI,CAAC,wBAAwB,GAAG,MAAM,CAAC,MAAM,CAAC,gBAAgB,CAAC;IACjE,CAAC;IAED;;OAEG;IACI,sBAAsB;QAC3B,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,IAAI,CAAC,eAAe,CAAC,QAAQ,EAAE,CAAC;SACjC;IACH,CAAC;IAED;;;;;;;;;;SAUK;IACQ,SAAS,CACpB,gBAA2C;;YAE3C,IAAI,IAAI,CAAC,MAAM;gBAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,0DAA0D,CAAC,CAAC;YAC/F,MAAM,aAAa,GAAG,WAAW,CAAC;YAClC,MAAM,gBAAgB,GAAG,EAAE,CAAC;YAC5B,MAAM,UAAU,GAAG,EAAE,CAAC;YAEtB,MAAM,WAAW,GAAG,EAAE,CAAC;YAEvB,IAAI,YAAY,GAAG;gBACjB,cAAc,EAAE,MAAM,CAAC,SAAS,CAAC,gBAAgB;gBACjD,iBAAiB,EAAE,gBAAgB,CAAC,aAAa;gBACjD,gBAAg
B,EAAE,gBAAgB,CAAC,YAAY;aAChD,CAAC;YAEF,MAAM,sBAAsB,GAAG,MAAM,CAAC,+BAA+B,CAAC;YACtE,MAAM,OAAO,GAAG,2BAAc,CAAC,sBAAsB,CACnD,IAAI,CAAC,oBAAoB,CAAC,CAAC,CAAC,IAAI,CAAC,oBAAoB,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,EACpF,gBAAgB,CAAC,kBAAkB,EACnC,sBAAsB,CACvB,CAAC;YACF,IAAI,IAAI,CAAC,MAAM;gBAAE,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;YAC9C,MAAM,OAAO,GAAG,MAAM,2BAAc,CAAC;gBACnC,YAAY,EAAE,IAAI,CAAC,SAAS;gBAC5B,cAAc,EAAE,IAAI,CAAC,eAAe;gBACpC,IAAI,EAAE,oBAAoB;gBAC1B,MAAM,EAAE,MAAM;gBACd,WAAW,EAAE,MAAM,CAAC,gBAAgB,CAAC,SAAS,CAC5C,gBAAgB,CAAC,gBAAgB,EACjC,kBAAkB,EAClB,KAAK,CAAC,gBAAgB,CAAC,UAAU,CAClC;gBACD,UAAU,EAAE,UAAU;gBACtB,YAAY,EAAE,YAAY;gBAC1B,WAAW,EAAE,WAAW;aACzB,CAAC,CAAC;YACH,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,eAAe,CAC5C,IAAI,CAAC,WAAW,EAChB,OAAO,EACP,IAAI,CAAC,aAAa,EAClB,aAAa,EACb,gBAAgB,CACjB,CAAC;gBACF,MAAM,WAAW,GAAG,4BAAe,CAAC;oBAClC,cAAc,EAA+B,EAAE;oBAC/C,IAAI,EAAE,MAAM,QAAQ,CAAC,IAAI,EAAE;oBAC3B,OAAO,EAAE,iBAAiB;oBAC1B,SAAS,EAAE,KAAK,CAAC,eAAe;oBAChC,IAAI,EAAE,uBAAuB;oBAC7B,eAAe,EAAE;wBACf;4BACE,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC;4BACnC,GAAG,EAAE,MAAM;4BACX,QAAQ,EAAE,QAAQ;yBACnB;wBACD;4BACE,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;4BAC7C,GAAG,EAAE,cAAc;4BACnB,QAAQ,EAAE,QAAQ;yBACnB;qBACF;iBACF,CAAC,CAAC;gBAEH,OAAO,WAAW,CAAC;aACpB;YAAC,OAAO,GAAG,EAAE;gBACZ,MAAM,GAAG,CAAC;aACX;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACU,YAAY,CACvB,mBAAiD;;YAEjD,IAAI,IAAI,CAAC,MAAM;gBACb,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAC;YACnF,MAAM,aAAa,GAAG,cAAc,CAAC;YACrC,MAAM,gBAAgB,GAAG,EAAE,CAAC;YAC5B,MAAM,UAAU,GAAG,EAAE,CAAC;YAEtB,MAAM,WAAW,GAAG,EAAE,CAAC;YAEvB,IAAI,YAAY,GAAG;gBACjB,cAAc,EAAE,MAAM,CAAC,SAAS,CAAC,gBAAgB;gBACjD,iBAAiB,EAAE,mBAAmB,CAAC,aAAa;gBACpD,gBAAgB,EAAE,mBAAmB,CAAC,YAAY;aACnD,CAAC;YAEF,MAAM,sBAAsB,GAAG,MAAM,CAAC,+BAA+B,CAAC;YACtE,MAAM,OAAO,GAAG,2BAAc,CAAC,sBAAsB,CACnD,IAAI,CAAC,oBAAoB,CAAC,CAAC,CAAC,IAAI,CAAC,oBAAoB,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,EACpF,mBAAmB,CAAC,kBAAkB,EACtC,sBAAsB,CACvB,CAAC;YACF,IAAI,IAAI,CAAC,MAAM;gBAAE,OAAO,CAAC,MAAM,
GAAG,IAAI,CAAC,MAAM,CAAC;YAC9C,MAAM,OAAO,GAAG,MAAM,2BAAc,CAAC;gBACnC,YAAY,EAAE,IAAI,CAAC,SAAS;gBAC5B,cAAc,EAAE,IAAI,CAAC,eAAe;gBACpC,IAAI,EAAE,uBAAuB;gBAC7B,MAAM,EAAE,MAAM;gBACd,WAAW,EAAE,MAAM,CAAC,gBAAgB,CAAC,SAAS,CAC5C,mBAAmB,CAAC,mBAAmB,EACvC,qBAAqB,EACrB,KAAK,CAAC,mBAAmB,CAAC,UAAU,CACrC;gBACD,UAAU,EAAE,UAAU;gBACtB,YAAY,EAAE,YAAY;gBAC1B,WAAW,EAAE,WAAW;aACzB,CAAC,CAAC;YACH,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,eAAe,CAC5C,IAAI,CAAC,WAAW,EAChB,OAAO,EACP,IAAI,CAAC,aAAa,EAClB,aAAa,EACb,gBAAgB,CACjB,CAAC;gBACF,MAAM,WAAW,GAAG,4BAAe,CAAC;oBAClC,cAAc,EAAkC,EAAE;oBAClD,IAAI,EAAE,MAAM,QAAQ,CAAC,IAAI,EAAE;oBAC3B,OAAO,EAAE,oBAAoB;oBAC7B,SAAS,EAAE,KAAK,CAAC,kBAAkB;oBACnC,IAAI,EAAE,0BAA0B;oBAChC,eAAe,EAAE;wBACf;4BACE,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC;4BACnC,GAAG,EAAE,MAAM;4BACX,QAAQ,EAAE,QAAQ;yBACnB;wBACD;4BACE,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;4BAC7C,GAAG,EAAE,cAAc;4BACnB,QAAQ,EAAE,QAAQ;yBACnB;qBACF;iBACF,CAAC,CAAC;gBAEH,OAAO,WAAW,CAAC;aACpB;YAAC,OAAO,GAAG,EAAE;gBACZ,MAAM,GAAG,CAAC;aACX;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACU,aAAa,CACxB,oBAAmD;;YAEnD,IAAI,IAAI,CAAC,MAAM;gBACb,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,8DAA8D,CAAC,CAAC;YACpF,MAAM,aAAa,GAAG,eAAe,CAAC;YACtC,MAAM,gBAAgB,GAAG,EAAE,CAAC;YAC5B,MAAM,UAAU,GAAG,EAAE,CAAC;YAEtB,MAAM,WAAW,GAAG,EAAE,CAAC;YAEvB,IAAI,YAAY,GAAG;gBACjB,cAAc,EAAE,MAAM,CAAC,SAAS,CAAC,gBAAgB;gBACjD,iBAAiB,EAAE,oBAAoB,CAAC,aAAa;gBACrD,gBAAgB,EAAE,oBAAoB,CAAC,YAAY;aACpD,CAAC;YAEF,MAAM,sBAAsB,GAAG,MAAM,CAAC,+BAA+B,CAAC;YACtE,MAAM,OAAO,GAAG,2BAAc,CAAC,sBAAsB,CACnD,IAAI,CAAC,oBAAoB,CAAC,CAAC,CAAC,IAAI,CAAC,oBAAoB,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,EACpF,oBAAoB,CAAC,kBAAkB,EACvC,sBAAsB,CACvB,CAAC;YACF,IAAI,IAAI,CAAC,MAAM;gBAAE,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;YAC9C,MAAM,OAAO,GAAG,MAAM,2BAAc,CAAC;gBACnC,YAAY,EAAE,IAAI,CAAC,SAAS;gBAC5B,cAAc,EAAE,IAAI,CAAC,eAAe;gBACpC,IAAI,EAAE,wBAAwB;gBAC9B,MAAM,EAAE,MAAM;gBACd,WAAW,EAAE,MAAM,CAAC,gBAAgB,CAAC,SAAS,CAC5C,oBAAoB,CAAC,oBAAoB,EACzC,sBAAsB,EACtB,KAAK,CAAC,oBAAoB,CAAC,UAAU,CACtC;gBACD,
UAAU,EAAE,UAAU;gBACtB,YAAY,EAAE,YAAY;gBAC1B,WAAW,EAAE,WAAW;aACzB,CAAC,CAAC;YACH,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,eAAe,CAC5C,IAAI,CAAC,WAAW,EAChB,OAAO,EACP,IAAI,CAAC,aAAa,EAClB,aAAa,EACb,gBAAgB,CACjB,CAAC;gBACF,MAAM,WAAW,GAAG,4BAAe,CAAC;oBAClC,cAAc,EAAmC,EAAE;oBACnD,IAAI,EAAE,MAAM,QAAQ,CAAC,IAAI,EAAE;oBAC3B,OAAO,EAAE,qBAAqB;oBAC9B,SAAS,EAAE,KAAK,CAAC,mBAAmB;oBACpC,IAAI,EAAE,2BAA2B;oBACjC,eAAe,EAAE;wBACf;4BACE,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC;4BACnC,GAAG,EAAE,MAAM;4BACX,QAAQ,EAAE,QAAQ;yBACnB;wBACD;4BACE,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;4BAC7C,GAAG,EAAE,cAAc;4BACnB,QAAQ,EAAE,QAAQ;yBACnB;qBACF;iBACF,CAAC,CAAC;gBAEH,OAAO,WAAW,CAAC;aACpB;YAAC,OAAO,GAAG,EAAE;gBACZ,MAAM,GAAG,CAAC;aACX;QACH,CAAC;KAAA;;AAlZH,kEAmZC;AAlZkB,mDAAuB,GACtC,iEAAiE,CAAC;AACnD,+CAAmB,GAAG,EAAE,CAAC"}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generative AI Service Inference API
|
|
3
|
+
* OCI Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases for text generation, summarization, and text embeddings.
|
|
4
|
+
|
|
5
|
+
Use the Generative AI service inference API to access your custom model endpoints, or to try the out-of-the-box models to [generate text](#/en/generative-ai-inference/latest/GenerateTextResult/GenerateText), [summarize](#/en/generative-ai-inference/latest/SummarizeTextResult/SummarizeText), and [create text embeddings](#/en/generative-ai-inference/latest/EmbedTextResult/EmbedText).
|
|
6
|
+
|
|
7
|
+
To use a Generative AI custom model for inference, you must first create an endpoint for that model. Use the [Generative AI service management API](/#/en/generative-ai/latest/) to [create a custom model](#/en/generative-ai/latest/Model/) by fine-tuning an out-of-the-box model, or a previous version of a custom model, using your own data. Fine-tune the custom model on a [fine-tuning dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/). Then, create a [hosting dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/) with an [endpoint](#/en/generative-ai/latest/Endpoint/) to host your custom model. For resource management in the Generative AI service, use the [Generative AI service management API](/#/en/generative-ai/latest/).
|
|
8
|
+
|
|
9
|
+
To learn more about the service, see the [Generative AI documentation](/iaas/Content/generative-ai/home.htm).
|
|
10
|
+
|
|
11
|
+
* OpenAPI spec version: 20231130
|
|
12
|
+
*
|
|
13
|
+
*
|
|
14
|
+
* NOTE: This class is auto generated by OracleSDKGenerator.
|
|
15
|
+
* Do not edit the class manually.
|
|
16
|
+
*
|
|
17
|
+
* Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
|
|
18
|
+
* This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
|
|
19
|
+
*/
|
|
20
|
+
import * as model from "../model";
|
|
21
|
+
/**
|
|
22
|
+
* Represents a single instance of generated text.
|
|
23
|
+
*/
|
|
24
|
+
export interface Choice {
|
|
25
|
+
/**
|
|
26
|
+
* The index of the generated text. Note: Numbers greater than Number.MAX_SAFE_INTEGER will result in rounding issues.
|
|
27
|
+
*/
|
|
28
|
+
"index": number;
|
|
29
|
+
/**
|
|
30
|
+
* The generated text.
|
|
31
|
+
*/
|
|
32
|
+
"text": string;
|
|
33
|
+
/**
|
|
34
|
+
* The reason why the model stopped generating tokens.
|
|
35
|
+
* <p>
|
|
36
|
+
Stops if the model hits a natural stop point or a provided stop sequence. Returns the length if the tokens reach the specified maximum number of tokens.
|
|
37
|
+
*
|
|
38
|
+
*/
|
|
39
|
+
"finishReason": string;
|
|
40
|
+
"logprobs"?: model.Logprobs;
|
|
41
|
+
}
|
|
42
|
+
export declare namespace Choice {
|
|
43
|
+
function getJsonObj(obj: Choice): object;
|
|
44
|
+
function getDeserializedJsonObj(obj: Choice): object;
|
|
45
|
+
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Generative AI Service Inference API
|
|
4
|
+
* OCI Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases for text generation, summarization, and text embeddings.
|
|
5
|
+
|
|
6
|
+
Use the Generative AI service inference API to access your custom model endpoints, or to try the out-of-the-box models to [generate text](#/en/generative-ai-inference/latest/GenerateTextResult/GenerateText), [summarize](#/en/generative-ai-inference/latest/SummarizeTextResult/SummarizeText), and [create text embeddings](#/en/generative-ai-inference/latest/EmbedTextResult/EmbedText).
|
|
7
|
+
|
|
8
|
+
To use a Generative AI custom model for inference, you must first create an endpoint for that model. Use the [Generative AI service management API](/#/en/generative-ai/latest/) to [create a custom model](#/en/generative-ai/latest/Model/) by fine-tuning an out-of-the-box model, or a previous version of a custom model, using your own data. Fine-tune the custom model on a [fine-tuning dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/). Then, create a [hosting dedicated AI cluster](#/en/generative-ai/latest/DedicatedAiCluster/) with an [endpoint](#/en/generative-ai/latest/Endpoint/) to host your custom model. For resource management in the Generative AI service, use the [Generative AI service management API](/#/en/generative-ai/latest/).
|
|
9
|
+
|
|
10
|
+
To learn more about the service, see the [Generative AI documentation](/iaas/Content/generative-ai/home.htm).
|
|
11
|
+
|
|
12
|
+
* OpenAPI spec version: 20231130
|
|
13
|
+
*
|
|
14
|
+
*
|
|
15
|
+
* NOTE: This class is auto generated by OracleSDKGenerator.
|
|
16
|
+
* Do not edit the class manually.
|
|
17
|
+
*
|
|
18
|
+
* Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
|
|
19
|
+
* This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
|
|
20
|
+
*/
|
|
21
|
+
// TypeScript-emitted CommonJS interop helpers. Each helper defers to a
// pre-existing implementation on `this` (e.g. one injected by tslib) when
// available, and otherwise falls back to a local definition.

// Re-exports `source[key]` on `target` under `alias` (defaulting to `key`).
// With Object.create available, the binding is live (a getter); otherwise it
// degrades to a one-time copy.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (target, source, key, alias) {
        var name = alias === undefined ? key : alias;
        Object.defineProperty(target, name, {
            enumerable: true,
            get: function () { return source[key]; }
        });
    }
    : function (target, source, key, alias) {
        var name = alias === undefined ? key : alias;
        target[name] = source[key];
    });
// Attaches `value` as the enumerable `default` property of `target`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
    ? function (target, value) {
        Object.defineProperty(target, "default", { enumerable: true, value: value });
    }
    : function (target, value) {
        target["default"] = value;
    });
// Emulates `import * as ns` over a CommonJS module: true ES modules pass
// through untouched; otherwise every own key except "default" is re-bound
// onto a namespace object whose `default` is the original module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (key !== "default" && Object.prototype.hasOwnProperty.call(mod, key)) {
                __createBinding(result, mod, key);
            }
        }
    }
    __setModuleDefault(result, mod);
    return result;
};
|
|
40
|
+
Object.defineProperty(exports, "__esModule", { value: true });
exports.Choice = void 0;
const model = __importStar(require("../model"));
// Serialization helpers for the Choice model. In both directions every field
// is copied through verbatim except the nested `logprobs` model, which is
// converted with its own helper when present.
var Choice;
(function (Choice) {
    /** Maps a Choice to its wire-format JSON object. */
    function getJsonObj(obj) {
        const nested = obj.logprobs ? model.Logprobs.getJsonObj(obj.logprobs) : undefined;
        return Object.assign(Object.assign({}, obj), { "logprobs": nested });
    }
    Choice.getJsonObj = getJsonObj;
    /** Maps a wire-format JSON object back to the client-side Choice shape. */
    function getDeserializedJsonObj(obj) {
        const nested = obj.logprobs
            ? model.Logprobs.getDeserializedJsonObj(obj.logprobs)
            : undefined;
        return Object.assign(Object.assign({}, obj), { "logprobs": nested });
    }
    Choice.getDeserializedJsonObj = getDeserializedJsonObj;
})(Choice = exports.Choice || (exports.Choice = {}));
//# sourceMappingURL=choice.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"choice.js","sourceRoot":"","sources":["../../../../../lib/generativeaiinference/lib/model/choice.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;GAkBG;;;;;;;;;;;;;;;;;;;;;;AAEH,gDAAkC;AAyBlC,IAAiB,MAAM,CAqBtB;AArBD,WAAiB,MAAM;IACrB,SAAgB,UAAU,CAAC,GAAW;QACpC,MAAM,OAAO,mCACR,GAAG,GACH;YACD,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;SAC/E,CACF,CAAC;QAEF,OAAO,OAAO,CAAC;IACjB,CAAC;IATe,iBAAU,aASzB,CAAA;IACD,SAAgB,sBAAsB,CAAC,GAAW;QAChD,MAAM,OAAO,mCACR,GAAG,GACH;YACD,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,sBAAsB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;SAC3F,CACF,CAAC;QAEF,OAAO,OAAO,CAAC;IACjB,CAAC;IATe,6BAAsB,yBASrC,CAAA;AACH,CAAC,EArBgB,MAAM,GAAN,cAAM,KAAN,cAAM,QAqBtB"}
|