objectiveai 1.1.10 → 1.1.12
This diff compares the contents of publicly available package versions as published to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- package/dist/index.cjs +22 -20
- package/dist/index.d.ts +10 -10
- package/dist/index.js +21 -19
- package/package.json +1 -1
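
Taken together, the dist files show 1.1.12 introducing an exported `Functions` namespace (the hunk context suggests it replaces the earlier `Apps` namespace) backed by `/functions` endpoints, adding a `function_published` flag to streaming execute responses, and adding a `minItems` constraint to array schemas. The sketch below is a hedged usage example based only on the `index.d.ts` declarations in this diff; the client construction, base URL, and environment variable name are assumptions, as is the premise that the `openai: OpenAI` parameter refers to the client from the `openai` package.

```ts
import OpenAI from "openai";
import { Functions } from "objectiveai";

// Hypothetical client setup; not part of this diff.
const openai = new OpenAI({
  baseURL: "https://api.objectiveai.example/v1", // assumed endpoint
  apiKey: process.env.OBJECTIVEAI_API_KEY,       // assumed env var
});

async function main(): Promise<void> {
  // GET /functions -> { data: FunctionHeader[] }
  const { data: headers } = await Functions.list(openai);

  // GET /functions/count -> { data: number }
  const { data: total } = await Functions.count(openai);
  console.log(`listed ${headers.length} of ${total} functions`);

  const first = headers[0];
  if (first) {
    // GET /functions/{author}/{id} (no version segment when version is omitted)
    const fn = await Functions.retrieve(openai, first.author, first.id);
    console.log(fn.description, Object.keys(fn.input_schema));
  }
}

main().catch(console.error);
```
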
package/dist/index.cjs
CHANGED

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.MultichatModel = exports.ScoreModel = exports.MultichatLlm = exports.ScoreLlm = exports.Metadata = exports.Auth = exports.Models = exports.
+exports.MultichatModel = exports.ScoreModel = exports.MultichatLlm = exports.ScoreLlm = exports.Metadata = exports.Auth = exports.Models = exports.Functions = exports.Conversation = exports.Multichat = exports.Score = exports.Chat = void 0;
 var Chat;
 (function (Chat) {
 let Completions;

@@ -663,8 +663,8 @@ var Conversation;
 Completions.list = list;
 })(Completions = Conversation.Completions || (Conversation.Completions = {}));
 })(Conversation || (exports.Conversation = Conversation = {}));
-var
-(function (
+var Functions;
+(function (Functions) {
 let Response;
 (function (Response) {
 let Streaming;

@@ -731,14 +731,16 @@ var Apps;
 const [output, outputChanged] = merge(a.output, b.output);
 const [retry_token, retry_tokenChanged] = merge(a.retry_token, b.retry_token);
 const [error, errorChanged] = merge(a.error, b.error);
-const [
+const [function_published, function_publishedChanged] = merge(a.function_published, b.function_published);
 if (completionsChanged ||
 outputChanged ||
 retry_tokenChanged ||
 errorChanged ||
-
+function_publishedChanged) {
 return [
-Object.assign(Object.assign(Object.assign(Object.assign({ completions }, (output !== undefined ? { output } : {})), (retry_token !== undefined ? { retry_token } : {})), (error !== undefined ? { error } : {})), (
+Object.assign(Object.assign(Object.assign(Object.assign({ completions }, (output !== undefined ? { output } : {})), (retry_token !== undefined ? { retry_token } : {})), (error !== undefined ? { error } : {})), (function_published !== undefined
+? { function_published }
+: {})),
 true,
 ];
 }

@@ -748,41 +750,41 @@ var Apps;
 }
 Streaming.merged = merged;
 })(Streaming = Response.Streaming || (Response.Streaming = {}));
-})(Response =
+})(Response = Functions.Response || (Functions.Response = {}));
 async function list(openai, listOptions, options) {
-const response = await openai.get("/
+const response = await openai.get("/functions", Object.assign({ query: listOptions }, options));
 return response;
 }
-
+Functions.list = list;
 async function count(openai, options) {
-const response = await openai.get("/
+const response = await openai.get("/functions/count", options);
 return response;
 }
-
+Functions.count = count;
 async function retrieve(openai, author, id, version, retrieveOptions, options) {
 const url = version !== null && version !== undefined
-? `/
-: `/
+? `/functions/${author}/${id}/${version}`
+: `/functions/${author}/${id}`;
 const response = await openai.get(url, Object.assign({ query: retrieveOptions }, options));
 return response;
 }
-
+Functions.retrieve = retrieve;
 async function executeById(openai, author, id, version, body, options) {
 var _a;
 const url = version !== null && version !== undefined
-? `/
-: `/
+? `/functions/${author}/${id}/${version}`
+: `/functions/${author}/${id}`;
 const response = await openai.post(url, Object.assign({ body, stream: (_a = body.stream) !== null && _a !== void 0 ? _a : false }, options));
 return response;
 }
-
+Functions.executeById = executeById;
 async function executeByDefinition(openai, body, options) {
 var _a;
-const response = await openai.post("/
+const response = await openai.post("/functions", Object.assign({ body, stream: (_a = body.stream) !== null && _a !== void 0 ? _a : false }, options));
 return response;
 }
-
-})(
+Functions.executeByDefinition = executeByDefinition;
+})(Functions || (exports.Functions = Functions = {}));
 var Models;
 (function (Models) {
 async function list(openai, listOptions, options) {
package/dist/index.d.ts
CHANGED

@@ -739,7 +739,7 @@ export declare namespace Conversation {
 }>;
 }
 }
-export declare namespace
+export declare namespace Functions {
 interface Expression {
 $jmespath: string;
 }

@@ -749,7 +749,7 @@ export declare namespace Apps {
 interface Array {
 type: "array";
 description?: string | null;
-
+minItems?: number | null;
 items: Array.ItemsProperty | Record<string, Array.ItemsProperty>;
 }
 namespace Array {

@@ -949,7 +949,7 @@ export declare namespace Apps {
 output?: JsonValue;
 retry_token?: string;
 error?: ObjectiveAIError;
-
+function_published?: boolean;
 }
 type CompletionChunk = CompletionChunk.ScoreCompletionChunk | CompletionChunk.MultichatCompletionChunk;
 namespace CompletionChunk {

@@ -977,7 +977,7 @@ export declare namespace Apps {
 output: JsonValue;
 retry_token: string | null;
 error: ObjectiveAIError | null;
-
+function_published?: boolean;
 }
 type Completion = Completion.Score | Completion.Multichat;
 namespace Completion {

@@ -992,19 +992,19 @@ export declare namespace Apps {
 }
 }
 }
-interface
+interface FunctionHeader {
 author: string;
 id: string;
 version: number;
 description: string;
 }
-interface
+interface Function extends FunctionHeader {
 input_schema: Record<string, InputProperty>;
 output_schema: OutputProperty;
 steps: WithExpression<WithExpression<Step>[]>;
 output: Expression | Record<string, Expression>;
 }
-interface
+interface FunctionWithMetadata extends Function {
 created: string;
 requests: number;
 completion_tokens: number;

@@ -1012,13 +1012,13 @@ export declare namespace Apps {
 total_cost: number;
 }
 function list(openai: OpenAI, listOptions?: Models.ListOptions, options?: OpenAI.RequestOptions): Promise<{
-data:
+data: FunctionHeader[];
 }>;
 function count(openai: OpenAI, options?: OpenAI.RequestOptions): Promise<{
 data: number;
 }>;
-function retrieve(openai: OpenAI, author: string, id: string, version?: number | null, retrieveOptions?: Models.RetrieveOptionsWithoutMetadata, options?: OpenAI.RequestOptions): Promise<
-function retrieve(openai: OpenAI, author: string, id: string, version?: number | null, retrieveOptions?: Models.RetrieveOptionsWithMetadata, options?: OpenAI.RequestOptions): Promise<
+function retrieve(openai: OpenAI, author: string, id: string, version?: number | null, retrieveOptions?: Models.RetrieveOptionsWithoutMetadata, options?: OpenAI.RequestOptions): Promise<Function>;
+function retrieve(openai: OpenAI, author: string, id: string, version?: number | null, retrieveOptions?: Models.RetrieveOptionsWithMetadata, options?: OpenAI.RequestOptions): Promise<FunctionWithMetadata>;
 function executeById(openai: OpenAI, author: string, id: string, version: number | null | undefined, body: Request.ExecuteByIdRequestStreaming, options?: OpenAI.RequestOptions): Promise<Stream<Response.Streaming.ExecuteResponseChunk>>;
 function executeById(openai: OpenAI, author: string, id: string, version: number | null | undefined, body: Request.ExecuteByIdRequestNonStreaming, options?: OpenAI.RequestOptions): Promise<Response.Unary.ExecuteResponse>;
 function executeByDefinition(openai: OpenAI, body: Request.ExecuteByDefinitionRequestStreaming, options?: OpenAI.RequestOptions): Promise<Stream<Response.Streaming.ExecuteResponseChunk>>;
package/dist/index.js
CHANGED

@@ -660,8 +660,8 @@ export var Conversation;
 Completions.list = list;
 })(Completions = Conversation.Completions || (Conversation.Completions = {}));
 })(Conversation || (Conversation = {}));
-export var
-(function (
+export var Functions;
+(function (Functions) {
 let Response;
 (function (Response) {
 let Streaming;

@@ -728,14 +728,16 @@ export var Apps;
 const [output, outputChanged] = merge(a.output, b.output);
 const [retry_token, retry_tokenChanged] = merge(a.retry_token, b.retry_token);
 const [error, errorChanged] = merge(a.error, b.error);
-const [
+const [function_published, function_publishedChanged] = merge(a.function_published, b.function_published);
 if (completionsChanged ||
 outputChanged ||
 retry_tokenChanged ||
 errorChanged ||
-
+function_publishedChanged) {
 return [
-Object.assign(Object.assign(Object.assign(Object.assign({ completions }, (output !== undefined ? { output } : {})), (retry_token !== undefined ? { retry_token } : {})), (error !== undefined ? { error } : {})), (
+Object.assign(Object.assign(Object.assign(Object.assign({ completions }, (output !== undefined ? { output } : {})), (retry_token !== undefined ? { retry_token } : {})), (error !== undefined ? { error } : {})), (function_published !== undefined
+? { function_published }
+: {})),
 true,
 ];
 }

@@ -745,41 +747,41 @@ export var Apps;
 }
 Streaming.merged = merged;
 })(Streaming = Response.Streaming || (Response.Streaming = {}));
-})(Response =
+})(Response = Functions.Response || (Functions.Response = {}));
 async function list(openai, listOptions, options) {
-const response = await openai.get("/
+const response = await openai.get("/functions", Object.assign({ query: listOptions }, options));
 return response;
 }
-
+Functions.list = list;
 async function count(openai, options) {
-const response = await openai.get("/
+const response = await openai.get("/functions/count", options);
 return response;
 }
-
+Functions.count = count;
 async function retrieve(openai, author, id, version, retrieveOptions, options) {
 const url = version !== null && version !== undefined
-? `/
-: `/
+? `/functions/${author}/${id}/${version}`
+: `/functions/${author}/${id}`;
 const response = await openai.get(url, Object.assign({ query: retrieveOptions }, options));
 return response;
 }
-
+Functions.retrieve = retrieve;
 async function executeById(openai, author, id, version, body, options) {
 var _a;
 const url = version !== null && version !== undefined
-? `/
-: `/
+? `/functions/${author}/${id}/${version}`
+: `/functions/${author}/${id}`;
 const response = await openai.post(url, Object.assign({ body, stream: (_a = body.stream) !== null && _a !== void 0 ? _a : false }, options));
 return response;
 }
-
+Functions.executeById = executeById;
 async function executeByDefinition(openai, body, options) {
 var _a;
-const response = await openai.post("/
+const response = await openai.post("/functions", Object.assign({ body, stream: (_a = body.stream) !== null && _a !== void 0 ? _a : false }, options));
 return response;
 }
-
-})(
+Functions.executeByDefinition = executeByDefinition;
+})(Functions || (Functions = {}));
 export var Models;
 (function (Models) {
 async function list(openai, listOptions, options) {