ai-functions 0.2.16 → 0.2.17
- package/dist/cjs/functions/ai.js +4 -4
- package/dist/mjs/functions/ai.js +2 -2
- package/functions/ai.ts +2 -2
- package/package.json +1 -1
package/dist/cjs/functions/ai.js
CHANGED
@@ -27,7 +27,7 @@ const js_yaml_1 = require("js-yaml");
 const schema_1 = require("../utils/schema");
 const AI = (config = {}) => {
     var _a;
-    const { model = 'gpt-4-
+    const { model = 'gpt-4-turbo-preview', system } = config, rest = __rest(config, ["model", "system"]);
     const openai = (_a = config.openai) !== null && _a !== void 0 ? _a : new openai_1.OpenAI(rest);
     // const { client, db, cache, events, queue } = config.db ? AIDB(config.db) : {}
     // const prompt = {
@@ -43,9 +43,9 @@ const AI = (config = {}) => {
     const ai = new Proxy({}, {
         get: (target, functionName, receiver) => {
             target[functionName] = (returnSchema, options) => (args, callOptions) => __awaiter(void 0, void 0, void 0, function* () {
-                var _a;
+                var _a, _b;
                 console.log((0, schema_1.generateSchema)(returnSchema));
-                const
+                const _c = Object.assign(Object.assign({}, options), callOptions), { system, description, model = (_a = config.model) !== null && _a !== void 0 ? _a : 'gpt-4-turbo-preview', meta = false } = _c, rest = __rest(_c, ["system", "description", "model", "meta"]);
                 const prompt = Object.assign({ model, messages: [
                     {
                         role: 'user',
@@ -70,7 +70,7 @@ const AI = (config = {}) => {
                 const schema = (0, schema_1.generateSchema)(returnSchema);
                 let data;
                 let error;
-                const { message } = (
+                const { message } = (_b = completion.choices) === null || _b === void 0 ? void 0 : _b[0];
                 console.log({ message });
                 prompt.messages.push(message);
                 const { content, tool_calls } = message;
package/dist/mjs/functions/ai.js
CHANGED
@@ -3,7 +3,7 @@ import { OpenAI, } from 'openai';
 import { dump } from 'js-yaml';
 import { generateSchema } from '../utils/schema';
 export const AI = (config = {}) => {
-    const { model = 'gpt-4-
+    const { model = 'gpt-4-turbo-preview', system, ...rest } = config;
     const openai = config.openai ?? new OpenAI(rest);
     // const { client, db, cache, events, queue } = config.db ? AIDB(config.db) : {}
     // const prompt = {
@@ -20,7 +20,7 @@ export const AI = (config = {}) => {
         get: (target, functionName, receiver) => {
             target[functionName] = (returnSchema, options) => async (args, callOptions) => {
                 console.log(generateSchema(returnSchema));
-                const { system, description, model = 'gpt-
+                const { system, description, model = config.model ?? 'gpt-4-turbo-preview', meta = false, ...rest } = { ...options, ...callOptions };
                 const prompt = {
                     model,
                     messages: [
package/functions/ai.ts
CHANGED
@@ -28,7 +28,7 @@ type AIFunctions<T = Record<string,string>> = Record<string, (
 >
 
 export const AI = (config: AIConfig = {}) => {
-  const { model = 'gpt-4-
+  const { model = 'gpt-4-turbo-preview', system, ...rest } = config
   const openai = config.openai ?? new OpenAI(rest)
   // const { client, db, cache, events, queue } = config.db ? AIDB(config.db) : {}
   // const prompt = {
@@ -48,7 +48,7 @@ export const AI = (config: AIConfig = {}) => {
    get: (target, functionName: string, receiver) => {
      target[functionName] = (returnSchema: Record<string,any>, options: FunctionCallOptions) => async (args: string | object, callOptions?: FunctionCallOptions) => {
        console.log(generateSchema(returnSchema))
-        const { system, description, model = 'gpt-
+        const { system, description, model = config.model ?? 'gpt-4-turbo-preview', meta = false, ...rest } = { ...options, ...callOptions }
        const prompt: ChatCompletionCreateParamsBase = {
          model,
          messages: [
package/package.json
CHANGED
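
Taken together, the hunks above change the default chat model to 'gpt-4-turbo-preview' and let each proxied call fall back to config.model before that default. The following is a minimal usage sketch, not part of the diff, assuming the package entry point re-exports AI and that AI() returns the proxied ai object (neither is shown in the hunks); the function name categorize, the schema, and the API-key handling are illustrative only.

// Hypothetical usage sketch (ESM, top-level await); names marked above are assumptions.
import { AI } from 'ai-functions'                      // assumes the entry point re-exports AI

const ai = AI({ apiKey: process.env.OPENAI_API_KEY })  // remaining config keys (rest) are passed to new OpenAI(rest)

// Each proxied property is a factory: (returnSchema, options) => async (args, callOptions)
const categorize = ai.categorize(
  { category: 'one-word category for the input' },     // returnSchema, run through generateSchema()
  { description: 'Categorize the given text' },        // options
)

// callOptions are merged over options; model falls back to config.model ?? 'gpt-4-turbo-preview' when omitted
const result = await categorize('OpenAI ships a faster GPT-4 variant', { model: 'gpt-4' })
console.log(result)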