ai-functions 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -1
- package/ai.js +105 -0
- package/example.js +12 -0
- package/index.js +0 -84
- package/index.test.js +35 -25
- package/package.json +8 -4
- package/schema.js +50 -0
package/README.md
CHANGED
@@ -8,7 +8,50 @@ Key Features:
 ```javascript
 import { AI } from 'ai-functions'
 
-const ai = AI({ apiKey: OPENAI_API_KEY })
+const { ai, gpt, list } = AI({ apiKey: OPENAI_API_KEY })
+```
 
+Then you can use magic `ai` functions:
+```javascript
+
+const product = await ai.categorizeProduct({ domain: name }, {
+  productType: 'App | API | Marketplace | Platform | Packaged Service | Professional Service | Website',
+  customer: 'ideal customer profile in 3-5 words',
+  solution: 'describe the offer in 4-10 words',
+  description: 'website meta description',
+})
+```
+
+you can also use `list` tagged template as a convienence function:
+
+```javascript
+const things = await list`fun things to do in Miami`
+console.log(things)
+```
+
+or with Async iterators:
+
+```javascript
+for await (const thing of list`fun things to do in Miami`) {
+  console.log(thing)
+}
+```
+
+Or in a more complex example:
+
+```javascript
+
+const listBlogPosts => (count, topic) => list`${count} blog post titles about ${topic}`
+const writeBlogPost => title => gpt`write a blog post in markdown starting with "# ${title}"`
+
+const writeBlog = async (count, topic) => {
+  for await (const title of listBlogPosts(count, topic)) {
+    const content = await writeBlogPost(title)
+    yield { title, content }
+  }
+}
 
+for await (const post of writeBlog(25, 'future of car sales')) {
+  console.log({ post })
+}
 ```
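As published, the complex README example above will not run: `const listBlogPosts => ...` and `const writeBlogPost => ...` are not valid assignments, and `writeBlog` uses `yield` without being declared a generator. A minimal corrected sketch, keeping the same API and assuming top-level await in an ES module:

```javascript
// Hypothetical corrected version of the README example; not part of the published package.
import { AI } from 'ai-functions'

const { gpt, list } = AI({ apiKey: process.env.OPENAI_API_KEY })

const listBlogPosts = (count, topic) => list`${count} blog post titles about ${topic}`
const writeBlogPost = title => gpt`write a blog post in markdown starting with "# ${title}"`

// writeBlog must be an async generator so that `yield` is legal and the result
// can be consumed with `for await`.
async function* writeBlog(count, topic) {
  for await (const title of listBlogPosts(count, topic)) {
    const content = await writeBlogPost(title)
    yield { title, content }
  }
}

for await (const post of writeBlog(25, 'future of car sales')) {
  console.log({ post })
}
```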
package/ai.js
ADDED
@@ -0,0 +1,105 @@
+import { OpenAI } from 'openai'
+import camelcaseKeys from 'camelcase-keys'
+import { dump } from 'js-yaml'
+import { schema } from './schema.js'
+
+export const AI = opts => {
+  const { system, model = 'gpt-3.5-turbo', apiKey, OPENAI_API_KEY, ...rest } = opts || {}
+
+  const openai = new OpenAI({ apiKey: apiKey ?? OPENAI_API_KEY, ...rest })
+
+  const gpt = async (strings, ...values) => {
+    const user = values.map((value, i) => strings[i] + value).join('') + strings[strings.length - 1]
+    const prompt = {
+      model,
+      messages: [
+        { role: 'user', content: user },
+      ],
+    }
+    if (system) prompt.messages.unshift({ role: 'system', content: system })
+    const completion = await openai.chat.completions.create(prompt)
+    return completion.choices?.[0].message.content
+  }
+
+  const ai = new Proxy({}, {
+    get: (target, functionName, receiver) => {
+      return async (args, returnSchema, options) => {
+        console.log(schema(returnSchema))
+        const { system, description, model = 'gpt-3.5-turbo', meta = false, ...rest } = options || {}
+        const prompt = {
+          model,
+          messages: [
+            { role: 'user', content: `Call ${functionName} given the context:\n${dump(args)}` }, // \nThere is no additional information, so make assumptions/guesses as necessary` },
+          ],
+          functions: [{
+            name: functionName,
+            description,
+            parameters: schema(returnSchema),
+          }],
+          ...rest,
+        }
+        if (system) prompt.messages.unshift({ role: 'system', content: system })
+        const completion = await openai.chat.completions.create(prompt)
+        let data, error
+        const { message } = completion.choices?.[0]
+        prompt.messages.push(message)
+        const { content, function_call } = message
+        if (function_call) {
+          try {
+            data = JSON.parse(function_call.arguments)
+          } catch (err) {
+            error = err.message
+          }
+        }
+        const gpt4 = model.includes('gpt-4')
+        const cost = Math.round((gpt4
+          ? completion.usage.prompt_tokens * 0.003 + completion.usage.completion_tokens * 0.006
+          : completion.usage.prompt_tokens * 0.00015 + completion.usage.completion_tokens * 0.0002) * 100000) / 100000
+        completion.usage = camelcaseKeys(completion.usage)
+        console.log({ data, content, error, cost })
+        return meta ? { prompt, content, data, error, cost, ...completion } : data ?? content
+      }
+    }
+  })
+
+  async function* list(strings, ...values) {
+    const listPrompt = values.map((value, i) => strings[i] + value).join('') + strings[strings.length - 1]
+    const prompt = {
+      model,
+      messages: [{ role: 'user', content: 'List ' + listPrompt }],
+      stream: true,
+    }
+    if (system) prompt.messages.unshift({ role: 'system', content: system })
+    const stream = await openai.chat.completions.create(prompt)
+    let content = ''
+    let seperator = undefined
+    let numberedList = undefined
+
+    for await (const part of stream) {
+      const { delta, finish_reason } = part.choices[0]
+      content += delta?.content || ''
+      if (seperator === undefined && content.length > 4) {
+        numberedList = content.match(/(\d+\.\s)/g)
+        seperator = numberedList ? '\n' : ', '
+      }
+
+      const numberedRegex = /\d+\.\s(?:")?([^"]+)(?:")?/
+
+      if (content.includes(seperator)) {
+        // get the string before the newline, and modify `content` to be the string after the newline
+        // then yield the string before the newline
+        const items = content.split(seperator)
+        while (items.length > 1) {
+          const item = items.shift()
+          yield numberedList ? item.match(numberedRegex)?.[1] : item
+        }
+        content = items[0]
+      }
+
+      if (finish_reason) yield numberedList ? content.match(numberedRegex)?.[1] : content
+    }
+
+  }
+
+  return { ai, list, gpt }
+}
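In the new ai.js above, any property access on `ai` (via the Proxy) returns an async function with the signature `(args, returnSchema, options)`: `args` is YAML-dumped into the user message, `returnSchema` is converted by `schema()` into the OpenAI function-calling `parameters`, and `options.meta` switches between returning just the parsed data and the full metadata. A usage sketch (the function and property names here are illustrative, not from the package):

```javascript
import { AI } from './ai.js'

const { ai } = AI({ apiKey: process.env.OPENAI_API_KEY })

// `categorizeCompany` is an arbitrary name; the Proxy turns it into the OpenAI
// function name. The second argument is the return schema as a description object.
const result = await ai.categorizeCompany(
  { domain: 'example.com' },                                     // context, serialized to YAML
  { industry: 'primary industry', tagline: 'one-line tagline' }, // becomes JSON Schema via schema()
  { model: 'gpt-3.5-turbo', meta: true }                         // meta: true returns { prompt, data, error, cost, ... }
)

console.log(result.data)  // parsed function_call arguments, e.g. { industry, tagline }
console.log(result.cost)  // rough cost estimate computed from token usage
```

With `meta` omitted (or false), the call resolves directly to the parsed arguments object, falling back to the raw message content if no function call was returned.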
package/example.js
CHANGED
package/index.js
CHANGED
@@ -1,84 +0,0 @@
-import { Configuration, OpenAIApi } from 'openai-edge'
-
-const configuration = new Configuration({
-  apiKey: process.env.OPENAI_API_KEY,
-  basePath: 'https://aihooks.dev/v1',
-  baseOptions: {
-    headers: {
-      'webhook-app-id': process.env.AIHOOKS_APP_ID
-    }
-  }
-})
-const openai = new OpenAIApi(configuration)
-
-// const startTime = Date.now()
-// const response = await openai.createChatCompletion({
-//   // model: 'gpt-3.5-turbo-0613',
-//   model: 'gpt-4-0613',
-//   messages: [
-//     { role: 'system', content: 'You are an expert marketer.' },
-//     // { role: 'user', content: 'Write an ES6 function to convert base16 to base62' },
-//     // { role: 'user', content: 'Write an ES6 function to do Fizz Buzz' },
-//     // { role: 'user', content: 'List 2 possible blog post titles about APIs' },
-//     // { role: 'assistant', content: 'The Los Angeles Dodgers won the World Series in 2020.' },
-//     // { role: 'user', content: 'Where was it played?' },
-//     { role: 'user', content: 'Write a landing page for Driv.ly' },
-//   ],
-//   functions: [{
-//     name: 'writeLandingPage',
-//     // description: 'Write a landing page',
-//     parameters: {
-//       type: 'object',
-//       properties: {
-//         title: { type: 'string', description: 'The title of the landing page' },
-//         description: { type: 'string', description: 'The description of the landing page' },
-//         heroTitle: { type: 'string', description: 'The hero title of the landing page' },
-//         heroDescription: { type: 'string', description: 'The hero description of the landing page' },
-//         // features: { type: 'array', description: 'The features of the landing page' },
-//         featuresTitle: { type: 'string', description: 'The features title of the landing page' },
-//         featuresDescription: { type: 'string', description: 'The features description of the landing page' },
-//         // features: { type: 'array', description: 'The features of the landing page' },
-//       },
-//       required: ['title', 'description', 'heroTitle', 'heroDescription', 'featuresTitle', 'featuresDescription',]
-//     }
-//   }]
-
-//   // max_tokens: 7,
-//   // temperature: 0,
-//   // stream: true,
-//   // go: true,
-// })
-// const completion = await response.json()
-// const requestTime = Date.now() - startTime
-// // console.timeEnd('openai')
-// const processingTime = parseInt(response.headers.get('openai-processing-ms'))
-// const latency = requestTime - processingTime
-// const status = response.status
-
-// const functionName = completion?.choices?.[0]?.message?.function_call?.name
-// const args = JSON.parse(completion?.choices?.[0]?.message?.function_call?.arguments)
-
-// console.log({ requestTime, processingTime, latency })
-// console.log(completion?.choices?.[0])
-// console.log(completion?.error)
-// console.log({ functionName, args })
-
-export const runtime = {
-
-}
-
-export const AI = (functionName, callback) => {
-  runtime[functionName] = callback
-}
-
-export const getJsonSchema = propDescriptions => {
-  // assume an object like this: { name: 'The name of the person', age: 'The age of the person' }
-  // return an object like this: { type: 'object', properties: { name: { type: 'string', description: 'The name of the person' }, age: { type: 'number', description: 'The age of the person' } } required: ['name', 'age'] }
-  const properties = Object.entries(propDescriptions).reduce((acc, [key, value]) => {
-    acc[key] = { type: typeof value, description: value }
-    return acc
-  }
-  , {})
-  const required = Object.keys(properties)
-  return { type: 'object', properties, required }
-}
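For reference, the removed `getJsonSchema` helper only handled a flat description object, marking every key as a required string property; the nested object, array, and enum handling now lives in the new schema.js below. Roughly what the old helper returned:

```javascript
// Behavior of the removed getJsonSchema (flat properties only, all keys required):
getJsonSchema({ name: 'The name of the person', age: 'The age of the person' })
// => {
//   type: 'object',
//   properties: {
//     name: { type: 'string', description: 'The name of the person' },
//     age: { type: 'string', description: 'The age of the person' }
//   },
//   required: ['name', 'age']
// }
```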
package/index.test.js
CHANGED
@@ -1,10 +1,7 @@
-// import assert from 'node:assert'
-// import { describe, it } from 'node:test'
-
 import { describe, test, it, expect } from 'vitest'
 
 import { AI } from './index.js'
-
+const { ai, list, gpt } = AI()
 
 test('Math.sqrt()', () => {
   expect(Math.sqrt(4)).toBe(2)
@@ -12,43 +9,56 @@ test('Math.sqrt()', () => {
   expect(Math.sqrt(2)).toBe(Math.SQRT2)
 })
 
+test('getJsonSchema', () => {
 
-
+  const jsonSchema = schema({
+    name: 'The name of the person',
+    age: 'The age of the person'
+  })
 
-
+  expect(jsonSchema).toEqual({
+    type: 'object',
+    properties: {
+      name: { type: 'string', description: 'The name of the person' },
+      age: { type: 'string', description: 'The age of the person' } },
+    required: ['name', 'age']
+  })
 
-
-    name: 'The name of the person',
-    age: 'The age of the person'
-  })
+})
 
-
-    type: 'object',
-    properties: {
-      name: { type: 'string', description: 'The name of the person' },
-      age: { type: 'string', description: 'The age of the person' } },
-    required: ['name', 'age']
-  })
-
-})
+test('list', () => {
 
-
-
+})
+
+test('ai', () => {
+  expect(ai.writeLandingPage({
+    brand: 'Auto.dev',
+    audience: 'developers',
+    offers: 'Automotive Data APIs',
+  })).toEqual({
+    functionName: 'writeLandingPage',
+    args: {
+      brand: 'Auto.dev',
+      audience: 'developers',
+      offers: 'Automotive Data APIs',
+    }
   })
+
+  // AI('writeLandingPage', ({ title, description, heroTitle, heroDescription, featuresTitle, featuresDescription }) =>
 })
 
-
+test('A thing', () => {
   it('should work', () => {
-
+    expect(3).toBe(3)
   })
 
   it('should be ok', () => {
-
+    expect(2).toBe(2)
   })
 
   describe('a nested thing', () => {
     it('should work', () => {
-
+      expect(3).toBe(3)
     })
   })
 })
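Note that, as diffed, index.test.js still imports `AI` from './index.js' (emptied above) and calls `schema` without importing it anywhere in the file; presumably both are intended to come from the new ai.js and schema.js. A sketch of the imports that would make the test file self-consistent, assuming no other entry point re-exports them:

```javascript
import { describe, test, it, expect } from 'vitest'

// Assumed import paths; the published diff does not show where these symbols come from.
import { AI } from './ai.js'
import { schema } from './schema.js'

const { ai, list, gpt } = AI()
```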
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai-functions",
-  "version": "0.0.1",
+  "version": "0.0.2",
   "description": "Library for Developing and Managing AI Functions (including OpenAI GPT4 / GPT3.5)",
   "main": "index.js",
   "type": "module",
@@ -19,10 +19,14 @@
     "url": "https://github.com/nathanclevenger/ai-functions/issues"
   },
   "homepage": "https://github.com/nathanclevenger/ai-functions#readme",
-  "dependencies": {
-    "openai-edge": "^1.1.0"
-  },
   "devDependencies": {
     "vitest": "^0.33.0"
+  },
+  "dependencies": {
+    "camelcase-keys": "^9.1.0",
+    "js-yaml": "^4.1.0",
+    "openai": "^4.11.1",
+    "partial-json-parser": "^1.2.2",
+    "yaml": "^2.3.2"
   }
 }
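The dependency swap above matches the client migration visible in the code: openai-edge's Configuration/OpenAIApi pair (used by the removed index.js) is replaced by the openai v4 client used in the new ai.js. Side by side, as a sketch:

```javascript
// Before (removed index.js, openai-edge ^1.1.0):
import { Configuration, OpenAIApi } from 'openai-edge'
const legacyClient = new OpenAIApi(new Configuration({ apiKey: process.env.OPENAI_API_KEY }))

// After (new ai.js, openai ^4.x):
import { OpenAI } from 'openai'
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
const completion = await client.chat.completions.create({
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Hello' }],
})
```

partial-json-parser and yaml are also added as dependencies but are not referenced in any of the files shown in this diff.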
package/schema.js
ADDED
@@ -0,0 +1,50 @@
+export const schema = propDescriptions => {
+  // assume an object like this: { name: 'The name of the person', age: 'The age of the person' }
+  // return an object like this: { type: 'object', properties: { name: { type: 'string', description: 'The name of the person' }, age: { type: 'number', description: 'The age of the person' } } required: ['name', 'age'] }
+  if (Array.isArray(propDescriptions)) {
+    const [ itemValue ] = propDescriptions
+    const itemType = typeof itemValue
+    if (itemType == 'string') {
+      return { type: 'array', description: itemValue, items: { type: 'string' }}
+    } else if (itemType == 'object') {
+      const { _description, itemSchema } = itemValue
+      return { type: 'array', description: _description, items: schema(itemSchema)}
+    }
+  } else {
+    const properties = Object.entries(propDescriptions).reduce((acc, [key, value]) => {
+      const type = typeof value
+      if (Array.isArray(value)) {
+        const [ itemValue ] = value
+        const itemType = typeof itemValue
+        if (itemType == 'string') {
+          if (itemValue.includes('|')) {
+            acc[key] = { type: 'string', enum: itemValue.split('|').map(value => value.trim()) }
+          } else {
+            acc[key] = { type: 'array', description: itemValue, items: { type: 'string' }}
+          }
+        } else if (itemType == 'object') {
+          // const { _description, itemSchema } = itemValue
+          const description = itemValue._description ? `${itemValue._description}` : undefined
+          if (description) delete itemValue._description
+          acc[key] = { type: 'array', description, items: schema(itemValue)}
+        }
+      } else {
+        if (type == 'string') {
+          if (value.includes('|')) {
+            acc[key] = { type: 'string', enum: value.split('|').map(value => value.trim()) }
+          } else {
+            acc[key] = { type, description: value }
+          }
+        } else if (type == 'object') {
+          if (value._description) value._description = undefined
+          acc[key] = schema(value)
+        } else {
+          acc[key] = { type, description: value }
+        }
+      }
+      return acc
+    }, {})
+    const required = Object.keys(properties)
+    return { type: 'object', properties, required }
+  }
+}
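To make the conventions in schema.js concrete: plain string values become described string properties, an 'A | B | C' string becomes an enum, and a single-element array describes an array property. The property names below are illustrative; the expected output is read off the code above, so treat it as an approximation:

```javascript
import { schema } from './schema.js'

schema({
  productType: 'App | API | Marketplace | Platform',
  tags: ['keywords describing the product'],
  customer: 'ideal customer profile in 3-5 words',
})
// => {
//   type: 'object',
//   properties: {
//     productType: { type: 'string', enum: ['App', 'API', 'Marketplace', 'Platform'] },
//     tags: { type: 'array', description: 'keywords describing the product', items: { type: 'string' } },
//     customer: { type: 'string', description: 'ideal customer profile in 3-5 words' }
//   },
//   required: ['productType', 'tags', 'customer']
// }
```

Every top-level key ends up in `required`, which matches the expectation in the `getJsonSchema` test above.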