nuxt-chatgpt 0.1.10 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md
CHANGED
@@ -14,7 +14,7 @@
 
 ## About the module
 
-This user-friendly module boasts of an easy integration process that enables seamless implementation into any [Nuxt 3](https://nuxt.com) project. With type-safe integration, you can integrate [ChatGPT](https://openai.com/) into your [Nuxt 3](https://nuxt.com) project without breaking a <b>sweat</b>. Enjoy easy access to the `chat`, and `chatCompletion` methods through the `useChatgpt()` composable. Additionally, the module guarantees <b><i>security</i></b> as requests are routed through a [Nitro Server](https://nuxt.com/docs/guide/concepts/server-engine), thus preventing the exposure of your <b>API Key</b>.
+This user-friendly module boasts of an easy integration process that enables seamless implementation into any [Nuxt 3](https://nuxt.com) project. With type-safe integration, you can integrate [ChatGPT](https://openai.com/) into your [Nuxt 3](https://nuxt.com) project without breaking a <b>sweat</b>. Enjoy easy access to the `chat`, and `chatCompletion` methods through the `useChatgpt()` composable. Additionally, the module guarantees <b><i>security</i></b> as requests are routed through a [Nitro Server](https://nuxt.com/docs/guide/concepts/server-engine), thus preventing the exposure of your <b>API Key</b>. The module use [openai](https://github.com/openai/openai-node) library version 4.0.0 behind the scene.
 
 ## Features
 
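The README excerpt above is the only place this diff describes the `useChatgpt()` composable, so the sketch below is illustrative only: the exact call signature is not part of the diff, and `chat(message, model?, options?)` resolving to the reply text is an assumption based on the module's documentation.

```js
// pages/example.vue — <script setup> portion only (sketch, not part of this diff).
// Assumption: useChatgpt() is auto-imported by the module and chat()/chatCompletion()
// accept a message string plus optional model and options, resolving to the reply text.
const { chat, chatCompletion } = useChatgpt()

async function askSomething() {
  // Both helpers call the module's Nitro server routes, so the OpenAI API key
  // stays server-side and is never shipped to the browser.
  const reply = await chat('Hello, what can you do?')
  console.log(reply)
}
```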
package/dist/module.json
CHANGED
@@ -1,5 +1,5 @@
+import OpenAI from "openai";
 import { createError, defineEventHandler, readBody } from "h3";
-import { Configuration, OpenAIApi } from "openai";
 import { defaultOptions } from "../../constants/options.mjs";
 import { MODEL_GPT_TURBO_3_5 } from "../../constants/models.mjs";
 import { modelMap } from "../../utils/model-map.mjs";
@@ -12,18 +12,17 @@ export default defineEventHandler(async (event) => {
       message: "Missing OpenAI API Key"
     });
   }
-  const configuration = new Configuration({
+  const openai = new OpenAI({
     apiKey: useRuntimeConfig().chatgpt.apiKey
   });
-  const openai = new OpenAIApi(configuration);
   const requestOptions = {
     messages: [{ role: "user", content: message }],
     model: !model ? modelMap[MODEL_GPT_TURBO_3_5] : modelMap[model],
     ...options || defaultOptions
   };
   try {
-    const chatCompletion = await openai.createChatCompletion(requestOptions);
-    return chatCompletion.data.choices[0].message?.content;
+    const chatCompletion = await openai.chat.completions.create(requestOptions);
+    return chatCompletion.choices[0].message?.content;
   } catch (error) {
     throw createError({
       statusCode: 500,
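The hunks above migrate the chat-completions handler from the openai v3 client (`Configuration` + `OpenAIApi`) to the v4 client. Stripped of the Nuxt/Nitro wrapper, the before/after pattern looks roughly like this minimal sketch (the model name and message are placeholders, not values from the module):

```js
import OpenAI from "openai";

// openai v4: a single client object replaces Configuration + OpenAIApi.
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// v3: openai.createChatCompletion(...) returned an Axios-style response, so the
//     text lived at response.data.choices[0].message.content.
// v4: chat.completions.create(...) returns the payload directly.
const chatCompletion = await openai.chat.completions.create({
  model: "gpt-3.5-turbo",
  messages: [{ role: "user", content: "Hello!" }],
});

console.log(chatCompletion.choices[0].message?.content);
```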
@@ -1,5 +1,5 @@
+import OpenAI from "openai";
 import { createError, defineEventHandler, readBody } from "h3";
-import { Configuration, OpenAIApi } from "openai";
 import { defaultOptions } from "../../constants/options.mjs";
 import { modelMap } from "../../utils/model-map.mjs";
 import { useRuntimeConfig } from "#imports";
@@ -11,18 +11,17 @@ export default defineEventHandler(async (event) => {
       message: "Missing OpenAI API Key"
     });
   }
-  const configuration = new Configuration({
+  const openai = new OpenAI({
     apiKey: useRuntimeConfig().chatgpt.apiKey
   });
-  const openai = new OpenAIApi(configuration);
   const requestOptions = {
     prompt: message,
     model: !model ? modelMap.default : modelMap[model],
     ...options || defaultOptions
   };
   try {
-    const completion = await openai.createCompletion(requestOptions);
-    return completion.data.choices[0].text?.slice(2);
+    const completion = await openai.completions.create(requestOptions);
+    return completion.choices[0].text?.slice(2);
   } catch (error) {
     throw createError({
       statusCode: 500,
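This second pair of hunks applies the same v3 → v4 migration to the prompt-based handler, which calls the legacy text-completions endpoint and reads `choices[0].text` (trimming the leading characters with `.slice(2)`). A standalone sketch of the new call shape, with a placeholder model name rather than whatever `modelMap` resolves to in the module:

```js
import OpenAI from "openai";

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Legacy completions endpoint in openai v4: completions.create with a prompt
// instead of a messages array; the reply text is in choices[0].text.
const completion = await openai.completions.create({
  model: "gpt-3.5-turbo-instruct",
  prompt: "Say hello in one short sentence.",
  max_tokens: 50,
});

console.log(completion.choices[0].text);
```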
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nuxt-chatgpt",
-  "version": "0.1.10",
+  "version": "0.2.1",
   "description": "ChatGPT integration for Nuxt 3",
   "license": "MIT",
   "type": "module",
@@ -54,8 +54,8 @@
   },
   "dependencies": {
     "@nuxt/kit": "^3.1.1",
-    "
-    "
+    "defu": "^6.1.2",
+    "openai": "^4.0.0"
   },
   "devDependencies": {
     "@nuxt/eslint-config": "^0.1.1",