nuxt-chatgpt 0.2.2 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +21 -11
- package/dist/module.json +1 -1
- package/dist/runtime/constants/models.d.ts +3 -0
- package/dist/runtime/constants/models.mjs +3 -0
- package/dist/runtime/server/api/chat-completion.mjs +2 -2
- package/dist/runtime/server/api/chat.mjs +2 -2
- package/dist/runtime/utils/model-map.mjs +6 -0
- package/package.json +2 -2
package/README.md
CHANGED
@@ -1,10 +1,15 @@
 <!-- PROJECT LOGO -->
 <br />
 <div>
-<
-<
+<div>
+  <h1>Nuxt Chatgpt <a href="https://nuxtchatgpt.com" target="_blank">🔥(VIEW DEMO)🔥</a></h3>
+
+</div>
+<div style="display:flex; width:100%; justify-content:center">
+  <img src="images/logo.png" alt="Logo">
+</div>

-> [ChatGPT](https://
+> [ChatGPT](https://nuxtchatgpt.com) integration for [Nuxt 3](https://nuxt.com).

 [![npm version][npm-version-src]][npm-version-href]
 [![npm downloads][npm-downloads-src]][npm-downloads-href]
@@ -12,6 +17,10 @@
 </div>
 <br />

+## About the project
+
+[Nuxt ChatGPT](https://nuxtchatgpt.com) is a project built to showcase the capabilities of the Nuxt3 ChatGPT module. It functions as a ChatGPT clone with enhanced features, including the ability to organize and sort created documents into folders, offering an improved user experience for managing conversations and outputs.
+
 ## About the module

 This user-friendly module boasts of an easy integration process that enables seamless implementation into any [Nuxt 3](https://nuxt.com) project. With type-safe integration, you can integrate [ChatGPT](https://openai.com/) into your [Nuxt 3](https://nuxt.com) project without breaking a <b>sweat</b>. Enjoy easy access to the `chat`, and `chatCompletion` methods through the `useChatgpt()` composable. Additionally, the module guarantees <b><i>security</i></b> as requests are routed through a [Nitro Server](https://nuxt.com/docs/guide/concepts/server-engine), thus preventing the exposure of your <b>API Key</b>. The module use [openai](https://github.com/openai/openai-node) library version 4.0.0 behind the scene.
@@ -62,7 +71,7 @@ To access the `chat`, and `chatCompletion` methods in the nuxt-chatgpt module, y
 |--|--|--|--|
 |**message**|`String`|available only for `chat()`|A string representing the text message that you want to send to the GPT model for processing.
 |**messages**|`Array`|available only for `chatCompletion()`|An array of objects that contains `role` and `content`
-|**model**|`String`|`
+|**model**|`String`|`gpt-4o-mini` for `chat()` and `gpt-4o-mini` for `chatCompletion()`|Represent certain model for different types of natural language processing tasks.
 |**options**|`Object`|`{ temperature: 0.5, max_tokens: 2048, top_p: 1 frequency_penalty: 0, presence_penalty: 0 }`|An optional object that specifies any additional options you want to pass to the API request, such as the number of responses to generate, and the maximum length of each response.

 Available models:
@@ -73,6 +82,9 @@ Available models:
 - gpt-3.5-turbo-0301
 - gpt-3.5-turbo-1106
 - gpt-4
+- gpt-4o
+- gpt-4o-mini
+- gpt-4-turbo
 - gpt-4-1106-preview
 - gpt-4-0314
 - gpt-4-0613
@@ -80,10 +92,8 @@ Available models:
 - gpt-4-32k-0314
 - gpt-4-32k-0613

-You need to join waitlist to use gpt-4 models within `chatCompletion` method
-
 ### Simple `chat` usage
-In the following example, the model is unspecified, and the
+In the following example, the model is unspecified, and the gpt-4o-mini model will be used by default.

 ```js
 const { chat } = useChatgpt()
@@ -125,7 +135,7 @@ const inputData = ref('')

 async function sendMessage() {
   try {
-    const response = await chat(inputData.value, 'gpt-
+    const response = await chat(inputData.value, 'gpt-4o-mini')
     data.value = response
   } catch(error) {
     alert(`Join the waiting list if you want to use GPT-4 models: ${error}`)
@@ -148,7 +158,7 @@ async function sendMessage() {
 ```

 ### Simple `chatCompletion` usage
-In the following example, the model is unspecified, and the gpt-
+In the following example, the model is unspecified, and the gpt-4o-mini model will be used by default.

 ```js
 const { chatCompletion } = useChatgpt()
@@ -218,7 +228,7 @@ async function sendMessage() {

   chatTree.value.push(message)

-  const response = await chatCompletion(chatTree.value, 'gpt-
+  const response = await chatCompletion(chatTree.value, 'gpt-4o-mini')

   const responseMessage = {
     role: response[0].message.role,
@@ -293,7 +303,7 @@ Distributed under the MIT License. See `LICENSE.txt` for more information.

 Oliver Trajceski - [LinkedIn](https://mk.linkedin.com/in/oliver-trajceski-8a28b070) - oliver@akrinum.com

-Project Link: [https://
+Project Link: [https://nuxtchatgpt.com](https://nuxtchatgpt.com)

 ## Development

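Taken together, the README changes above move the documented default model to gpt-4o-mini and add gpt-4o, gpt-4o-mini, and gpt-4-turbo to the accepted model list. As orientation only, here is a minimal sketch of how that surfaces through the `useChatgpt()` composable described in the README; it assumes the module is registered in `nuxt.config` with a valid API key, and the prompts are placeholders rather than code from the package.

```js
// Minimal sketch based on the README above (nuxt-chatgpt 0.2.4 defaults assumed).
const { chat, chatCompletion } = useChatgpt()

// No model argument: the server-side handler now falls back to 'gpt-4o-mini'.
const reply = await chat('Give me a one-line summary of Nuxt 3.')

// Explicitly selecting one of the newly documented models.
const completion = await chatCompletion(
  [{ role: 'user', content: 'Give me a one-line summary of Nitro.' }],
  'gpt-4o'
)

console.log(reply, completion)
```
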
package/dist/module.json
CHANGED

package/dist/runtime/constants/models.d.ts
CHANGED
@@ -4,6 +4,9 @@ export declare const MODEL_GPT_TURBO_3_5: string;
 export declare const MODEL_GPT_TURBO_3_5_0301: string;
 export declare const MODEL_GPT_TURBO_3_5_1106 = "gpt-3.5-turbo-1106";
 export declare const MODEL_GPT_4: string;
+export declare const MODEL_GPT_4_O: string;
+export declare const MODEL_GPT_4_MINI: string;
+export declare const MODEL_GPT_4_TURBO: string;
 export declare const MODEL_GPT_4_1106_PREVIEW = "gpt-4-1106-preview";
 export declare const MODEL_GPT_4_0314: string;
 export declare const MODEL_GPT_4_0613 = "gpt-4-0613";

package/dist/runtime/constants/models.mjs
CHANGED
@@ -4,6 +4,9 @@ export const MODEL_GPT_TURBO_3_5 = "gpt-3.5-turbo";
 export const MODEL_GPT_TURBO_3_5_0301 = "gpt-3.5-turbo-0301";
 export const MODEL_GPT_TURBO_3_5_1106 = "gpt-3.5-turbo-1106";
 export const MODEL_GPT_4 = "gpt-4";
+export const MODEL_GPT_4_O = "gpt-4o";
+export const MODEL_GPT_4_MINI = "gpt-4o-mini";
+export const MODEL_GPT_4_TURBO = "gpt-4-turbo";
 export const MODEL_GPT_4_1106_PREVIEW = "gpt-4-1106-preview";
 export const MODEL_GPT_4_0314 = "gpt-4-0314";
 export const MODEL_GPT_4_0613 = "gpt-4-0613";

package/dist/runtime/server/api/chat-completion.mjs
CHANGED
@@ -1,7 +1,7 @@
 import OpenAI from "openai";
 import { createError, defineEventHandler, readBody } from "h3";
 import { defaultOptions } from "../../constants/options.mjs";
-import {
+import { MODEL_GPT_4_MINI } from "../../constants/models.mjs";
 import { modelMap } from "../../utils/model-map.mjs";
 import { useRuntimeConfig } from "#imports";
 export default defineEventHandler(async (event) => {
@@ -17,7 +17,7 @@ export default defineEventHandler(async (event) => {
   });
   const requestOptions = {
     messages,
-    model: !model ? modelMap[
+    model: !model ? modelMap[MODEL_GPT_4_MINI] : modelMap[model],
     ...options || defaultOptions
   };
   try {
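
The only functional change in this handler is the import swap and the default in `requestOptions.model`: when the client omits a model, the handler now resolves `modelMap[MODEL_GPT_4_MINI]`, i.e. "gpt-4o-mini". Below is a self-contained sketch of that fallback expression; the constant and map entries are inlined here to mirror the diff rather than imported from the package.

```js
// Standalone sketch of the new default-model fallback (values mirror the diff above).
const MODEL_GPT_4_MINI = 'gpt-4o-mini'
const modelMap = {
  'gpt-4o': 'gpt-4o',
  'gpt-4o-mini': 'gpt-4o-mini',
  'gpt-4-turbo': 'gpt-4-turbo',
}

// Same expression the handler now uses when building requestOptions.model.
const resolveModel = (model) => (!model ? modelMap[MODEL_GPT_4_MINI] : modelMap[model])

console.log(resolveModel(undefined))     // "gpt-4o-mini" (client sent no model)
console.log(resolveModel('gpt-4o'))      // "gpt-4o"
console.log(resolveModel('not-a-model')) // undefined (names outside modelMap are not forwarded)
```
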

package/dist/runtime/server/api/chat.mjs
CHANGED
@@ -1,7 +1,7 @@
 import OpenAI from "openai";
 import { createError, defineEventHandler, readBody } from "h3";
 import { defaultOptions } from "../../constants/options.mjs";
-import {
+import { MODEL_GPT_4_MINI } from "../../constants/models.mjs";
 import { modelMap } from "../../utils/model-map.mjs";
 import { useRuntimeConfig } from "#imports";
 export default defineEventHandler(async (event) => {
@@ -17,7 +17,7 @@ export default defineEventHandler(async (event) => {
   });
   const requestOptions = {
     messages: [{ role: "user", content: message }],
-    model: !model ? modelMap[
+    model: !model ? modelMap[MODEL_GPT_4_MINI] : modelMap[model],
     ...options || defaultOptions
   };
   try {
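
`chat.mjs` receives the same two-line change; the two handlers differ only in how `messages` is built, with `chat.mjs` wrapping the single `message` string from the request body as one user message while `chat-completion.mjs` forwards the caller's `messages` array as-is. A rough, self-contained sketch of the request options each handler assembles follows; the helper names are illustrative rather than from the package, the `defaultOptions` values are taken from the README table, and `resolveModel` stands in for the `modelMap` lookup shown above.

```js
// Rough sketch of the two handlers' request options after this change.
const defaultOptions = { temperature: 0.5, max_tokens: 2048, top_p: 1, frequency_penalty: 0, presence_penalty: 0 }
const resolveModel = (model) => (!model ? 'gpt-4o-mini' : model) // simplified stand-in for the modelMap lookup

// chat.mjs: a single string is wrapped into one user message.
function buildChatOptions({ message, model, options }) {
  return {
    messages: [{ role: 'user', content: message }],
    model: resolveModel(model),
    ...(options || defaultOptions),
  }
}

// chat-completion.mjs: the caller already supplies an array of { role, content } objects.
function buildChatCompletionOptions({ messages, model, options }) {
  return {
    messages,
    model: resolveModel(model),
    ...(options || defaultOptions),
  }
}

console.log(buildChatOptions({ message: 'Hello' }))
console.log(buildChatCompletionOptions({ messages: [{ role: 'user', content: 'Hello' }], model: 'gpt-4-turbo' }))
```
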

package/dist/runtime/utils/model-map.mjs
CHANGED
@@ -5,6 +5,9 @@ import {
   MODEL_GPT_TURBO_3_5_0301,
   MODEL_GPT_TURBO_3_5_1106,
   MODEL_GPT_4,
+  MODEL_GPT_4_O,
+  MODEL_GPT_4_MINI,
+  MODEL_GPT_4_TURBO,
   MODEL_GPT_4_1106_PREVIEW,
   MODEL_GPT_4_0314,
   MODEL_GPT_4_0613,
@@ -19,6 +22,9 @@ export const modelMap = {
   [MODEL_GPT_TURBO_3_5_0301]: MODEL_GPT_TURBO_3_5_0301,
   [MODEL_GPT_TURBO_3_5_1106]: MODEL_GPT_TURBO_3_5_1106,
   [MODEL_GPT_4]: MODEL_GPT_4,
+  [MODEL_GPT_4_O]: MODEL_GPT_4_O,
+  [MODEL_GPT_4_MINI]: MODEL_GPT_4_MINI,
+  [MODEL_GPT_4_TURBO]: MODEL_GPT_4_TURBO,
   [MODEL_GPT_4_1106_PREVIEW]: MODEL_GPT_4_1106_PREVIEW,
   [MODEL_GPT_4_0314]: MODEL_GPT_4_0314,
   [MODEL_GPT_4_0613]: MODEL_GPT_4_0613,
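
`modelMap` stays an identity map keyed by the model constants, so it doubles as the allow-list the API handlers consult: known names pass through unchanged and anything else resolves to `undefined`. A short sketch of that behaviour with the three entries added in this diff (map abbreviated, not the full export):

```js
// Abbreviated sketch of the updated modelMap (identity map over allowed model names).
const modelMap = {
  'gpt-4': 'gpt-4',
  'gpt-4o': 'gpt-4o',           // added in this diff
  'gpt-4o-mini': 'gpt-4o-mini', // added in this diff
  'gpt-4-turbo': 'gpt-4-turbo', // added in this diff
}

console.log(modelMap['gpt-4o-mini'])  // "gpt-4o-mini" (forwarded to the OpenAI client as-is)
console.log(modelMap['gpt-5-future']) // undefined (unknown names are not passed through)
```
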
package/package.json
CHANGED
@@ -1,10 +1,10 @@
 {
   "name": "nuxt-chatgpt",
-  "version": "0.2.
+  "version": "0.2.4",
   "description": "ChatGPT integration for Nuxt 3",
   "license": "MIT",
   "type": "module",
-  "homepage": "https://vuemadness.com
+  "homepage": "https://vuemadness.com/nuxt-chatgpt",
   "bugs": {
     "url": "https://github.com/schnapsterdog/nuxt-chatgpt/issues"
   },