nuxt-chatgpt 0.2.5 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2,7 +2,8 @@
2
2
  <br />
3
3
  <div>
4
4
  <div>
5
- <h1>Nuxt Chatgpt + Image Generator<a href="https://nuxtchatgpt.com" target="_blank">🔥(VIEW DEMO)🔥</a></h3>
5
+ <h1>Hausly + Image Generator<a href="https://hausly.io" target="_blank">🔥(IMAGE DEMO)🔥</a></h1>
6
+ <h2>Nuxt Chatgpt + Image Generator<a href="https://nuxtchatgpt.com" target="_blank">🔥(CHATGPT DEMO)🔥</a></h2>
6
7
 
7
8
  </div>
8
9
  <div style="display:flex; width:100%; justify-content:center">
@@ -33,6 +34,12 @@ This user-friendly module boasts of an easy integration process that enables sea
33
34
  - 🔥 &nbsp; Ensures security by routing requests through a [Nitro Server](https://nuxt.com/docs/guide/concepts/server-engine), preventing the <b>API Key</b> from being exposed.
34
35
  - 🧱 &nbsp; It is lightweight and performs well.
35
36
 
37
+
38
+ ## Recommended Node Version
39
+
40
+ ### min `v18.20.5 or higher`
41
+ ### recommended `v20.19.0`
42
+
36
43
  ## Getting Started
37
44
 
38
45
  1. Add nuxt-chatgpt dependency to your project
@@ -72,7 +79,7 @@ The `chat`, and `chatCompletion` methods requires three parameters:
72
79
  |--|--|--|--|
73
80
  |**message**|`String`|available only for `chat()`|A string representing the text message that you want to send to the GPT model for processing.
74
81
  |**messages**|`Array`|available only for `chatCompletion()`|An array of objects that contains `role` and `content`
75
- |**model**|`String`|`gpt-4o-mini` for `chat()` and `gpt-4o-mini` for `chatCompletion()`|Represent certain model for different types of natural language processing tasks.
82
+ |**model**|`String`|`gpt-5-mini` for `chat()` and `gpt-5-mini` for `chatCompletion()`|Represents the model to use for different types of natural language processing tasks.
76
83
  |**options**|`Object`|`{ temperature: 0.5, max_tokens: 2048, top_p: 1, frequency_penalty: 0, presence_penalty: 0 }`|An optional object that specifies any additional options you want to pass to the API request, such as, the number of responses to generate, and the maximum length of each response.
77
84
 
78
85
  The `generateImage` method requires one parameter:
@@ -80,7 +87,7 @@ The `generateImage` method requires one parameters:
80
87
  | Name | Type | Default | Description |
81
88
  |--|--|--|--|
82
89
  |**message**|`String`| A text description of the desired image(s). The maximum length is 1000 characters.
83
- |**model**|`String`|`dall-e-2` or `dall-e-3`| The model to use for image generation. Only dall-e-2 is supported at this time.
90
+ |**model**|`String`|`gpt-image-1-mini`| The model to use for image generation.
84
91
  |**options**|`Object`|`{ n: 1, quality: 'standard', response_format: 'url', size: '1024x1024', style: 'natural' }`|An optional object that specifies any additional options you want to pass to the API request, such as, the number of images to generate, quality, size and style of the generated images.
85
92
 
86
93
  Available models:
@@ -100,8 +107,16 @@ Available models:
100
107
  - gpt-4-32k
101
108
  - gpt-4-32k-0314
102
109
  - gpt-4-32k-0613
103
- - dall-e-2
110
+ - gpt-5-nano
111
+ - gpt-5-mini
112
+ - gpt-5-pro
113
+ - gpt-5.1
114
+ - gpt-5.2-pro
115
+ - gpt-5.2
104
116
  - dall-e-3
117
+ - gpt-image-1
118
+ - gpt-image-1-mini
119
+ - gpt-image-1.5
105
120
 
106
121
  ### Simple `chat` usage
107
122
  In the following example, the model is unspecified, and the gpt-5-mini model will be used by default.
@@ -117,7 +132,7 @@ async function sendMessage() {
117
132
  const response = await chat(inputData.value)
118
133
  data.value = response
119
134
  } catch(error) {
120
- alert(`Join the waiting list if you want to use GPT-4 models: ${error}`)
135
+ alert(`Verify your organization if you want to use GPT-5 models: ${error}`)
121
136
  }
122
137
  }
123
138
 
@@ -146,10 +161,10 @@ const inputData = ref('')
146
161
 
147
162
  async function sendMessage() {
148
163
  try {
149
- const response = await chat(inputData.value, 'gpt-4o-mini')
164
+ const response = await chat(inputData.value, 'gpt-5-mini')
150
165
  data.value = response
151
166
  } catch(error) {
152
- alert(`Join the waiting list if you want to use GPT-4 models: ${error}`)
167
+ alert(`Verify your organization if you want to use GPT-5 models: ${error}`)
153
168
  }
154
169
  }
155
170
 
@@ -195,7 +210,7 @@ async function sendMessage() {
195
210
 
196
211
  chatTree.value.push(responseMessage)
197
212
  } catch(error) {
198
- alert(`Join the waiting list if you want to use GPT-4 models: ${error}`)
213
+ alert(`Verify your organization if you want to use GPT-5 models: ${error}`)
199
214
  }
200
215
  }
201
216
 
@@ -239,7 +254,7 @@ async function sendMessage() {
239
254
 
240
255
  chatTree.value.push(message)
241
256
 
242
- const response = await chatCompletion(chatTree.value, 'gpt-4o-mini')
257
+ const response = await chatCompletion(chatTree.value, 'gpt-5-mini')
243
258
 
244
259
  const responseMessage = {
245
260
  role: response[0].message.role,
@@ -248,7 +263,7 @@ async function sendMessage() {
248
263
 
249
264
  chatTree.value.push(responseMessage)
250
265
  } catch(error) {
251
- alert(`Join the waiting list if you want to use GPT-4 models: ${error}`)
266
+ alert(`Verify your organization if you want to use GPT-5 models: ${error}`)
252
267
  }
253
268
  }
254
269
 
@@ -276,7 +291,7 @@ async function sendMessage() {
276
291
  ```
277
292
 
278
293
  ### Simple `generateImage` usage
279
- In the following example, the model is unspecified, and the dall-e-2 model will be used by default.
294
+ In the following example, the model is unspecified, and the `gpt-image-1-mini` model will be used by default.
280
295
 
281
296
  ```js
282
297
  const { generateImage } = useChatgpt()
@@ -285,14 +300,23 @@ const images = ref([])
285
300
  const inputData = ref('')
286
301
  const loading = ref(false)
287
302
 
303
+ function b64ToBlobUrl(b64) {
304
+ const bytes = Uint8Array.from(atob(b64), (c) => c.charCodeAt(0));
305
+ const blob = new Blob([bytes], { type: "image/png" });
306
+ return URL.createObjectURL(blob);
307
+ }
308
+
288
309
  async function sendPrompt() {
289
- loading.value = true
310
+ loading.value = true;
290
311
  try {
291
- images.value = await generateImage(inputData.value)
312
+ const result = await generateImage(inputData.value);
313
+ images.value = result.map((img) => ({
314
+ url: b64ToBlobUrl(img.b64_json),
315
+ }));
292
316
  } catch (error) {
293
- alert(`Error: ${error}`)
317
+ alert(`Error: ${error}`);
294
318
  }
295
- loading.value = false
319
+ loading.value = false;
296
320
  }
297
321
 
298
322
  ```
@@ -327,7 +351,7 @@ const loading = ref(false)
327
351
  async function sendPrompt() {
328
352
  loading.value = true
329
353
  try {
330
- images.value = await generateImage(inputData.value, 'dall-e-2', {
354
+ images.value = await generateImage(inputData.value, 'dall-e-3', {
331
355
  n: 1,
332
356
  quality: 'standard',
333
357
  response_format: 'url',
@@ -14,6 +14,7 @@ interface ModuleOptions {
14
14
  */
15
15
  isEnabled?: boolean;
16
16
  }
17
- declare const _default: _nuxt_schema.NuxtModule<ModuleOptions>;
17
+ declare const _default: _nuxt_schema.NuxtModule<ModuleOptions, ModuleOptions, false>;
18
18
 
19
- export { ModuleOptions, _default as default };
19
+ export { _default as default };
20
+ export type { ModuleOptions };
package/dist/module.json CHANGED
@@ -2,7 +2,11 @@
2
2
  "name": "nuxt-chatgpt",
3
3
  "configKey": "chatgpt",
4
4
  "compatibility": {
5
- "nuxt": "^3.0.0"
5
+ "nuxt": ">=3.0.0 <5.0.0"
6
6
  },
7
- "version": "0.2.5"
7
+ "version": "0.3.0",
8
+ "builder": {
9
+ "@nuxt/module-builder": "1.0.2",
10
+ "unbuild": "3.6.1"
11
+ }
8
12
  }
package/dist/module.mjs CHANGED
@@ -4,9 +4,9 @@ import { defu } from 'defu';
4
4
 
5
5
  const configKey = "chatgpt";
6
6
  const moduleName = "nuxt-chatgpt";
7
- const nuxtVersion = "^3.0.0";
7
+ const nuxtVersion = ">=3.0.0 <5.0.0";
8
8
 
9
- const module = defineNuxtModule({
9
+ const module$1 = defineNuxtModule({
10
10
  meta: {
11
11
  name: moduleName,
12
12
  configKey,
@@ -56,4 +56,4 @@ const module = defineNuxtModule({
56
56
  }
57
57
  });
58
58
 
59
- export { module as default };
59
+ export { module$1 as default };
@@ -1,2 +1,2 @@
1
- import type { IChatgptClient } from "../types";
1
+ import type { IChatgptClient } from "../types/index.js";
2
2
  export declare const useChatgpt: () => IChatgptClient;
@@ -13,5 +13,13 @@ export declare const MODEL_GPT_4_0613 = "gpt-4-0613";
13
13
  export declare const MODEL_GPT_4_32k: string;
14
14
  export declare const MODEL_GPT_4_32k_0314: string;
15
15
  export declare const MODEL_GPT_4_32k_0613 = "gpt-4-32k-0613";
16
- export declare const MODEL_GPT_DALL_E_2 = "dall-e-2";
16
+ export declare const MODEL_GPT_5_NANO = "gpt-5-nano";
17
+ export declare const MODEL_GPT_5_MINI = "gpt-5-mini";
18
+ export declare const MODEL_GPT_5_PRO = "gpt-5-pro";
19
+ export declare const MODEL_GPT_5_1 = "gpt-5.1";
20
+ export declare const MODEL_GPT_5_2_PRO = "gpt-5.2-pro";
21
+ export declare const MODEL_GPT_5_2 = "gpt-5.2";
17
22
  export declare const MODEL_GPT_DALL_E_3 = "dall-e-3";
23
+ export declare const MODEL_GPT_IMAGE_1 = "gpt-image-1";
24
+ export declare const MODEL_GPT_IMAGE_1_MINI = "gpt-image-1-mini";
25
+ export declare const MODEL_GPT_IMAGE_1_5 = "gpt-image-1.5";
@@ -13,5 +13,13 @@ export const MODEL_GPT_4_0613 = "gpt-4-0613";
13
13
  export const MODEL_GPT_4_32k = "gpt-4-32k";
14
14
  export const MODEL_GPT_4_32k_0314 = "gpt-4-32k-0314";
15
15
  export const MODEL_GPT_4_32k_0613 = "gpt-4-32k-0613";
16
- export const MODEL_GPT_DALL_E_2 = "dall-e-2";
16
+ export const MODEL_GPT_5_NANO = "gpt-5-nano";
17
+ export const MODEL_GPT_5_MINI = "gpt-5-mini";
18
+ export const MODEL_GPT_5_PRO = "gpt-5-pro";
19
+ export const MODEL_GPT_5_1 = "gpt-5.1";
20
+ export const MODEL_GPT_5_2_PRO = "gpt-5.2-pro";
21
+ export const MODEL_GPT_5_2 = "gpt-5.2";
17
22
  export const MODEL_GPT_DALL_E_3 = "dall-e-3";
23
+ export const MODEL_GPT_IMAGE_1 = "gpt-image-1";
24
+ export const MODEL_GPT_IMAGE_1_MINI = "gpt-image-1-mini";
25
+ export const MODEL_GPT_IMAGE_1_5 = "gpt-image-1.5";
@@ -1,14 +1,2 @@
1
- export declare const defaultOptions: {
2
- temperature: number;
3
- max_tokens: number;
4
- top_p: number;
5
- frequency_penalty: number;
6
- presence_penalty: number;
7
- };
8
- export declare const defaultDaleOptions: {
9
- n: number;
10
- quality: string;
11
- response_format: string;
12
- size: string;
13
- style: string;
14
- };
1
+ export declare const defaultOptions: {};
2
+ export declare const defaultDaleOptions: {};
@@ -0,0 +1,14 @@
1
+ export const defaultOptions = {
2
+ // temperature: 0.5,
3
+ // max_tokens: 2048,
4
+ // top_p: 1,
5
+ // frequency_penalty: 0,
6
+ // presence_penalty: 0
7
+ };
8
+ export const defaultDaleOptions = {
9
+ // n: 1,
10
+ // quality: 'standard',
11
+ // response_format: 'url',
12
+ // size: '1024x1024',
13
+ // style: 'natural'
14
+ };
@@ -1,8 +1,8 @@
1
1
  import OpenAI from "openai";
2
2
  import { createError, defineEventHandler, readBody } from "h3";
3
- import { defaultOptions } from "../../constants/options.mjs";
4
- import { MODEL_GPT_4_MINI } from "../../constants/models.mjs";
5
- import { modelMap } from "../../utils/model-map.mjs";
3
+ import { defaultOptions } from "../../constants/options.js";
4
+ import { MODEL_GPT_5_MINI } from "../../constants/models.js";
5
+ import { modelMap } from "../../utils/model-map.js";
6
6
  import { useRuntimeConfig } from "#imports";
7
7
  export default defineEventHandler(async (event) => {
8
8
  const { messages, model, options } = await readBody(event);
@@ -17,7 +17,7 @@ export default defineEventHandler(async (event) => {
17
17
  });
18
18
  const requestOptions = {
19
19
  messages,
20
- model: !model ? modelMap[MODEL_GPT_4_MINI] : modelMap[model],
20
+ model: !model ? modelMap[MODEL_GPT_5_MINI] : modelMap[model],
21
21
  ...options || defaultOptions
22
22
  };
23
23
  try {
@@ -26,7 +26,7 @@ export default defineEventHandler(async (event) => {
26
26
  } catch (error) {
27
27
  throw createError({
28
28
  statusCode: 500,
29
- message: "Failed to forward request to OpenAI API"
29
+ message: error
30
30
  });
31
31
  }
32
32
  });
@@ -1,8 +1,8 @@
1
1
  import OpenAI from "openai";
2
2
  import { createError, defineEventHandler, readBody } from "h3";
3
- import { defaultOptions } from "../../constants/options.mjs";
4
- import { MODEL_GPT_4_MINI } from "../../constants/models.mjs";
5
- import { modelMap } from "../../utils/model-map.mjs";
3
+ import { defaultOptions } from "../../constants/options.js";
4
+ import { MODEL_GPT_5_MINI } from "../../constants/models.js";
5
+ import { modelMap } from "../../utils/model-map.js";
6
6
  import { useRuntimeConfig } from "#imports";
7
7
  export default defineEventHandler(async (event) => {
8
8
  const { message, model, options } = await readBody(event);
@@ -17,7 +17,7 @@ export default defineEventHandler(async (event) => {
17
17
  });
18
18
  const requestOptions = {
19
19
  messages: [{ role: "user", content: message }],
20
- model: !model ? modelMap[MODEL_GPT_4_MINI] : modelMap[model],
20
+ model: !model ? modelMap[MODEL_GPT_5_MINI] : modelMap[model],
21
21
  ...options || defaultOptions
22
22
  };
23
23
  try {
@@ -26,7 +26,7 @@ export default defineEventHandler(async (event) => {
26
26
  } catch (error) {
27
27
  throw createError({
28
28
  statusCode: 500,
29
- message: "Failed to forward request to OpenAI API"
29
+ message: error
30
30
  });
31
31
  }
32
32
  });
@@ -1,3 +1,3 @@
1
1
  import OpenAI from 'openai';
2
- declare const _default: import("h3").EventHandler<import("h3").EventHandlerRequest, Promise<OpenAI.Images.Image[]>>;
2
+ declare const _default: import("h3").EventHandler<import("h3").EventHandlerRequest, Promise<OpenAI.Images.Image[] | undefined>>;
3
3
  export default _default;
@@ -1,8 +1,8 @@
1
1
  import OpenAI from "openai";
2
2
  import { createError, defineEventHandler, readBody } from "h3";
3
- import { defaultDaleOptions } from "../../constants/options.mjs";
4
- import { MODEL_GPT_DALL_E_2 } from "../../constants/models.mjs";
5
- import { modelMap } from "../../utils/model-map.mjs";
3
+ import { defaultDaleOptions } from "../../constants/options.js";
4
+ import { MODEL_GPT_IMAGE_1_MINI } from "../../constants/models.js";
5
+ import { modelMap } from "../../utils/model-map.js";
6
6
  import { useRuntimeConfig } from "#imports";
7
7
  export default defineEventHandler(async (event) => {
8
8
  const { message, model, options } = await readBody(event);
@@ -17,7 +17,7 @@ export default defineEventHandler(async (event) => {
17
17
  });
18
18
  const requestOptions = {
19
19
  prompt: message,
20
- model: !model ? modelMap[MODEL_GPT_DALL_E_2] : modelMap[model],
20
+ model: !model ? modelMap[MODEL_GPT_IMAGE_1_MINI] : modelMap[model],
21
21
  ...options || defaultDaleOptions
22
22
  };
23
23
  try {
@@ -26,7 +26,7 @@ export default defineEventHandler(async (event) => {
26
26
  } catch (error) {
27
27
  throw createError({
28
28
  statusCode: 500,
29
- message: "Failed to forward request to OpenAI API"
29
+ message: error
30
30
  });
31
31
  }
32
32
  });
@@ -1,10 +1,29 @@
1
+ import { MODEL_TEXT_DAVINCI_003, MODEL_TEXT_DAVINCI_002, MODEL_GPT_TURBO_3_5, MODEL_GPT_TURBO_3_5_0301, MODEL_GPT_4, MODEL_GPT_4_O, MODEL_GPT_4_MINI, MODEL_GPT_4_TURBO, MODEL_GPT_4_0314, MODEL_GPT_4_32k, MODEL_GPT_4_32k_0314 } from "../constants/models.js";
1
2
  export declare const modelMap: {
2
- [x: string]: string;
3
+ [MODEL_TEXT_DAVINCI_003]: string;
4
+ [MODEL_TEXT_DAVINCI_002]: string;
5
+ [MODEL_GPT_TURBO_3_5]: string;
6
+ [MODEL_GPT_TURBO_3_5_0301]: string;
7
+ [MODEL_GPT_4]: string;
8
+ [MODEL_GPT_4_O]: string;
9
+ [MODEL_GPT_4_MINI]: string;
10
+ [MODEL_GPT_4_TURBO]: string;
11
+ [MODEL_GPT_4_0314]: string;
12
+ [MODEL_GPT_4_32k]: string;
13
+ [MODEL_GPT_4_32k_0314]: string;
3
14
  "gpt-3.5-turbo-1106": string;
4
15
  "gpt-4-1106-preview": string;
5
16
  "gpt-4-0613": string;
6
17
  "gpt-4-32k-0613": string;
7
- "dall-e-2": string;
8
18
  "dall-e-3": string;
19
+ "gpt-image-1": string;
20
+ "gpt-image-1-mini": string;
21
+ "gpt-image-1.5": string;
22
+ "gpt-5-nano": string;
23
+ "gpt-5-mini": string;
24
+ "gpt-5-pro": string;
25
+ "gpt-5.1": string;
26
+ "gpt-5.2-pro": string;
27
+ "gpt-5.2": string;
9
28
  default: string;
10
29
  };
@@ -14,9 +14,17 @@ import {
14
14
  MODEL_GPT_4_32k,
15
15
  MODEL_GPT_4_32k_0314,
16
16
  MODEL_GPT_4_32k_0613,
17
- MODEL_GPT_DALL_E_2,
18
- MODEL_GPT_DALL_E_3
19
- } from "../constants/models.mjs";
17
+ MODEL_GPT_5_NANO,
18
+ MODEL_GPT_5_MINI,
19
+ MODEL_GPT_5_PRO,
20
+ MODEL_GPT_5_1,
21
+ MODEL_GPT_5_2_PRO,
22
+ MODEL_GPT_5_2,
23
+ MODEL_GPT_DALL_E_3,
24
+ MODEL_GPT_IMAGE_1,
25
+ MODEL_GPT_IMAGE_1_MINI,
26
+ MODEL_GPT_IMAGE_1_5
27
+ } from "../constants/models.js";
20
28
  export const modelMap = {
21
29
  [MODEL_TEXT_DAVINCI_003]: MODEL_TEXT_DAVINCI_003,
22
30
  [MODEL_TEXT_DAVINCI_002]: MODEL_TEXT_DAVINCI_002,
@@ -33,7 +41,15 @@ export const modelMap = {
33
41
  [MODEL_GPT_4_32k]: MODEL_GPT_4_32k,
34
42
  [MODEL_GPT_4_32k_0314]: MODEL_GPT_4_32k_0314,
35
43
  [MODEL_GPT_4_32k_0613]: MODEL_GPT_4_32k_0613,
36
- [MODEL_GPT_DALL_E_2]: MODEL_GPT_DALL_E_2,
37
44
  [MODEL_GPT_DALL_E_3]: MODEL_GPT_DALL_E_3,
38
- default: MODEL_GPT_4_MINI
45
+ [MODEL_GPT_IMAGE_1]: MODEL_GPT_IMAGE_1,
46
+ [MODEL_GPT_IMAGE_1_MINI]: MODEL_GPT_IMAGE_1_MINI,
47
+ [MODEL_GPT_IMAGE_1_5]: MODEL_GPT_IMAGE_1_5,
48
+ [MODEL_GPT_5_NANO]: MODEL_GPT_5_NANO,
49
+ [MODEL_GPT_5_MINI]: MODEL_GPT_5_MINI,
50
+ [MODEL_GPT_5_PRO]: MODEL_GPT_5_PRO,
51
+ [MODEL_GPT_5_1]: MODEL_GPT_5_1,
52
+ [MODEL_GPT_5_2_PRO]: MODEL_GPT_5_2_PRO,
53
+ [MODEL_GPT_5_2]: MODEL_GPT_5_2,
54
+ default: MODEL_GPT_5_MINI
39
55
  };
@@ -0,0 +1,3 @@
1
+ export { default } from './module.mjs'
2
+
3
+ export { type ModuleOptions } from './module.mjs'
package/package.json CHANGED
@@ -1,72 +1,72 @@
1
- {
2
- "name": "nuxt-chatgpt",
3
- "version": "0.2.5",
4
- "description": "ChatGPT integration for Nuxt 3",
5
- "license": "MIT",
6
- "type": "module",
7
- "homepage": "https://vuemadness.com/nuxt-chatgpt",
8
- "bugs": {
9
- "url": "https://github.com/schnapsterdog/nuxt-chatgpt/issues"
10
- },
11
- "repository": {
12
- "type": "git",
13
- "url": "git+https://github.com/schnapsterdog/nuxt-chatgpt"
14
- },
15
- "contributors": [
16
- {
17
- "name": "Oliver Trajceski (@schnapsterdog)"
18
- }
19
- ],
20
- "author": {
21
- "name": "Oliver Trajceski",
22
- "email": "oliver@akrinum.com"
23
- },
24
- "keywords": [
25
- "vue3",
26
- "nuxt3",
27
- "nuxt",
28
- "nuxt.js",
29
- "nuxt-chatgpt",
30
- "image",
31
- "image-generator"
32
- ],
33
- "exports": {
34
- ".": {
35
- "types": "./dist/types.d.ts",
36
- "import": "./dist/module.mjs",
37
- "require": "./dist/module.cjs"
38
- }
39
- },
40
- "main": "./dist/module.cjs",
41
- "types": "./dist/types.d.ts",
42
- "files": [
43
- "dist"
44
- ],
45
- "scripts": {
46
- "prepack": "nuxt-module-build",
47
- "dev": "nuxi dev playground",
48
- "dev:build": "nuxi build playground",
49
- "dev:generate": "nuxi generate playground",
50
- "dev:prepare": "nuxt-module-build --stub && nuxi prepare playground",
51
- "dev:preview": "nuxi preview playground",
52
- "release": "npm run lint && npm run test && npm run prepack && changelogen --release --minor && npm publish && git push --follow-tags",
53
- "lint": "eslint .",
54
- "test": "vitest run",
55
- "test:watch": "vitest watch"
56
- },
57
- "dependencies": {
58
- "@nuxt/kit": "latest",
59
- "defu": "latest",
60
- "openai": "4.0.0"
61
- },
62
- "devDependencies": {
63
- "@nuxt/eslint-config": "latest",
64
- "@nuxt/module-builder": "^0.2.1",
65
- "@nuxt/schema": "latest",
66
- "@nuxt/test-utils": "latest",
67
- "changelogen": "latest",
68
- "eslint": "latest",
69
- "nuxt": "latest",
70
- "vitest": "latest"
71
- }
72
- }
1
+ {
2
+ "name": "nuxt-chatgpt",
3
+ "version": "0.3.0",
4
+ "description": "ChatGPT integration for Nuxt 3",
5
+ "license": "MIT",
6
+ "type": "module",
7
+ "homepage": "https://vuemadness.com/nuxt-chatgpt",
8
+ "bugs": {
9
+ "url": "https://github.com/schnapsterdog/nuxt-chatgpt/issues"
10
+ },
11
+ "repository": {
12
+ "type": "git",
13
+ "url": "git+https://github.com/schnapsterdog/nuxt-chatgpt"
14
+ },
15
+ "contributors": [
16
+ {
17
+ "name": "Oliver Trajceski (@schnapsterdog)"
18
+ }
19
+ ],
20
+ "author": {
21
+ "name": "Oliver Trajceski",
22
+ "email": "oliver@akrinum.com"
23
+ },
24
+ "keywords": [
25
+ "vue3",
26
+ "nuxt3",
27
+ "nuxt",
28
+ "nuxt.js",
29
+ "nuxt-chatgpt",
30
+ "image",
31
+ "image-generator"
32
+ ],
33
+ "exports": {
34
+ ".": {
35
+ "types": "./dist/types.d.mts",
36
+ "import": "./dist/module.mjs",
37
+ "default": "./dist/module.mjs"
38
+ }
39
+ },
40
+ "main": "./dist/module.mjs",
41
+ "types": "./dist/types.d.mts",
42
+ "files": [
43
+ "dist"
44
+ ],
45
+ "scripts": {
46
+ "prepack": "nuxt-module-build",
47
+ "dev": "nuxi dev playground",
48
+ "dev:build": "nuxi build playground",
49
+ "dev:generate": "nuxi generate playground",
50
+ "dev:prepare": "nuxt-module-build --stub && nuxi prepare playground",
51
+ "dev:preview": "nuxi preview playground",
52
+ "release": "npm run lint && npm run test && npm run prepack && changelogen --release --minor && npm publish && git push --follow-tags",
53
+ "lint": "eslint .",
54
+ "test": "vitest run",
55
+ "test:watch": "vitest watch"
56
+ },
57
+ "dependencies": {
58
+ "@nuxt/kit": "latest",
59
+ "defu": "latest",
60
+ "openai": "^4.96.2"
61
+ },
62
+ "devDependencies": {
63
+ "@nuxt/eslint-config": "latest",
64
+ "@nuxt/module-builder": "^1.0.1",
65
+ "@nuxt/schema": "latest",
66
+ "@nuxt/test-utils": "^3.18.0",
67
+ "changelogen": "latest",
68
+ "eslint": "latest",
69
+ "nuxt": "^3.16.2",
70
+ "vitest": "latest"
71
+ }
72
+ }
package/dist/module.cjs DELETED
@@ -1,5 +0,0 @@
1
- module.exports = function(...args) {
2
- return import('./module.mjs').then(m => m.default.call(this, ...args))
3
- }
4
- const _meta = module.exports.meta = require('./module.json')
5
- module.exports.getMeta = () => Promise.resolve(_meta)
@@ -1,14 +0,0 @@
1
- export const defaultOptions = {
2
- temperature: 0.5,
3
- max_tokens: 2048,
4
- top_p: 1,
5
- frequency_penalty: 0,
6
- presence_penalty: 0
7
- };
8
- export const defaultDaleOptions = {
9
- n: 1,
10
- quality: "standard",
11
- response_format: "url",
12
- size: "1024x1024",
13
- style: "natural"
14
- };
package/dist/types.d.ts DELETED
@@ -1,10 +0,0 @@
1
-
2
- import { ModuleOptions } from './module'
3
-
4
- declare module '@nuxt/schema' {
5
- interface NuxtConfig { ['chatgpt']?: Partial<ModuleOptions> }
6
- interface NuxtOptions { ['chatgpt']?: ModuleOptions }
7
- }
8
-
9
-
10
- export { ModuleOptions, default } from './module'