@plasius/ai 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98) hide show
  1. package/CHANGELOG.md +72 -0
  2. package/CODE_OF_CONDUCT.md +79 -0
  3. package/CONTRIBUTORS.md +27 -0
  4. package/LICENSE +21 -0
  5. package/README.md +132 -0
  6. package/SECURITY.md +17 -0
  7. package/dist/components/pixelverse/balance.d.ts +4 -0
  8. package/dist/components/pixelverse/balance.d.ts.map +1 -0
  9. package/dist/components/pixelverse/balance.js +40 -0
  10. package/dist/components/pixelverse/index.d.ts +3 -0
  11. package/dist/components/pixelverse/index.d.ts.map +1 -0
  12. package/dist/components/pixelverse/index.js +2 -0
  13. package/dist/components/pixelverse/pixelverseeditor.d.ts +16 -0
  14. package/dist/components/pixelverse/pixelverseeditor.d.ts.map +1 -0
  15. package/dist/components/pixelverse/pixelverseeditor.js +21 -0
  16. package/dist/index.d.ts +2 -0
  17. package/dist/index.d.ts.map +1 -0
  18. package/dist/index.js +1 -0
  19. package/dist/lib/chatWithAI.d.ts +2 -0
  20. package/dist/lib/chatWithAI.d.ts.map +1 -0
  21. package/dist/lib/chatWithAI.js +1 -0
  22. package/dist/lib/generateImage.d.ts +2 -0
  23. package/dist/lib/generateImage.d.ts.map +1 -0
  24. package/dist/lib/generateImage.js +1 -0
  25. package/dist/lib/synthesizeSpeech.d.ts +2 -0
  26. package/dist/lib/synthesizeSpeech.d.ts.map +1 -0
  27. package/dist/lib/synthesizeSpeech.js +1 -0
  28. package/dist/lib/transcribeSpeech.d.ts +2 -0
  29. package/dist/lib/transcribeSpeech.d.ts.map +1 -0
  30. package/dist/lib/transcribeSpeech.js +1 -0
  31. package/dist/platform/index.d.ts +76 -0
  32. package/dist/platform/index.d.ts.map +1 -0
  33. package/dist/platform/index.js +125 -0
  34. package/dist/platform/openai.d.ts +8 -0
  35. package/dist/platform/openai.d.ts.map +1 -0
  36. package/dist/platform/openai.js +61 -0
  37. package/dist/platform/pixelverse.d.ts +6 -0
  38. package/dist/platform/pixelverse.d.ts.map +1 -0
  39. package/dist/platform/pixelverse.js +196 -0
  40. package/dist-cjs/components/pixelverse/balance.d.ts +4 -0
  41. package/dist-cjs/components/pixelverse/balance.d.ts.map +1 -0
  42. package/dist-cjs/components/pixelverse/balance.js +46 -0
  43. package/dist-cjs/components/pixelverse/index.d.ts +3 -0
  44. package/dist-cjs/components/pixelverse/index.d.ts.map +1 -0
  45. package/dist-cjs/components/pixelverse/index.js +18 -0
  46. package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts +16 -0
  47. package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts.map +1 -0
  48. package/dist-cjs/components/pixelverse/pixelverseeditor.js +27 -0
  49. package/dist-cjs/index.d.ts +2 -0
  50. package/dist-cjs/index.d.ts.map +1 -0
  51. package/dist-cjs/index.js +17 -0
  52. package/dist-cjs/lib/chatWithAI.d.ts +1 -0
  53. package/dist-cjs/lib/chatWithAI.d.ts.map +1 -0
  54. package/dist-cjs/lib/chatWithAI.js +1 -0
  55. package/dist-cjs/lib/generateImage.d.ts +1 -0
  56. package/dist-cjs/lib/generateImage.d.ts.map +1 -0
  57. package/dist-cjs/lib/generateImage.js +1 -0
  58. package/dist-cjs/lib/synthesizeSpeech.d.ts +1 -0
  59. package/dist-cjs/lib/synthesizeSpeech.d.ts.map +1 -0
  60. package/dist-cjs/lib/synthesizeSpeech.js +1 -0
  61. package/dist-cjs/lib/transcribeSpeech.d.ts +1 -0
  62. package/dist-cjs/lib/transcribeSpeech.d.ts.map +1 -0
  63. package/dist-cjs/lib/transcribeSpeech.js +1 -0
  64. package/dist-cjs/platform/index.d.ts +76 -0
  65. package/dist-cjs/platform/index.d.ts.map +1 -0
  66. package/dist-cjs/platform/index.js +128 -0
  67. package/dist-cjs/platform/openai.d.ts +8 -0
  68. package/dist-cjs/platform/openai.d.ts.map +1 -0
  69. package/dist-cjs/platform/openai.js +67 -0
  70. package/dist-cjs/platform/pixelverse.d.ts +6 -0
  71. package/dist-cjs/platform/pixelverse.d.ts.map +1 -0
  72. package/dist-cjs/platform/pixelverse.js +199 -0
  73. package/docs/adrs/adr-0001-ai-package-scope.md +21 -0
  74. package/docs/adrs/adr-0002-public-repo-governance.md +24 -0
  75. package/docs/adrs/adr-0003-contracts-first-documentation.md +25 -0
  76. package/docs/adrs/adr-template.md +35 -0
  77. package/docs/api-reference.md +64 -0
  78. package/docs/architecture.md +21 -0
  79. package/docs/providers.md +26 -0
  80. package/legal/CLA-REGISTRY.csv +1 -0
  81. package/legal/CLA.md +22 -0
  82. package/legal/CORPORATE_CLA.md +57 -0
  83. package/legal/INDIVIDUAL_CLA.md +91 -0
  84. package/package.json +117 -0
  85. package/src/components/pixelverse/balance.module.css +6 -0
  86. package/src/components/pixelverse/balance.tsx +65 -0
  87. package/src/components/pixelverse/index.ts +2 -0
  88. package/src/components/pixelverse/pixelverseeditor.mocule.css +0 -0
  89. package/src/components/pixelverse/pixelverseeditor.tsx +74 -0
  90. package/src/global.d.ts +9 -0
  91. package/src/index.ts +1 -0
  92. package/src/lib/chatWithAI.ts +0 -0
  93. package/src/lib/generateImage.ts +0 -0
  94. package/src/lib/synthesizeSpeech.ts +0 -0
  95. package/src/lib/transcribeSpeech.ts +0 -0
  96. package/src/platform/index.ts +237 -0
  97. package/src/platform/openai.ts +123 -0
  98. package/src/platform/pixelverse.ts +309 -0
@@ -0,0 +1,125 @@
1
// Compiled from src/platform/index.ts — schema contracts for AI completion
// records plus the AICapability enum shared by all platform providers.
import { createSchema, field } from "@plasius/schema";

// Envelope fields recorded for every completion, regardless of modality.
export const completionSchema = createSchema({
    id: field.string().description("A unique ID for this completion").version("1.0"),
    type: field.string().description("The type of completion (e.g. 'chat', 'text', 'speech')").version("1.0"),
    model: field.string().description("The model used to generate this completion").version("1.0"),
    durationMs: field.number().description("How long the AI task took in milliseconds").version("1.0"),
    createdAt: field.string().description("ISO timestamp when the completion was created").version("1.0"),
    partitionKey: field.string().description("User or system identifier that made the request").version("1.0"),
    usage: field.object({}).description("Optional usage metrics like token count or cost").version("1.0").optional().as(),
}, "completion", {
    version: "1.0",
    piiEnforcement: "none",
    table: "completions",
    schemaValidator: () => true,
});

// Chat responses carry the AI's message plus the responding actor.
export const chatCompletionSchema = createSchema({
    message: field.string().description("The response from the AI").version("1.0"),
    outputUser: field.string().description("The 'actor' who is chatting").version("1.0"),
}, "chatCompletion", {
    version: "1.0",
    table: "completions",
    schemaValidator: () => true,
});

// Plain-text completion results.
export const textCompletionSchema = createSchema({
    message: field.string().description("The response from the AI").version("1.0"),
}, "textCompletion", {
    version: "1.0",
    table: "completions",
    schemaValidator: () => true,
});

// Image generation results point at the produced asset.
export const imageCompletionSchema = createSchema({
    url: field.string().description("The response from the AI").version("1.0").as(),
}, "imageCompletion", {
    version: "1.0",
    table: "completions",
    schemaValidator: () => true,
});

// Synthesized speech results point at the produced audio asset.
export const speechCompletionSchema = createSchema({
    url: field.string().description("The response from the AI").version("1.0").as(),
}, "speechCompletion", {
    version: "1.0",
    table: "completions",
    schemaValidator: () => true,
});

// Video generation results point at the produced video asset.
export const videoCompletionSchema = createSchema({
    url: field.string().description("The response from the AI").version("1.0").as(),
}, "videoCompletion", {
    version: "1.0",
    table: "completions",
    schemaValidator: () => true,
});

// Account balance snapshots.
export const balanceCompletionSchema = createSchema({
    balance: field.number().description("Current balance").version("1.0"),
}, "balanceCompletion", {
    version: "1.0",
    table: "completions",
    schemaValidator: () => true,
});

// Capabilities a platform provider may implement. Compiled TypeScript enum:
// numeric values with a reverse name lookup (AICapability[0] === "Chat").
export var AICapability;
(function (AICapability) {
    ["Chat", "Text", "Speech", "Image", "Video", "Balance"].forEach((label, ordinal) => {
        AICapability[AICapability[label] = ordinal] = label;
    });
})(AICapability || (AICapability = {}));
@@ -0,0 +1,8 @@
1
import type { AIPlatform } from "./index.js";
/** Credentials required to construct the OpenAI-backed platform. */
export interface OpenAIPlatformProps {
    /** OpenAI API key (secret — do not log). */
    openaiAPIKey: string;
    /** OpenAI project identifier. */
    openaiProjectKey: string;
    /** OpenAI organization identifier. */
    openaiOrgID: string;
}
/**
 * Builds an AIPlatform whose capabilities are served by OpenAI.
 * @param userId - identifier recorded on completions produced by this platform.
 */
export declare function OpenAIPlatform(userId: string, props: OpenAIPlatformProps): Promise<AIPlatform>;
//# sourceMappingURL=openai.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/platform/openai.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,UAAU,EAQX,MAAM,YAAY,CAAC;AAGpB,MAAM,WAAW,mBAAmB;IAClC,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,wBAAsB,cAAc,CAClC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,mBAAmB,GACzB,OAAO,CAAC,UAAU,CAAC,CAoGrB"}
@@ -0,0 +1,61 @@
1
import OpenAI from "openai";

/**
 * Builds the OpenAI-backed AIPlatform.
 *
 * Every capability is currently a stub that resolves with placeholder data;
 * the OpenAI client is constructed up-front so the eventual real calls can
 * use it.
 *
 * @param {string} userId - identifier recorded as the completion partition key.
 * @param {{openaiAPIKey: string, openaiProjectKey: string, openaiOrgID: string}} props
 * @returns {Promise<object>} the AIPlatform capability object.
 */
export async function OpenAIPlatform(userId, props) {
    const openai = new OpenAI({
        apiKey: props.openaiAPIKey,
        project: props.openaiProjectKey,
        organization: props.openaiOrgID,
        dangerouslyAllowBrowser: false,
    });
    // Not used by the stubs yet; referenced so linters don't flag it.
    void openai;
    // Fields shared by every completion record.
    function baseCompletionData(type, model, requestor, duration) {
        return {
            partitionKey: requestor,
            id: crypto.randomUUID(),
            type,
            model,
            createdAt: new Date().toISOString(),
            durationMs: duration,
            usage: {},
        };
    }
    const chatWithAI = (userId, input, context, model) => {
        void [input, context, model];
        const base = baseCompletionData("chat", "model", userId, 0);
        return Promise.resolve({ ...base, message: "Something", outputUser: "" });
    };
    const synthesizeSpeech = (userId, input, voice, context, model) => {
        void [input, voice, context, model];
        // FIX: all stubs were labelled "chat"; tag each completion with its
        // real modality.
        const base = baseCompletionData("speech", "model", userId, 0);
        // FIX: new URL("Something") always threw a TypeError (not a valid URL),
        // so these stubs could never resolve; use a syntactically valid
        // placeholder until real output is wired up.
        return Promise.resolve({ ...base, url: new URL("https://example.invalid/placeholder") });
    };
    const transcribeSpeech = (userId, input, context, model) => {
        void [input, context, model];
        const base = baseCompletionData("text", "model", userId, 0);
        return Promise.resolve({ ...base, message: "Something" });
    };
    const generateImage = (userId, input, context, model) => {
        void [input, context, model];
        const base = baseCompletionData("image", "model", userId, 0);
        return Promise.resolve({ ...base, url: new URL("https://example.invalid/placeholder") });
    };
    const produceVideo = (userId, input, image, context, model) => {
        void [input, image, context, model];
        const base = baseCompletionData("video", "model", userId, 0);
        return Promise.resolve({ ...base, url: new URL("https://example.invalid/placeholder") });
    };
    const checkBalance = (userId) => {
        // NOTE(review): "balanceCompletion" (the schema name) is used as the
        // type here while the other stubs use bare modality names — confirm
        // which convention consumers expect. Kept for compatibility with the
        // PixelVerse implementation.
        const base = baseCompletionData("balanceCompletion", "", userId, 0);
        return Promise.resolve({ ...base, balance: 0.0 });
    };
    // FIX: the original called React's useState here. Hooks may only run
    // inside React function components; invoking one in an async factory
    // throws at runtime. A plain snapshot preserves the returned shape.
    const currentBalance = (await checkBalance(userId)).balance;
    return {
        chatWithAI,
        synthesizeSpeech,
        transcribeSpeech,
        generateImage,
        produceVideo,
        checkBalance,
        currentBalance,
    };
}
@@ -0,0 +1,6 @@
1
import type { AIPlatform } from "./index.js";
/** Credentials required to construct the PixelVerse-backed platform. */
export interface PixelVersePlatformProps {
    /** PixelVerse API key sent as the API-KEY header on every request. */
    pixelVerseAPIKey: string;
}
/**
 * Builds an AIPlatform whose capabilities are served by PixelVerse.
 * @param userId - identifier recorded on completions produced by this platform.
 */
export declare function PixelVersePlatform(userId: string, props: PixelVersePlatformProps): Promise<AIPlatform>;
//# sourceMappingURL=pixelverse.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pixelverse.d.ts","sourceRoot":"","sources":["../../src/platform/pixelverse.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EACV,UAAU,EAQX,MAAM,YAAY,CAAC;AAmBpB,MAAM,WAAW,uBAAuB;IACtC,gBAAgB,EAAE,MAAM,CAAC;CAC1B;AAED,wBAAsB,kBAAkB,CACtC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,uBAAuB,GAC7B,OAAO,CAAC,UAAU,CAAC,CA8QrB"}
@@ -0,0 +1,196 @@
1
import { performance } from "perf_hooks";

/**
 * Builds the PixelVerse-backed AIPlatform.
 *
 * Implemented capabilities: image-to-video generation (upload → generate →
 * poll for completion) and account balance queries. Chat, speech, and image
 * generation reject with "Not implemented".
 *
 * @param {string} userId - identifier recorded as the completion partition key.
 * @param {{pixelVerseAPIKey: string}} props
 * @returns {Promise<object>} the AIPlatform capability object.
 */
export async function PixelVersePlatform(userId, props) {
    // Uploads a source image (File or URL) and returns the API's JSON payload.
    async function uploadImage(image, apiKey) {
        const headers = new Headers();
        headers.append("API-KEY", apiKey);
        // FIX: trace IDs previously came from the third-party uuid package
        // while record IDs used crypto.randomUUID(); use the built-in
        // consistently (same RFC 4122 v4 format).
        headers.append("Ai-trace-id", crypto.randomUUID());
        // NOTE(review): Access-Control-Allow-Origin is a *response* header and
        // has no effect when sent on a request — confirm the API needs it.
        headers.append("Access-Control-Allow-Origin", "*");
        const formData = new FormData();
        if (image instanceof File) {
            // FIX: the explicit "" filename argument discarded the file's own
            // name; omitting it sends the File's name instead.
            formData.append("image", image);
        }
        else {
            const blob = await fetch(image.toString()).then((r) => r.blob());
            formData.append("image", blob, "image-from-url");
        }
        // pixelapi is proxied through the vite.config.ts file
        // to avoid CORS issues and to allow for local development
        const response = await fetch("/pixelapi/openapi/v2/image/upload", {
            method: "POST",
            headers,
            body: formData,
            redirect: "follow",
        });
        const data = (await response.json());
        return data;
    }
    // Requests video generation for an uploaded image; optional tuning
    // parameters are only included in the payload when provided.
    async function generateVideo(imgId, prompt, apiKey, seed, template_id, negative_prompt) {
        const headers = new Headers();
        headers.append("API-KEY", apiKey);
        headers.append("Ai-trace-id", crypto.randomUUID());
        headers.append("Content-Type", "application/json");
        headers.append("Access-Control-Allow-Origin", "*");
        headers.append("Accept", "application/json");
        const values = {
            duration: 5,
            img_id: imgId,
            model: "v3.5",
            motion_mode: "normal",
            prompt: prompt,
            quality: "720p",
            water_mark: false,
        };
        if (seed) {
            values.seed = seed;
        }
        if (template_id) {
            values.template_id = template_id;
        }
        if (negative_prompt) {
            values.negative_prompt = negative_prompt;
        }
        const body = JSON.stringify(values);
        // pixelapi is proxied through the vite.config.ts file
        // to avoid CORS issues and to allow for local development
        const response = await fetch("/pixelapi/openapi/v2/video/img/generate", {
            method: "POST",
            headers: headers,
            referrerPolicy: "no-referrer",
            body,
        });
        const data = (await response.json());
        return data;
    }
    // Fetches the generation status/result for a previously requested video.
    async function checkVideoStatus(id, apiKey) {
        const headers = new Headers();
        headers.append("API-KEY", apiKey);
        headers.append("Ai-trace-id", crypto.randomUUID());
        headers.append("Access-Control-Allow-Origin", "*");
        headers.append("Accept", "application/json");
        // pixelapi is proxied through the vite.config.ts file
        // to avoid CORS issues and to allow for local development
        const response = await fetch(`/pixelapi/openapi/v2/video/result/${id}`, {
            method: "GET",
            headers,
            referrerPolicy: "no-referrer",
        });
        const data = (await response.json());
        return data;
    }
    // Fields shared by every completion record.
    function baseCompletionData(type, model, requestor, duration) {
        return {
            partitionKey: requestor,
            id: crypto.randomUUID(),
            type,
            model,
            createdAt: new Date().toISOString(),
            durationMs: duration,
            usage: {},
        };
    }
    const chatWithAI = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const synthesizeSpeech = (_userId, _input, _voice, _context, _model) => {
        void [_userId, _input, _voice, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const transcribeSpeech = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const generateImage = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    // Full image-to-video pipeline: upload, request generation, poll until
    // done, then wrap the resulting URL in a completion record.
    const produceVideo = (userId, input, image, context, model) => {
        const start = performance.now();
        return uploadImage(image, props.pixelVerseAPIKey)
            .then((uploadResult) => {
            const imageId = uploadResult?.Resp?.id;
            if (!imageId)
                throw new Error("Invalid image upload response.");
            return generateVideo(imageId, input, props.pixelVerseAPIKey);
        })
            .then((generated) => {
            const videoId = generated?.Resp?.id;
            if (!videoId)
                throw new Error("Video generation did not return a valid ID.");
            return waitForVideoCompletion(videoId, props.pixelVerseAPIKey);
        })
            .then((videoUrl) => {
            const duration = performance.now() - start;
            const base = baseCompletionData("video", model, userId, duration);
            return {
                ...base,
                url: new URL(videoUrl),
            };
        })
            .catch((err) => {
            // Wrap so upstream callers see which stage of the pipeline failed.
            throw new Error(`produceVideo failed: ${err.message}`);
        });
    };
    // Polls checkVideoStatus until the video reports complete (status === 1)
    // or the retry budget is exhausted. Individual poll failures are logged
    // and retried rather than aborting the whole wait.
    async function waitForVideoCompletion(videoId, apiKey, maxRetries = 20, delayMs = 3000) {
        for (let attempt = 0; attempt < maxRetries; attempt++) {
            await new Promise((res) => setTimeout(res, delayMs));
            try {
                const videoCheck = await checkVideoStatus(videoId, apiKey);
                if (videoCheck?.Resp?.status === 1) {
                    const url = videoCheck?.Resp?.url;
                    if (!url)
                        throw new Error("Video marked complete but no URL returned.");
                    return url;
                }
            }
            catch (err) {
                console.warn(`Attempt ${attempt + 1} failed: ${err.message}`);
            }
        }
        throw new Error("Timed out waiting for video to complete.");
    }
    // Queries the account balance and reports the sum of monthly and package
    // credits as a completion record.
    const checkBalance = (userId) => {
        const start = performance.now();
        const headers = new Headers();
        headers.append("API-KEY", props.pixelVerseAPIKey);
        headers.append("AI-trace-ID", crypto.randomUUID());
        headers.append("Access-Control-Allow-Origin", "*");
        headers.append("Accept", "application/json");
        headers.append("Content-Type", "application/json");
        return fetch("/pixelapi/openapi/v2/account/balance", {
            method: "GET",
            headers,
            referrerPolicy: "no-referrer",
        })
            .then(async (res) => (await res.json()))
            .then((data) => {
            if (!data?.Resp) {
                throw new Error("Invalid balance response");
            }
            const duration = performance.now() - start;
            // NOTE(review): type "balanceCompletion" matches the schema name
            // rather than a bare modality name — kept as-is for compatibility.
            const base = baseCompletionData("balanceCompletion", "", userId, duration);
            const monthly = data.Resp.credit_monthly ?? 0;
            const pkg = data.Resp.credit_package ?? 0;
            return {
                ...base,
                balance: monthly + pkg,
            };
        })
            .catch((err) => {
            throw new Error(`checkBalance failed: ${err.message}`);
        });
    };
    // FIX: the original called React's useState here. Hooks may only run
    // inside React function components; invoking one in an async factory
    // throws at runtime. A plain snapshot keeps the returned shape identical.
    const currentBalance = (await checkBalance(userId)).balance ?? 0;
    return {
        chatWithAI,
        synthesizeSpeech,
        transcribeSpeech,
        generateImage,
        produceVideo,
        checkBalance,
        currentBalance,
    };
}
@@ -0,0 +1,4 @@
1
/**
 * React component that displays the PixelVerse account balance.
 * @param apiKey - PixelVerse API key used when querying the balance endpoint.
 */
export default function Balance({ apiKey }: {
    apiKey: string;
}): import("react/jsx-runtime").JSX.Element;
//# sourceMappingURL=balance.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"balance.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/balance.tsx"],"names":[],"mappings":"AAcA,MAAM,CAAC,OAAO,UAAU,OAAO,CAAC,EAAE,MAAM,EAAE,EAAE;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,2CAkD7D"}
@@ -0,0 +1,46 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.default = Balance;
7
+ const jsx_runtime_1 = require("react/jsx-runtime");
8
+ const react_1 = require("react");
9
+ const uuid_1 = require("uuid");
10
+ const balance_module_css_1 = __importDefault(require("./balance.module.css"));
11
+ function Balance({ apiKey }) {
12
+ const [balance, setBalance] = (0, react_1.useState)(null);
13
+ const fetchBalance = async () => {
14
+ try {
15
+ const response = await fetch("/pixelapi/openapi/v2/account/balance", {
16
+ method: "GET",
17
+ headers: {
18
+ "API-KEY": apiKey,
19
+ "AI-trace-ID": (0, uuid_1.v4)(),
20
+ Accept: "application/json",
21
+ "Content-Type": "application/json",
22
+ },
23
+ referrerPolicy: "no-referrer",
24
+ });
25
+ if (!response.ok) {
26
+ console.error("Failed to fetch balance:", response.status, response.statusText);
27
+ return;
28
+ }
29
+ const data = (await response.json());
30
+ if (data?.Resp) {
31
+ setBalance(data.Resp);
32
+ }
33
+ }
34
+ catch (err) {
35
+ console.error("fetchBalance() error", err);
36
+ }
37
+ };
38
+ (0, react_1.useEffect)(() => {
39
+ void fetchBalance(); // initial load
40
+ const intervalId = setInterval(() => {
41
+ void fetchBalance();
42
+ }, 600000);
43
+ return () => clearInterval(intervalId);
44
+ }, [apiKey]);
45
+ return ((0, jsx_runtime_1.jsx)("div", { className: balance_module_css_1.default.balance_container, children: balance ? ((0, jsx_runtime_1.jsxs)("div", { children: [(0, jsx_runtime_1.jsxs)("p", { children: ["Monthly Credit: ", balance.credit_monthly] }), (0, jsx_runtime_1.jsxs)("p", { children: ["Package Credit: ", balance.credit_package] })] })) : ((0, jsx_runtime_1.jsx)("p", { children: "Loading balance..." })) }));
46
+ }
@@ -0,0 +1,3 @@
1
// Barrel declarations for the PixelVerse React components.
export * from "./balance.js";
export * from "./pixelverseeditor.js";
//# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAC;AAC7B,cAAc,uBAAuB,CAAC"}
@@ -0,0 +1,18 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
+ };
16
+ Object.defineProperty(exports, "__esModule", { value: true });
17
+ __exportStar(require("./balance.js"), exports);
18
+ __exportStar(require("./pixelverseeditor.js"), exports);
@@ -0,0 +1,16 @@
1
/** Props for the PixelVerse image-to-video editor component. */
interface PixelverseEditorProps {
    /** PixelVerse API key forwarded to the Balance widget and API calls. */
    apiKey: string;
    /** Presumably called with the resulting video URL — confirm against src. */
    onVideoGenerated?: (videoUrl: string) => void;
    /** Presumably called with the uploaded image URL — confirm against src. */
    onImageUpload?: (imageUrl: string) => void;
    /** Generation prompt text. */
    prompt?: string;
    /** Change handler for the prompt. */
    onPromptChange?: (prompt: string) => void;
    /** Negative prompt text. */
    negative_prompt?: string;
    /** Change handler for the negative prompt. */
    onNegativePromptChange?: (negative_prompt: string) => void;
    /** Optional generation template identifier. */
    template_id?: string;
    /** Change handler for the template identifier. */
    onTemplateIdChange?: (template_id: string) => void;
    /** Optional generation seed. */
    seed?: number;
    /** Change handler for the seed. */
    onSeedChange?: (seed: number) => void;
}
/**
 * PixelVerse editor UI: upload an image, then generate and preview a video.
 * Note: the implementation only destructures a subset of the props above.
 */
export declare function PixelverseEditor({ apiKey, onVideoGenerated, prompt, negative_prompt, template_id, seed, }: PixelverseEditorProps): import("react/jsx-runtime").JSX.Element;
export {};
//# sourceMappingURL=pixelverseeditor.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pixelverseeditor.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/pixelverseeditor.tsx"],"names":[],"mappings":"AAKA,UAAU,qBAAqB;IAC7B,MAAM,EAAE,MAAM,CAAC;IACf,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC9C,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC3C,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC;IAC1C,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,sBAAsB,CAAC,EAAE,CAAC,eAAe,EAAE,MAAM,KAAK,IAAI,CAAC;IAC3D,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,kBAAkB,CAAC,EAAE,CAAC,WAAW,EAAE,MAAM,KAAK,IAAI,CAAC;IACnD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;CACvC;AAED,wBAAgB,gBAAgB,CAAC,EAC/B,MAAM,EACN,gBAAgB,EAChB,MAAM,EACN,eAAe,EACf,WAAW,EACX,IAAI,GACL,EAAE,qBAAqB,2CA+CvB"}
@@ -0,0 +1,27 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.PixelverseEditor = PixelverseEditor;
7
+ const jsx_runtime_1 = require("react/jsx-runtime");
8
+ const react_1 = require("react");
9
+ const balance_js_1 = __importDefault(require("./balance.js"));
10
+ function PixelverseEditor({ apiKey, onVideoGenerated, prompt, negative_prompt, template_id, seed, }) {
11
+ const [videoUrl, setVideoUrl] = (0, react_1.useState)("");
12
+ const [videoId, setVideoId] = (0, react_1.useState)(null);
13
+ const [selectedFile, setSelectedFile] = (0, react_1.useState)(null);
14
+ const [loading, setLoading] = (0, react_1.useState)(false);
15
+ const [videoStatus, setVideoStatus] = (0, react_1.useState)(0);
16
+ const handleFileChange = (e) => {
17
+ if (e.target.files?.[0]) {
18
+ setSelectedFile(e.target.files[0]);
19
+ handleUploadProcess();
20
+ }
21
+ };
22
+ const handleRegenerate = () => {
23
+ };
24
+ const handleUploadProcess = async () => {
25
+ };
26
+ return ((0, jsx_runtime_1.jsxs)("div", { children: [(0, jsx_runtime_1.jsx)(balance_js_1.default, { apiKey: apiKey }), videoStatus === 0 && !selectedFile && ((0, jsx_runtime_1.jsxs)("div", { children: [(0, jsx_runtime_1.jsx)("p", { children: "Drag/Drop or Click HERE to upload" }), (0, jsx_runtime_1.jsx)("input", { title: "Upload Image", type: "file", accept: ".jpg,.jpeg,.png,.webp", onChange: handleFileChange })] })), loading && (0, jsx_runtime_1.jsx)("div", { children: "Loading..." }), videoStatus === 0 && selectedFile && ((0, jsx_runtime_1.jsx)("button", { onClick: handleUploadProcess, children: "Start Upload" })), videoStatus === 1 && ((0, jsx_runtime_1.jsxs)("div", { children: [(0, jsx_runtime_1.jsx)("video", { src: videoUrl, controls: true }), (0, jsx_runtime_1.jsx)("button", { onClick: handleRegenerate, children: "Regenerate" })] }))] }));
27
+ }
@@ -0,0 +1,2 @@
1
+ export * from "./platform/index.js";
2
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC"}
@@ -0,0 +1,17 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
+ };
16
+ Object.defineProperty(exports, "__esModule", { value: true });
17
+ __exportStar(require("./platform/index.js"), exports);
@@ -0,0 +1 @@
1
+ //# sourceMappingURL=chatWithAI.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"chatWithAI.d.ts","sourceRoot":"","sources":["../../src/lib/chatWithAI.ts"],"names":[],"mappings":""}
@@ -0,0 +1 @@
1
+ "use strict";
@@ -0,0 +1 @@
1
+ //# sourceMappingURL=generateImage.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"generateImage.d.ts","sourceRoot":"","sources":["../../src/lib/generateImage.ts"],"names":[],"mappings":""}
@@ -0,0 +1 @@
1
+ "use strict";
@@ -0,0 +1 @@
1
+ //# sourceMappingURL=synthesizeSpeech.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"synthesizeSpeech.d.ts","sourceRoot":"","sources":["../../src/lib/synthesizeSpeech.ts"],"names":[],"mappings":""}
@@ -0,0 +1 @@
1
+ "use strict";
@@ -0,0 +1 @@
1
+ //# sourceMappingURL=transcribeSpeech.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"transcribeSpeech.d.ts","sourceRoot":"","sources":["../../src/lib/transcribeSpeech.ts"],"names":[],"mappings":""}
@@ -0,0 +1 @@
1
+ "use strict";