@plasius/ai 1.1.9 → 1.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1 -0
- package/README.md +6 -0
- package/dist/components/pixelverse/pixelverseeditor.d.ts +16 -0
- package/dist/components/pixelverse/pixelverseeditor.d.ts.map +1 -0
- package/dist/components/pixelverse/pixelverseeditor.js +21 -0
- package/dist/platform/openai.d.ts +8 -0
- package/dist/platform/openai.d.ts.map +1 -0
- package/dist/platform/openai.js +43 -0
- package/dist/platform/pixelverse.d.ts +6 -0
- package/dist/platform/pixelverse.d.ts.map +1 -0
- package/dist/platform/pixelverse.js +196 -0
- package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts +16 -0
- package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts.map +1 -0
- package/dist-cjs/components/pixelverse/pixelverseeditor.js +27 -0
- package/dist-cjs/package.json +3 -0
- package/dist-cjs/platform/openai.d.ts +8 -0
- package/dist-cjs/platform/openai.d.ts.map +1 -0
- package/dist-cjs/platform/openai.js +49 -0
- package/dist-cjs/platform/pixelverse.d.ts +6 -0
- package/dist-cjs/platform/pixelverse.d.ts.map +1 -0
- package/dist-cjs/platform/pixelverse.js +199 -0
- package/docs/adrs/adr-0004-dual-esm-cjs-runtime-compatibility.md +22 -0
- package/docs/adrs/index.md +1 -0
- package/package.json +4 -4
package/CHANGELOG.md
CHANGED
|
@@ -40,6 +40,7 @@ The format is based on **[Keep a Changelog](https://keepachangelog.com/en/1.1.0/
|
|
|
40
40
|
- Removed provider-specific identifiers from code roots to enforce public package boundaries.
|
|
41
41
|
|
|
42
42
|
- **Fixed**
|
|
43
|
+
- Enforced CommonJS runtime compatibility for dual-build output by generating and validating `dist-cjs/package.json` (`type: commonjs`) during build and package verification.
|
|
43
44
|
- `pack:check` now passes vendor-namespace checks for `src/**` by using generic provider naming in runtime/editor code.
|
|
44
45
|
|
|
45
46
|
- **Security**
|
package/README.md
CHANGED
|
@@ -27,6 +27,12 @@ Provider wiring and runtime adapters are documented in [`docs/providers.md`](./d
|
|
|
27
27
|
npm install @plasius/ai
|
|
28
28
|
```
|
|
29
29
|
|
|
30
|
+
## Module formats
|
|
31
|
+
|
|
32
|
+
This package publishes dual ESM and CJS artifacts.
|
|
33
|
+
Because the package root `package.json` declares `type: module`, the CJS build under `dist-cjs/*.js` would otherwise be interpreted as ESM; a `dist-cjs/package.json` containing `{ "type": "commonjs" }` is therefore generated during the build so that Node `require(...)` works.
|
|
34
|
+
|
|
35
|
+
|
|
30
36
|
## Usage
|
|
31
37
|
|
|
32
38
|
```ts
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Props accepted by the PixelverseEditor component.
 *
 * NOTE(review): the current implementation only consumes `apiKey`; the other
 * fields and callbacks declare the intended API surface but are not yet wired
 * up (see pixelverseeditor.js) — confirm before relying on them.
 */
interface PixelverseEditorProps {
    /** PixelVerse API key, forwarded to the Balance widget. */
    apiKey: string;
    /** Intended callback with the generated video URL (not invoked yet). */
    onVideoGenerated?: (videoUrl: string) => void;
    /** Intended callback with the uploaded image URL (not invoked yet). */
    onImageUpload?: (imageUrl: string) => void;
    /** Generation prompt (not consumed yet). */
    prompt?: string;
    /** Intended change notifier for `prompt` (not invoked yet). */
    onPromptChange?: (prompt: string) => void;
    /** Negative prompt (not consumed yet). */
    negative_prompt?: string;
    /** Intended change notifier for `negative_prompt` (not invoked yet). */
    onNegativePromptChange?: (negative_prompt: string) => void;
    /** Provider template id (not consumed yet). */
    template_id?: string;
    /** Intended change notifier for `template_id` (not invoked yet). */
    onTemplateIdChange?: (template_id: string) => void;
    /** Deterministic generation seed (not consumed yet). */
    seed?: number;
    /** Intended change notifier for `seed` (not invoked yet). */
    onSeedChange?: (seed: number) => void;
}
/** Image-to-video editor UI backed by the PixelVerse API. */
export declare function PixelverseEditor({ apiKey, onVideoGenerated, prompt, negative_prompt, template_id, seed, }: PixelverseEditorProps): import("react/jsx-runtime").JSX.Element;
export {};
//# sourceMappingURL=pixelverseeditor.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"pixelverseeditor.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/pixelverseeditor.tsx"],"names":[],"mappings":"AAKA,UAAU,qBAAqB;IAC7B,MAAM,EAAE,MAAM,CAAC;IACf,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC9C,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC3C,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC;IAC1C,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,sBAAsB,CAAC,EAAE,CAAC,eAAe,EAAE,MAAM,KAAK,IAAI,CAAC;IAC3D,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,kBAAkB,CAAC,EAAE,CAAC,WAAW,EAAE,MAAM,KAAK,IAAI,CAAC;IACnD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;CACvC;AAED,wBAAgB,gBAAgB,CAAC,EAC/B,MAAM,EACN,gBAAgB,EAChB,MAAM,EACN,eAAe,EACf,WAAW,EACX,IAAI,GACL,EAAE,qBAAqB,2CA+CvB"}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
import { useState } from "react";
import Balance from "./balance.js";
/**
 * Image-to-video editor UI for the PixelVerse provider.
 *
 * Only `apiKey` is consumed today (forwarded to the Balance widget); the
 * upload/generate handlers are still stubs, so the remaining props
 * (onVideoGenerated, prompt, negative_prompt, template_id, seed) are accepted
 * but not yet acted upon.
 */
export function PixelverseEditor({ apiKey, onVideoGenerated, prompt, negative_prompt, template_id, seed, }) {
    const [videoUrl, setVideoUrl] = useState("");
    const [videoId, setVideoId] = useState(null);
    const [selectedFile, setSelectedFile] = useState(null);
    const [loading, setLoading] = useState(false);
    // 0 = awaiting upload, 1 = video ready (drives which panel renders below).
    const [videoStatus, setVideoStatus] = useState(0);
    // Upload/generation pipeline stub. Takes the file as an argument because a
    // state value set in the same event tick is not visible here yet.
    const handleUploadProcess = async (file) => {
        void file; // not implemented yet
    };
    const handleFileChange = (e) => {
        const file = e.target.files?.[0];
        if (file) {
            setSelectedFile(file);
            // Fix: pass the file explicitly — React has not committed
            // `selectedFile` at this point, so reading the state here would
            // yield the stale (null) value. `void` marks the intentionally
            // un-awaited promise.
            void handleUploadProcess(file);
        }
    };
    const handleRegenerate = () => {
        // not implemented yet
    };
    return (_jsxs("div", { children: [_jsx(Balance, { apiKey: apiKey }), videoStatus === 0 && !selectedFile && (_jsxs("div", { children: [_jsx("p", { children: "Drag/Drop or Click HERE to upload" }), _jsx("input", { title: "Upload Image", type: "file", accept: ".jpg,.jpeg,.png,.webp", onChange: handleFileChange })] })), loading && _jsx("div", { children: "Loading..." }), videoStatus === 0 && selectedFile && (_jsx("button", { onClick: () => void handleUploadProcess(selectedFile), children: "Start Upload" })), videoStatus === 1 && (_jsxs("div", { children: [_jsx("video", { src: videoUrl, controls: true }), _jsx("button", { onClick: handleRegenerate, children: "Regenerate" })] }))] }));
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import type { AIPlatform } from "./index.js";
/** Configuration for {@link OpenAIPlatform}. */
export interface OpenAIPlatformProps {
    /** Required OpenAI API key; must be non-blank after trimming. */
    openaiAPIKey: string;
    /** Optional OpenAI project identifier. */
    openaiProjectKey?: string;
    /** Optional OpenAI organization id. */
    openaiOrgID?: string;
}
/**
 * Create an OpenAI-backed AIPlatform adapter.
 * NOTE(review): all operations are currently unimplemented stubs that reject.
 * @throws Error when `openaiAPIKey` is blank.
 */
export declare function OpenAIPlatform(userId: string, props: OpenAIPlatformProps): Promise<AIPlatform>;
//# sourceMappingURL=openai.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/platform/openai.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,UAAU,EAGX,MAAM,YAAY,CAAC;AAEpB,MAAM,WAAW,mBAAmB;IAClC,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAmBD,wBAAsB,cAAc,CAClC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,mBAAmB,GACzB,OAAO,CAAC,UAAU,CAAC,CAsCrB"}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import OpenAI from "openai";
|
|
2
|
+
function createCompletionData(type, model, requestor, durationMs) {
|
|
3
|
+
return {
|
|
4
|
+
partitionKey: requestor,
|
|
5
|
+
id: crypto.randomUUID(),
|
|
6
|
+
type,
|
|
7
|
+
model,
|
|
8
|
+
createdAt: new Date().toISOString(),
|
|
9
|
+
durationMs,
|
|
10
|
+
usage: {},
|
|
11
|
+
};
|
|
12
|
+
}
|
|
13
|
+
/**
 * Construct the OpenAI-backed AIPlatform adapter.
 *
 * Every operation is currently a stub that rejects with a descriptive error;
 * checkBalance resolves with a zero balance.
 *
 * @param userId requestor id used for the initial balance record
 * @param props  OpenAI credentials (`openaiAPIKey` required)
 * @throws Error when `openaiAPIKey` is blank after trimming
 */
export async function OpenAIPlatform(userId, props) {
    const apiKey = props.openaiAPIKey.trim();
    if (!apiKey) {
        throw new Error("openaiAPIKey is required.");
    }
    // Constructed eagerly so bad configuration fails fast; unused while the
    // operations below remain stubs.
    const openai = new OpenAI({
        apiKey,
        project: props.openaiProjectKey?.trim() || undefined,
        organization: props.openaiOrgID?.trim() || undefined,
        dangerouslyAllowBrowser: false,
    });
    void openai;
    const notImplemented = (operation) =>
        Promise.reject(new Error(`OpenAIPlatform "${operation}" is not implemented yet.`));
    // Placeholder balance: always zero until real accounting is wired up.
    const checkBalance = async (requestorId) => ({
        ...createCompletionData("balanceCompletion", "", requestorId, 0),
        balance: 0.0,
    });
    const { balance: currentBalance } = await checkBalance(userId);
    return {
        chatWithAI: async () => notImplemented("chatWithAI"),
        synthesizeSpeech: async () => notImplemented("synthesizeSpeech"),
        transcribeSpeech: async () => notImplemented("transcribeSpeech"),
        generateImage: async () => notImplemented("generateImage"),
        produceVideo: async () => notImplemented("produceVideo"),
        generateModel: async () => notImplemented("generateModel"),
        checkBalance,
        currentBalance,
    };
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import type { AIPlatform } from "./index.js";
/** Configuration for {@link PixelVersePlatform}. */
export interface PixelVersePlatformProps {
    /** API key sent as the `API-KEY` header on every PixelVerse request. */
    pixelVerseAPIKey: string;
}
/**
 * Create a PixelVerse-backed AIPlatform adapter. Implements image-to-video
 * production and balance checks; other operations reject as unimplemented.
 * Resolves once the initial account balance has been fetched.
 */
export declare function PixelVersePlatform(userId: string, props: PixelVersePlatformProps): Promise<AIPlatform>;
//# sourceMappingURL=pixelverse.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"pixelverse.d.ts","sourceRoot":"","sources":["../../src/platform/pixelverse.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EACV,UAAU,EAQX,MAAM,YAAY,CAAC;AAmBpB,MAAM,WAAW,uBAAuB;IACtC,gBAAgB,EAAE,MAAM,CAAC;CAC1B;AAED,wBAAsB,kBAAkB,CACtC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,uBAAuB,GAC7B,OAAO,CAAC,UAAU,CAAC,CA8QrB"}
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
import { v4 as uuidv4 } from "uuid";
import { performance } from "perf_hooks";
/**
 * Build a PixelVerse-backed AIPlatform adapter.
 *
 * Implements `produceVideo` (image upload -> video generation -> polling) and
 * `checkBalance`; every other operation rejects with "Not implemented".
 * All requests go through the `/pixelapi` proxy (see vite.config.ts) to work
 * around CORS during local development.
 *
 * @param userId requestor id stamped onto completion records
 * @param props  `{ pixelVerseAPIKey }` credentials
 * @returns resolves once the initial account balance has been fetched
 */
export async function PixelVersePlatform(userId, props) {
    /** Headers common to every PixelVerse request. */
    function providerHeaders(apiKey) {
        const headers = new Headers();
        headers.append("API-KEY", apiKey);
        headers.append("Ai-trace-id", uuidv4());
        // NOTE(review): Access-Control-Allow-Origin is a *response* header and
        // has no effect when sent on a request; kept for parity with the
        // provider examples — confirm before removing.
        headers.append("Access-Control-Allow-Origin", "*");
        return headers;
    }
    /** Upload a File (or a fetchable URL) and return the provider response. */
    async function uploadImage(image, apiKey) {
        const headers = providerHeaders(apiKey);
        const formData = new FormData();
        if (image instanceof File) {
            // Fix: forward the real filename (was hard-coded to the empty
            // string) so the multipart part carries a usable name.
            formData.append("image", image, image.name);
        }
        else {
            const blob = await fetch(image.toString()).then((r) => r.blob());
            formData.append("image", blob, "image-from-url");
        }
        // pixelapi is proxied through the vite.config.ts file
        // to avoid CORS issues and to allow for local development
        const response = await fetch("/pixelapi/openapi/v2/image/upload", {
            method: "POST",
            headers,
            body: formData,
            redirect: "follow",
        });
        return await response.json();
    }
    /** Start image-to-video generation; optional knobs are sent only when provided. */
    async function generateVideo(imgId, prompt, apiKey, seed, template_id, negative_prompt) {
        const headers = providerHeaders(apiKey);
        headers.append("Content-Type", "application/json");
        headers.append("Accept", "application/json");
        const values = {
            duration: 5,
            img_id: imgId,
            model: "v3.5",
            motion_mode: "normal",
            prompt: prompt,
            quality: "720p",
            water_mark: false,
        };
        if (seed) {
            values.seed = seed;
        }
        if (template_id) {
            values.template_id = template_id;
        }
        if (negative_prompt) {
            values.negative_prompt = negative_prompt;
        }
        const response = await fetch("/pixelapi/openapi/v2/video/img/generate", {
            method: "POST",
            headers,
            referrerPolicy: "no-referrer",
            body: JSON.stringify(values),
        });
        return await response.json();
    }
    /** Fetch a single status snapshot for a generation job. */
    async function checkVideoStatus(id, apiKey) {
        const headers = providerHeaders(apiKey);
        headers.append("Accept", "application/json");
        const response = await fetch(`/pixelapi/openapi/v2/video/result/${id}`, {
            method: "GET",
            headers,
            referrerPolicy: "no-referrer",
        });
        return await response.json();
    }
    /** Base shape shared by all completion records this platform emits. */
    function baseCompletionData(type, model, requestor, duration) {
        return {
            partitionKey: requestor,
            id: crypto.randomUUID(),
            type,
            model,
            createdAt: new Date().toISOString(),
            durationMs: duration,
            usage: {},
        };
    }
    // Unimplemented operations reject uniformly so callers can detect gaps.
    const chatWithAI = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const synthesizeSpeech = (_userId, _input, _voice, _context, _model) => {
        void [_userId, _input, _voice, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const transcribeSpeech = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const generateImage = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    /**
     * Upload the image, start generation, then poll until the video is ready.
     * Resolves to a completion record whose `url` points at the finished video.
     */
    const produceVideo = (userId, input, image, context, model) => {
        const start = performance.now();
        return uploadImage(image, props.pixelVerseAPIKey)
            .then((uploadResult) => {
                const imageId = uploadResult?.Resp?.id;
                if (!imageId)
                    throw new Error("Invalid image upload response.");
                return generateVideo(imageId, input, props.pixelVerseAPIKey);
            })
            .then((generated) => {
                const videoId = generated?.Resp?.id;
                if (!videoId)
                    throw new Error("Video generation did not return a valid ID.");
                return waitForVideoCompletion(videoId, props.pixelVerseAPIKey);
            })
            .then((videoUrl) => {
                const duration = performance.now() - start;
                const base = baseCompletionData("video", model, userId, duration);
                return { ...base, url: new URL(videoUrl) };
            })
            .catch((err) => {
                // Re-wrap so upstream sees a single, labelled failure point.
                throw new Error(`produceVideo failed: ${err.message}`);
            });
    };
    /** Poll until the provider reports status === 1, then return the video URL. */
    async function waitForVideoCompletion(videoId, apiKey, maxRetries = 20, delayMs = 3000) {
        for (let attempt = 0; attempt < maxRetries; attempt++) {
            await new Promise((res) => setTimeout(res, delayMs));
            try {
                const videoCheck = await checkVideoStatus(videoId, apiKey);
                if (videoCheck?.Resp?.status === 1) {
                    const url = videoCheck?.Resp?.url;
                    if (!url)
                        throw new Error("Video marked complete but no URL returned.");
                    return url;
                }
            }
            catch (err) {
                // Transient poll failures are logged and retried until the budget runs out.
                console.warn(`Attempt ${attempt + 1} failed: ${err.message}`);
            }
        }
        throw new Error("Timed out waiting for video to complete.");
    }
    /** Fetch the account balance (monthly credits + package credits). */
    const checkBalance = (userId) => {
        const start = performance.now();
        const headers = providerHeaders(props.pixelVerseAPIKey);
        headers.append("Accept", "application/json");
        headers.append("Content-Type", "application/json");
        return fetch("/pixelapi/openapi/v2/account/balance", {
            method: "GET",
            headers,
            referrerPolicy: "no-referrer",
        })
            .then(async (res) => (await res.json()))
            .then((data) => {
                if (!data?.Resp) {
                    throw new Error("Invalid balance response");
                }
                const duration = performance.now() - start;
                const base = baseCompletionData("balanceCompletion", "", userId, duration);
                const monthly = data.Resp.credit_monthly ?? 0;
                const pkg = data.Resp.credit_package ?? 0;
                return { ...base, balance: monthly + pkg };
            })
            .catch((err) => {
                throw new Error(`checkBalance failed: ${err.message}`);
            });
    };
    // Fix: the original called React's useState() here. Hooks may only run
    // inside a rendering function component; calling one in this plain async
    // factory throws "Invalid hook call" at runtime. A plain constant is the
    // behavioral equivalent (the value was never re-rendered anyway), and the
    // unused React import is dropped with it.
    const currentBalance = (await checkBalance(userId)).balance ?? 0;
    return {
        chatWithAI,
        synthesizeSpeech,
        transcribeSpeech,
        generateImage,
        produceVideo,
        checkBalance,
        currentBalance,
    };
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Props accepted by the PixelverseEditor component (CJS build).
 *
 * NOTE(review): the current implementation only consumes `apiKey`; the other
 * fields and callbacks declare the intended API surface but are not yet wired
 * up (see pixelverseeditor.js) — confirm before relying on them.
 */
interface PixelverseEditorProps {
    /** PixelVerse API key, forwarded to the Balance widget. */
    apiKey: string;
    /** Intended callback with the generated video URL (not invoked yet). */
    onVideoGenerated?: (videoUrl: string) => void;
    /** Intended callback with the uploaded image URL (not invoked yet). */
    onImageUpload?: (imageUrl: string) => void;
    /** Generation prompt (not consumed yet). */
    prompt?: string;
    /** Intended change notifier for `prompt` (not invoked yet). */
    onPromptChange?: (prompt: string) => void;
    /** Negative prompt (not consumed yet). */
    negative_prompt?: string;
    /** Intended change notifier for `negative_prompt` (not invoked yet). */
    onNegativePromptChange?: (negative_prompt: string) => void;
    /** Provider template id (not consumed yet). */
    template_id?: string;
    /** Intended change notifier for `template_id` (not invoked yet). */
    onTemplateIdChange?: (template_id: string) => void;
    /** Deterministic generation seed (not consumed yet). */
    seed?: number;
    /** Intended change notifier for `seed` (not invoked yet). */
    onSeedChange?: (seed: number) => void;
}
/** Image-to-video editor UI backed by the PixelVerse API. */
export declare function PixelverseEditor({ apiKey, onVideoGenerated, prompt, negative_prompt, template_id, seed, }: PixelverseEditorProps): import("react/jsx-runtime").JSX.Element;
export {};
//# sourceMappingURL=pixelverseeditor.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"pixelverseeditor.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/pixelverseeditor.tsx"],"names":[],"mappings":"AAKA,UAAU,qBAAqB;IAC7B,MAAM,EAAE,MAAM,CAAC;IACf,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC9C,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC3C,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC;IAC1C,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,sBAAsB,CAAC,EAAE,CAAC,eAAe,EAAE,MAAM,KAAK,IAAI,CAAC;IAC3D,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,kBAAkB,CAAC,EAAE,CAAC,WAAW,EAAE,MAAM,KAAK,IAAI,CAAC;IACnD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;CACvC;AAED,wBAAgB,gBAAgB,CAAC,EAC/B,MAAM,EACN,gBAAgB,EAChB,MAAM,EACN,eAAe,EACf,WAAW,EACX,IAAI,GACL,EAAE,qBAAqB,2CA+CvB"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.PixelverseEditor = PixelverseEditor;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const balance_js_1 = __importDefault(require("./balance.js"));
/**
 * Image-to-video editor UI for the PixelVerse provider (CJS build).
 *
 * Only `apiKey` is consumed today (forwarded to the Balance widget); the
 * upload/generate handlers are still stubs, so the remaining props are
 * accepted but not yet acted upon.
 */
function PixelverseEditor({ apiKey, onVideoGenerated, prompt, negative_prompt, template_id, seed, }) {
    const [videoUrl, setVideoUrl] = (0, react_1.useState)("");
    const [videoId, setVideoId] = (0, react_1.useState)(null);
    const [selectedFile, setSelectedFile] = (0, react_1.useState)(null);
    const [loading, setLoading] = (0, react_1.useState)(false);
    // 0 = awaiting upload, 1 = video ready (drives which panel renders below).
    const [videoStatus, setVideoStatus] = (0, react_1.useState)(0);
    // Upload/generation pipeline stub. Takes the file as an argument because a
    // state value set in the same event tick is not visible here yet.
    const handleUploadProcess = async (file) => {
        void file; // not implemented yet
    };
    const handleFileChange = (e) => {
        const file = e.target.files?.[0];
        if (file) {
            setSelectedFile(file);
            // Fix: pass the file explicitly — React has not committed
            // `selectedFile` at this point, so reading the state here would
            // yield the stale (null) value. `void` marks the intentionally
            // un-awaited promise.
            void handleUploadProcess(file);
        }
    };
    const handleRegenerate = () => {
        // not implemented yet
    };
    return ((0, jsx_runtime_1.jsxs)("div", { children: [(0, jsx_runtime_1.jsx)(balance_js_1.default, { apiKey: apiKey }), videoStatus === 0 && !selectedFile && ((0, jsx_runtime_1.jsxs)("div", { children: [(0, jsx_runtime_1.jsx)("p", { children: "Drag/Drop or Click HERE to upload" }), (0, jsx_runtime_1.jsx)("input", { title: "Upload Image", type: "file", accept: ".jpg,.jpeg,.png,.webp", onChange: handleFileChange })] })), loading && (0, jsx_runtime_1.jsx)("div", { children: "Loading..." }), videoStatus === 0 && selectedFile && ((0, jsx_runtime_1.jsx)("button", { onClick: () => void handleUploadProcess(selectedFile), children: "Start Upload" })), videoStatus === 1 && ((0, jsx_runtime_1.jsxs)("div", { children: [(0, jsx_runtime_1.jsx)("video", { src: videoUrl, controls: true }), (0, jsx_runtime_1.jsx)("button", { onClick: handleRegenerate, children: "Regenerate" })] }))] }));
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import type { AIPlatform } from "./index.js";
/** Configuration for {@link OpenAIPlatform} (CJS build). */
export interface OpenAIPlatformProps {
    /** Required OpenAI API key; must be non-blank after trimming. */
    openaiAPIKey: string;
    /** Optional OpenAI project identifier. */
    openaiProjectKey?: string;
    /** Optional OpenAI organization id. */
    openaiOrgID?: string;
}
/**
 * Create an OpenAI-backed AIPlatform adapter.
 * NOTE(review): all operations are currently unimplemented stubs that reject.
 * @throws Error when `openaiAPIKey` is blank.
 */
export declare function OpenAIPlatform(userId: string, props: OpenAIPlatformProps): Promise<AIPlatform>;
//# sourceMappingURL=openai.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/platform/openai.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,UAAU,EAGX,MAAM,YAAY,CAAC;AAEpB,MAAM,WAAW,mBAAmB;IAClC,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAmBD,wBAAsB,cAAc,CAClC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,mBAAmB,GACzB,OAAO,CAAC,UAAU,CAAC,CAsCrB"}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAIPlatform = OpenAIPlatform;
const openai_1 = __importDefault(require("openai"));
/**
 * Base completion-record shape shared by all OpenAI platform results:
 * partition key, fresh UUID, record type/model, ISO timestamp, duration,
 * and an empty usage map.
 */
function createCompletionData(type, model, requestor, durationMs) {
    const timestamp = new Date().toISOString();
    return Object.assign({ partitionKey: requestor, id: crypto.randomUUID() }, { type, model, createdAt: timestamp, durationMs, usage: {} });
}
/**
 * Construct the OpenAI-backed AIPlatform adapter (CJS build). Every
 * operation is a stub that rejects; checkBalance resolves with zero.
 * @throws Error when `openaiAPIKey` is blank after trimming.
 */
async function OpenAIPlatform(userId, props) {
    const apiKey = props.openaiAPIKey.trim();
    if (!apiKey) {
        throw new Error("openaiAPIKey is required.");
    }
    // Constructed eagerly so bad configuration fails fast; unused while the
    // operations below remain stubs.
    const openai = new openai_1.default({
        apiKey,
        project: props.openaiProjectKey?.trim() || undefined,
        organization: props.openaiOrgID?.trim() || undefined,
        dangerouslyAllowBrowser: false,
    });
    void openai;
    const notImplemented = (operation) => Promise.reject(new Error(`OpenAIPlatform "${operation}" is not implemented yet.`));
    // Placeholder balance: always zero until real accounting is wired up.
    const checkBalance = async (requestorId) => ({
        ...createCompletionData("balanceCompletion", "", requestorId, 0),
        balance: 0.0,
    });
    const { balance: currentBalance } = await checkBalance(userId);
    return {
        chatWithAI: async () => notImplemented("chatWithAI"),
        synthesizeSpeech: async () => notImplemented("synthesizeSpeech"),
        transcribeSpeech: async () => notImplemented("transcribeSpeech"),
        generateImage: async () => notImplemented("generateImage"),
        produceVideo: async () => notImplemented("produceVideo"),
        generateModel: async () => notImplemented("generateModel"),
        checkBalance,
        currentBalance,
    };
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import type { AIPlatform } from "./index.js";
/** Configuration for {@link PixelVersePlatform} (CJS build). */
export interface PixelVersePlatformProps {
    /** API key sent as the `API-KEY` header on every PixelVerse request. */
    pixelVerseAPIKey: string;
}
/**
 * Create a PixelVerse-backed AIPlatform adapter. Implements image-to-video
 * production and balance checks; other operations reject as unimplemented.
 * Resolves once the initial account balance has been fetched.
 */
export declare function PixelVersePlatform(userId: string, props: PixelVersePlatformProps): Promise<AIPlatform>;
//# sourceMappingURL=pixelverse.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"pixelverse.d.ts","sourceRoot":"","sources":["../../src/platform/pixelverse.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EACV,UAAU,EAQX,MAAM,YAAY,CAAC;AAmBpB,MAAM,WAAW,uBAAuB;IACtC,gBAAgB,EAAE,MAAM,CAAC;CAC1B;AAED,wBAAsB,kBAAkB,CACtC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,uBAAuB,GAC7B,OAAO,CAAC,UAAU,CAAC,CA8QrB"}
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PixelVersePlatform = PixelVersePlatform;
const uuid_1 = require("uuid");
const perf_hooks_1 = require("perf_hooks");
/**
 * Build a PixelVerse-backed AIPlatform adapter (CJS build).
 *
 * Implements `produceVideo` (image upload -> video generation -> polling) and
 * `checkBalance`; every other operation rejects with "Not implemented".
 * All requests go through the `/pixelapi` proxy (see vite.config.ts) to work
 * around CORS during local development.
 *
 * @param userId requestor id stamped onto completion records
 * @param props  `{ pixelVerseAPIKey }` credentials
 * @returns resolves once the initial account balance has been fetched
 */
async function PixelVersePlatform(userId, props) {
    /** Headers common to every PixelVerse request. */
    function providerHeaders(apiKey) {
        const headers = new Headers();
        headers.append("API-KEY", apiKey);
        headers.append("Ai-trace-id", (0, uuid_1.v4)());
        // NOTE(review): Access-Control-Allow-Origin is a *response* header and
        // has no effect when sent on a request; kept for parity with the
        // provider examples — confirm before removing.
        headers.append("Access-Control-Allow-Origin", "*");
        return headers;
    }
    /** Upload a File (or a fetchable URL) and return the provider response. */
    async function uploadImage(image, apiKey) {
        const headers = providerHeaders(apiKey);
        const formData = new FormData();
        if (image instanceof File) {
            // Fix: forward the real filename (was hard-coded to the empty
            // string) so the multipart part carries a usable name.
            formData.append("image", image, image.name);
        }
        else {
            const blob = await fetch(image.toString()).then((r) => r.blob());
            formData.append("image", blob, "image-from-url");
        }
        // pixelapi is proxied through the vite.config.ts file
        // to avoid CORS issues and to allow for local development
        const response = await fetch("/pixelapi/openapi/v2/image/upload", {
            method: "POST",
            headers,
            body: formData,
            redirect: "follow",
        });
        return await response.json();
    }
    /** Start image-to-video generation; optional knobs are sent only when provided. */
    async function generateVideo(imgId, prompt, apiKey, seed, template_id, negative_prompt) {
        const headers = providerHeaders(apiKey);
        headers.append("Content-Type", "application/json");
        headers.append("Accept", "application/json");
        const values = {
            duration: 5,
            img_id: imgId,
            model: "v3.5",
            motion_mode: "normal",
            prompt: prompt,
            quality: "720p",
            water_mark: false,
        };
        if (seed) {
            values.seed = seed;
        }
        if (template_id) {
            values.template_id = template_id;
        }
        if (negative_prompt) {
            values.negative_prompt = negative_prompt;
        }
        const response = await fetch("/pixelapi/openapi/v2/video/img/generate", {
            method: "POST",
            headers,
            referrerPolicy: "no-referrer",
            body: JSON.stringify(values),
        });
        return await response.json();
    }
    /** Fetch a single status snapshot for a generation job. */
    async function checkVideoStatus(id, apiKey) {
        const headers = providerHeaders(apiKey);
        headers.append("Accept", "application/json");
        const response = await fetch(`/pixelapi/openapi/v2/video/result/${id}`, {
            method: "GET",
            headers,
            referrerPolicy: "no-referrer",
        });
        return await response.json();
    }
    /** Base shape shared by all completion records this platform emits. */
    function baseCompletionData(type, model, requestor, duration) {
        return {
            partitionKey: requestor,
            id: crypto.randomUUID(),
            type,
            model,
            createdAt: new Date().toISOString(),
            durationMs: duration,
            usage: {},
        };
    }
    // Unimplemented operations reject uniformly so callers can detect gaps.
    const chatWithAI = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const synthesizeSpeech = (_userId, _input, _voice, _context, _model) => {
        void [_userId, _input, _voice, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const transcribeSpeech = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    const generateImage = (_userId, _input, _context, _model) => {
        void [_userId, _input, _context, _model];
        return Promise.reject(new Error("Not implemented"));
    };
    /**
     * Upload the image, start generation, then poll until the video is ready.
     * Resolves to a completion record whose `url` points at the finished video.
     */
    const produceVideo = (userId, input, image, context, model) => {
        const start = perf_hooks_1.performance.now();
        return uploadImage(image, props.pixelVerseAPIKey)
            .then((uploadResult) => {
                const imageId = uploadResult?.Resp?.id;
                if (!imageId)
                    throw new Error("Invalid image upload response.");
                return generateVideo(imageId, input, props.pixelVerseAPIKey);
            })
            .then((generated) => {
                const videoId = generated?.Resp?.id;
                if (!videoId)
                    throw new Error("Video generation did not return a valid ID.");
                return waitForVideoCompletion(videoId, props.pixelVerseAPIKey);
            })
            .then((videoUrl) => {
                const duration = perf_hooks_1.performance.now() - start;
                const base = baseCompletionData("video", model, userId, duration);
                return { ...base, url: new URL(videoUrl) };
            })
            .catch((err) => {
                // Re-wrap so upstream sees a single, labelled failure point.
                throw new Error(`produceVideo failed: ${err.message}`);
            });
    };
    /** Poll until the provider reports status === 1, then return the video URL. */
    async function waitForVideoCompletion(videoId, apiKey, maxRetries = 20, delayMs = 3000) {
        for (let attempt = 0; attempt < maxRetries; attempt++) {
            await new Promise((res) => setTimeout(res, delayMs));
            try {
                const videoCheck = await checkVideoStatus(videoId, apiKey);
                if (videoCheck?.Resp?.status === 1) {
                    const url = videoCheck?.Resp?.url;
                    if (!url)
                        throw new Error("Video marked complete but no URL returned.");
                    return url;
                }
            }
            catch (err) {
                // Transient poll failures are logged and retried until the budget runs out.
                console.warn(`Attempt ${attempt + 1} failed: ${err.message}`);
            }
        }
        throw new Error("Timed out waiting for video to complete.");
    }
    /** Fetch the account balance (monthly credits + package credits). */
    const checkBalance = (userId) => {
        const start = perf_hooks_1.performance.now();
        const headers = providerHeaders(props.pixelVerseAPIKey);
        headers.append("Accept", "application/json");
        headers.append("Content-Type", "application/json");
        return fetch("/pixelapi/openapi/v2/account/balance", {
            method: "GET",
            headers,
            referrerPolicy: "no-referrer",
        })
            .then(async (res) => (await res.json()))
            .then((data) => {
                if (!data?.Resp) {
                    throw new Error("Invalid balance response");
                }
                const duration = perf_hooks_1.performance.now() - start;
                const base = baseCompletionData("balanceCompletion", "", userId, duration);
                const monthly = data.Resp.credit_monthly ?? 0;
                const pkg = data.Resp.credit_package ?? 0;
                return { ...base, balance: monthly + pkg };
            })
            .catch((err) => {
                throw new Error(`checkBalance failed: ${err.message}`);
            });
    };
    // Fix: the original called React's useState() here (and required "react").
    // Hooks may only run inside a rendering function component; calling one in
    // this plain async factory throws "Invalid hook call" at runtime. A plain
    // constant is the behavioral equivalent, and the unused react require is
    // dropped with it.
    const currentBalance = (await checkBalance(userId)).balance ?? 0;
    return {
        chatWithAI,
        synthesizeSpeech,
        transcribeSpeech,
        generateImage,
        produceVideo,
        checkBalance,
        currentBalance,
    };
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
# ADR-0004: Dual ESM and CJS Runtime Compatibility
|
|
2
|
+
|
|
3
|
+
- Date: 2026-03-01
|
|
4
|
+
- Status: Accepted
|
|
5
|
+
|
|
6
|
+
## Context
|
|
7
|
+
|
|
8
|
+
`@plasius/ai` publishes dual ESM and CJS entry points. This package currently emits CJS output under `dist-cjs/*.js` while the repository root uses `type: module`. Without an explicit CommonJS boundary for `dist-cjs`, Node can interpret those files as ESM and fail at runtime for `require(...)` consumers.
|
|
9
|
+
|
|
10
|
+
## Decision
|
|
11
|
+
|
|
12
|
+
Keep dual output and enforce a runtime-compatible CJS boundary by:
|
|
13
|
+
|
|
14
|
+
- generating `dist-cjs/package.json` with `{ "type": "commonjs" }` as part of `build:cjs`;
|
|
15
|
+
- validating this metadata in `pack:check`;
|
|
16
|
+
- ensuring the packed artifact includes `dist-cjs/package.json`.
|
|
17
|
+
|
|
18
|
+
## Consequences
|
|
19
|
+
|
|
20
|
+
- Node CommonJS consumers can reliably load `@plasius/ai` via `require(...)`.
|
|
21
|
+
- ESM consumers remain unchanged.
|
|
22
|
+
- CD publish checks fail fast if CJS runtime compatibility metadata is missing.
|
package/docs/adrs/index.md
CHANGED
|
@@ -3,3 +3,4 @@
|
|
|
3
3
|
- [ADR-0001: Standalone @plasius/ai Package Scope](./adr-0001-ai-package-scope.md)
|
|
4
4
|
- [ADR-0002: Public Repository Governance Baseline](./adr-0002-public-repo-governance.md)
|
|
5
5
|
- [ADR-0003: Contracts-First Documentation Baseline](./adr-0003-contracts-first-documentation.md)
|
|
6
|
+
- [ADR-0004: Dual ESM and CJS Runtime Compatibility](./adr-0004-dual-esm-cjs-runtime-compatibility.md)
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@plasius/ai",
|
|
3
|
-
"version": "1.1.
|
|
3
|
+
"version": "1.1.11",
|
|
4
4
|
"description": "Plasius AI functions providing chatbot, text-to-speech, speech-to-text, and AI-generated images and videos",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"chatbot",
|
|
@@ -29,7 +29,7 @@
|
|
|
29
29
|
"audit:npm": "npm audit --audit-level=high --omit=dev",
|
|
30
30
|
"audit:test": "vitest run --coverage",
|
|
31
31
|
"audit:all": "npm-run-all -l audit:ts audit:eslint audit:deps audit:npm audit:test",
|
|
32
|
-
"build:cjs": "tsc -p tsconfig.json --module commonjs --moduleResolution node --outDir dist-cjs --tsBuildInfoFile dist-cjs/tsconfig.tsbuildinfo",
|
|
32
|
+
"build:cjs": "tsc -p tsconfig.json --module commonjs --moduleResolution node --outDir dist-cjs --tsBuildInfoFile dist-cjs/tsconfig.tsbuildinfo && node scripts/write-cjs-package-json.cjs",
|
|
33
33
|
"lint": "eslint . --max-warnings=0",
|
|
34
34
|
"prepare": "npm run build",
|
|
35
35
|
"demo:run": "npm run build && node demo/example.mjs",
|
|
@@ -40,8 +40,8 @@
|
|
|
40
40
|
"license": "MIT",
|
|
41
41
|
"dependencies": {
|
|
42
42
|
"@plasius/entity-manager": "^1.0.6",
|
|
43
|
-
"@plasius/error": "^1.0.
|
|
44
|
-
"@plasius/profile": "^1.0.
|
|
43
|
+
"@plasius/error": "^1.0.6",
|
|
44
|
+
"@plasius/profile": "^1.0.9",
|
|
45
45
|
"@plasius/schema": "^1.2.2"
|
|
46
46
|
},
|
|
47
47
|
"peerDependencies": {
|