@plasius/ai 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98) hide show
  1. package/CHANGELOG.md +72 -0
  2. package/CODE_OF_CONDUCT.md +79 -0
  3. package/CONTRIBUTORS.md +27 -0
  4. package/LICENSE +21 -0
  5. package/README.md +132 -0
  6. package/SECURITY.md +17 -0
  7. package/dist/components/pixelverse/balance.d.ts +4 -0
  8. package/dist/components/pixelverse/balance.d.ts.map +1 -0
  9. package/dist/components/pixelverse/balance.js +40 -0
  10. package/dist/components/pixelverse/index.d.ts +3 -0
  11. package/dist/components/pixelverse/index.d.ts.map +1 -0
  12. package/dist/components/pixelverse/index.js +2 -0
  13. package/dist/components/pixelverse/pixelverseeditor.d.ts +16 -0
  14. package/dist/components/pixelverse/pixelverseeditor.d.ts.map +1 -0
  15. package/dist/components/pixelverse/pixelverseeditor.js +21 -0
  16. package/dist/index.d.ts +2 -0
  17. package/dist/index.d.ts.map +1 -0
  18. package/dist/index.js +1 -0
  19. package/dist/lib/chatWithAI.d.ts +2 -0
  20. package/dist/lib/chatWithAI.d.ts.map +1 -0
  21. package/dist/lib/chatWithAI.js +1 -0
  22. package/dist/lib/generateImage.d.ts +2 -0
  23. package/dist/lib/generateImage.d.ts.map +1 -0
  24. package/dist/lib/generateImage.js +1 -0
  25. package/dist/lib/synthesizeSpeech.d.ts +2 -0
  26. package/dist/lib/synthesizeSpeech.d.ts.map +1 -0
  27. package/dist/lib/synthesizeSpeech.js +1 -0
  28. package/dist/lib/transcribeSpeech.d.ts +2 -0
  29. package/dist/lib/transcribeSpeech.d.ts.map +1 -0
  30. package/dist/lib/transcribeSpeech.js +1 -0
  31. package/dist/platform/index.d.ts +76 -0
  32. package/dist/platform/index.d.ts.map +1 -0
  33. package/dist/platform/index.js +125 -0
  34. package/dist/platform/openai.d.ts +8 -0
  35. package/dist/platform/openai.d.ts.map +1 -0
  36. package/dist/platform/openai.js +61 -0
  37. package/dist/platform/pixelverse.d.ts +6 -0
  38. package/dist/platform/pixelverse.d.ts.map +1 -0
  39. package/dist/platform/pixelverse.js +196 -0
  40. package/dist-cjs/components/pixelverse/balance.d.ts +4 -0
  41. package/dist-cjs/components/pixelverse/balance.d.ts.map +1 -0
  42. package/dist-cjs/components/pixelverse/balance.js +46 -0
  43. package/dist-cjs/components/pixelverse/index.d.ts +3 -0
  44. package/dist-cjs/components/pixelverse/index.d.ts.map +1 -0
  45. package/dist-cjs/components/pixelverse/index.js +18 -0
  46. package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts +16 -0
  47. package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts.map +1 -0
  48. package/dist-cjs/components/pixelverse/pixelverseeditor.js +27 -0
  49. package/dist-cjs/index.d.ts +2 -0
  50. package/dist-cjs/index.d.ts.map +1 -0
  51. package/dist-cjs/index.js +17 -0
  52. package/dist-cjs/lib/chatWithAI.d.ts +1 -0
  53. package/dist-cjs/lib/chatWithAI.d.ts.map +1 -0
  54. package/dist-cjs/lib/chatWithAI.js +1 -0
  55. package/dist-cjs/lib/generateImage.d.ts +1 -0
  56. package/dist-cjs/lib/generateImage.d.ts.map +1 -0
  57. package/dist-cjs/lib/generateImage.js +1 -0
  58. package/dist-cjs/lib/synthesizeSpeech.d.ts +1 -0
  59. package/dist-cjs/lib/synthesizeSpeech.d.ts.map +1 -0
  60. package/dist-cjs/lib/synthesizeSpeech.js +1 -0
  61. package/dist-cjs/lib/transcribeSpeech.d.ts +1 -0
  62. package/dist-cjs/lib/transcribeSpeech.d.ts.map +1 -0
  63. package/dist-cjs/lib/transcribeSpeech.js +1 -0
  64. package/dist-cjs/platform/index.d.ts +76 -0
  65. package/dist-cjs/platform/index.d.ts.map +1 -0
  66. package/dist-cjs/platform/index.js +128 -0
  67. package/dist-cjs/platform/openai.d.ts +8 -0
  68. package/dist-cjs/platform/openai.d.ts.map +1 -0
  69. package/dist-cjs/platform/openai.js +67 -0
  70. package/dist-cjs/platform/pixelverse.d.ts +6 -0
  71. package/dist-cjs/platform/pixelverse.d.ts.map +1 -0
  72. package/dist-cjs/platform/pixelverse.js +199 -0
  73. package/docs/adrs/adr-0001-ai-package-scope.md +21 -0
  74. package/docs/adrs/adr-0002-public-repo-governance.md +24 -0
  75. package/docs/adrs/adr-0003-contracts-first-documentation.md +25 -0
  76. package/docs/adrs/adr-template.md +35 -0
  77. package/docs/api-reference.md +64 -0
  78. package/docs/architecture.md +21 -0
  79. package/docs/providers.md +26 -0
  80. package/legal/CLA-REGISTRY.csv +1 -0
  81. package/legal/CLA.md +22 -0
  82. package/legal/CORPORATE_CLA.md +57 -0
  83. package/legal/INDIVIDUAL_CLA.md +91 -0
  84. package/package.json +117 -0
  85. package/src/components/pixelverse/balance.module.css +6 -0
  86. package/src/components/pixelverse/balance.tsx +65 -0
  87. package/src/components/pixelverse/index.ts +2 -0
  88. package/src/components/pixelverse/pixelverseeditor.mocule.css +0 -0
  89. package/src/components/pixelverse/pixelverseeditor.tsx +74 -0
  90. package/src/global.d.ts +9 -0
  91. package/src/index.ts +1 -0
  92. package/src/lib/chatWithAI.ts +0 -0
  93. package/src/lib/generateImage.ts +0 -0
  94. package/src/lib/synthesizeSpeech.ts +0 -0
  95. package/src/lib/transcribeSpeech.ts +0 -0
  96. package/src/platform/index.ts +237 -0
  97. package/src/platform/openai.ts +123 -0
  98. package/src/platform/pixelverse.ts +309 -0
@@ -0,0 +1,76 @@
1
+ import type { FieldBuilder } from "@plasius/schema";
2
+ export declare const completionSchema: import("@plasius/schema").Schema<{
3
+ id: FieldBuilder<string, string>;
4
+ type: FieldBuilder<string, string>;
5
+ model: FieldBuilder<string, string>;
6
+ durationMs: FieldBuilder<number, number>;
7
+ createdAt: FieldBuilder<string, string>;
8
+ partitionKey: FieldBuilder<string, string>;
9
+ usage: FieldBuilder<Record<string, number>, Record<string, FieldBuilder<number, number>>>;
10
+ }>;
11
+ export interface Completion {
12
+ id: string;
13
+ partitionKey: string;
14
+ type: string;
15
+ model: string;
16
+ durationMs: number;
17
+ createdAt: string;
18
+ usage?: Record<string, number>;
19
+ }
20
+ export declare const chatCompletionSchema: import("@plasius/schema").Schema<{
21
+ message: FieldBuilder<string, string>;
22
+ outputUser: FieldBuilder<string, string>;
23
+ }>;
24
+ export interface ChatCompletion extends Completion {
25
+ message: string;
26
+ outputUser: string;
27
+ }
28
+ export declare const textCompletionSchema: import("@plasius/schema").Schema<{
29
+ message: FieldBuilder<string, string>;
30
+ }>;
31
+ export interface TextCompletion extends Completion {
32
+ message: string;
33
+ }
34
+ export declare const imageCompletionSchema: import("@plasius/schema").Schema<{
35
+ url: FieldBuilder<URL, string>;
36
+ }>;
37
+ export interface ImageCompletion extends Completion {
38
+ url: URL;
39
+ }
40
+ export declare const speechCompletionSchema: import("@plasius/schema").Schema<{
41
+ url: FieldBuilder<URL, string>;
42
+ }>;
43
+ export interface SpeechCompletion extends Completion {
44
+ url: URL;
45
+ }
46
+ export declare const videoCompletionSchema: import("@plasius/schema").Schema<{
47
+ url: FieldBuilder<URL, string>;
48
+ }>;
49
+ export interface VideoCompletion extends Completion {
50
+ url: URL;
51
+ }
52
+ export declare const balanceCompletionSchema: import("@plasius/schema").Schema<{
53
+ balance: FieldBuilder<number, number>;
54
+ }>;
55
+ export interface BalanceCompletion extends Completion {
56
+ balance: number;
57
+ }
58
+ export declare enum AICapability {
59
+ Chat = 0,
60
+ Text = 1,
61
+ Speech = 2,
62
+ Image = 3,
63
+ Video = 4,
64
+ Balance = 5
65
+ }
66
+ export interface AIPlatform {
67
+ canHandle?: (userId: string, capabilities: AICapability[]) => Promise<boolean>;
68
+ chatWithAI: (userId: string, input: string, context: string, model: string) => Promise<ChatCompletion>;
69
+ synthesizeSpeech: (userId: string, input: string, voice: string, context: string, model: string) => Promise<SpeechCompletion>;
70
+ transcribeSpeech: (userId: string, input: Buffer, context: string, model: string) => Promise<TextCompletion>;
71
+ generateImage: (userId: string, input: string, context: string, model: string) => Promise<ImageCompletion>;
72
+ produceVideo: (userId: string, input: string, image: URL, context: string, model: string) => Promise<VideoCompletion>;
73
+ checkBalance: (userId: string) => Promise<BalanceCompletion>;
74
+ currentBalance: number;
75
+ }
76
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/platform/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAe,MAAM,iBAAiB,CAAC;AAEjE,eAAO,MAAM,gBAAgB;;;;;;;;EA0C5B,CAAC;AAEF,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAC;IACX,YAAY,EAAE,MAAM,CAAC;IACrB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAChC;AAED,eAAO,MAAM,oBAAoB;;;EAmBhC,CAAC;AAEF,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,eAAO,MAAM,oBAAoB;;EAehC,CAAC;AAEF,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,eAAO,MAAM,qBAAqB;;EAgBjC,CAAC;AAEF,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD,GAAG,EAAE,GAAG,CAAC;CACV;AAED,eAAO,MAAM,sBAAsB;;EAgBlC,CAAC;AAEF,MAAM,WAAW,gBAAiB,SAAQ,UAAU;IAClD,GAAG,EAAE,GAAG,CAAC;CACV;AAED,eAAO,MAAM,qBAAqB;;EAgBjC,CAAC;AAEF,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD,GAAG,EAAE,GAAG,CAAC;CACV;AAED,eAAO,MAAM,uBAAuB;;EAYnC,CAAC;AAEF,MAAM,WAAW,iBAAkB,SAAQ,UAAU;IACnD,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,oBAAY,YAAY;IACtB,IAAI,IAAA;IACJ,IAAI,IAAA;IACJ,MAAM,IAAA;IACN,KAAK,IAAA;IACL,KAAK,IAAA;IACL,OAAO,IAAA;CACR;AAED,MAAM,WAAW,UAAU;IACzB,SAAS,CAAC,EAAE,CACV,MAAM,EAAE,MAAM,EACd,YAAY,EAAE,YAAY,EAAE,KACzB,OAAO,CAAC,OAAO,CAAC,CAAC;IACtB,UAAU,EAAE,CACV,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,KACV,OAAO,CAAC,cAAc,CAAC,CAAC;IAC7B,gBAAgB,EAAE,CAChB,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,KACV,OAAO,CAAC,gBAAgB,CAAC,CAAC;IAC/B,gBAAgB,EAAE,CAChB,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,KACV,OAAO,CAAC,cAAc,CAAC,CAAC;IAC7B,aAAa,EAAE,CACb,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,KACV,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9B,YAAY,EAAE,CACZ,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,GAAG,EACV,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,KACV,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9B,YAAY,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OA
AO,CAAC,iBAAiB,CAAC,CAAC;IAC7D,cAAc,EAAE,MAAM,CAAC;CACxB"}
@@ -0,0 +1,128 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.AICapability = exports.balanceCompletionSchema = exports.videoCompletionSchema = exports.speechCompletionSchema = exports.imageCompletionSchema = exports.textCompletionSchema = exports.chatCompletionSchema = exports.completionSchema = void 0;
4
+ const schema_1 = require("@plasius/schema");
5
+ exports.completionSchema = (0, schema_1.createSchema)({
6
+ id: schema_1.field
7
+ .string()
8
+ .description("A unique ID for this completion")
9
+ .version("1.0"),
10
+ type: schema_1.field
11
+ .string()
12
+ .description("The type of completion (e.g. 'chat', 'text', 'speech')")
13
+ .version("1.0"),
14
+ model: schema_1.field
15
+ .string()
16
+ .description("The model used to generate this completion")
17
+ .version("1.0"),
18
+ durationMs: schema_1.field
19
+ .number()
20
+ .description("How long the AI task took in milliseconds")
21
+ .version("1.0"),
22
+ createdAt: schema_1.field
23
+ .string()
24
+ .description("ISO timestamp when the completion was created")
25
+ .version("1.0"),
26
+ partitionKey: schema_1.field
27
+ .string()
28
+ .description("User or system identifier that made the request")
29
+ .version("1.0"),
30
+ usage: schema_1.field
31
+ .object({})
32
+ .description("Optional usage metrics like token count or cost")
33
+ .version("1.0")
34
+ .optional()
35
+ .as(),
36
+ }, "completion", {
37
+ version: "1.0",
38
+ piiEnforcement: "none",
39
+ table: "completions",
40
+ schemaValidator: () => {
41
+ return true;
42
+ },
43
+ });
44
+ exports.chatCompletionSchema = (0, schema_1.createSchema)({
45
+ message: schema_1.field
46
+ .string()
47
+ .description("The response from the AI")
48
+ .version("1.0"),
49
+ outputUser: schema_1.field
50
+ .string()
51
+ .description("The 'actor' who is chatting")
52
+ .version("1.0"),
53
+ }, "chatCompletion", {
54
+ version: "1.0",
55
+ table: "completions",
56
+ schemaValidator: () => {
57
+ return true;
58
+ },
59
+ });
60
+ exports.textCompletionSchema = (0, schema_1.createSchema)({
61
+ message: schema_1.field
62
+ .string()
63
+ .description("The response from the AI")
64
+ .version("1.0"),
65
+ }, "textCompletion", {
66
+ version: "1.0",
67
+ table: "completions",
68
+ schemaValidator: () => {
69
+ return true;
70
+ },
71
+ });
72
+ exports.imageCompletionSchema = (0, schema_1.createSchema)({
73
+ url: schema_1.field
74
+ .string()
75
+ .description("The response from the AI")
76
+ .version("1.0")
77
+ .as(),
78
+ }, "imageCompletion", {
79
+ version: "1.0",
80
+ table: "completions",
81
+ schemaValidator: () => {
82
+ return true;
83
+ },
84
+ });
85
+ exports.speechCompletionSchema = (0, schema_1.createSchema)({
86
+ url: schema_1.field
87
+ .string()
88
+ .description("The response from the AI")
89
+ .version("1.0")
90
+ .as(),
91
+ }, "speechCompletion", {
92
+ version: "1.0",
93
+ table: "completions",
94
+ schemaValidator: () => {
95
+ return true;
96
+ },
97
+ });
98
+ exports.videoCompletionSchema = (0, schema_1.createSchema)({
99
+ url: schema_1.field
100
+ .string()
101
+ .description("The response from the AI")
102
+ .version("1.0")
103
+ .as(),
104
+ }, "videoCompletion", {
105
+ version: "1.0",
106
+ table: "completions",
107
+ schemaValidator: () => {
108
+ return true;
109
+ },
110
+ });
111
+ exports.balanceCompletionSchema = (0, schema_1.createSchema)({
112
+ balance: schema_1.field.number().description("Current balance").version("1.0"),
113
+ }, "balanceCompletion", {
114
+ version: "1.0",
115
+ table: "completions",
116
+ schemaValidator: () => {
117
+ return true;
118
+ },
119
+ });
120
+ var AICapability;
121
+ (function (AICapability) {
122
+ AICapability[AICapability["Chat"] = 0] = "Chat";
123
+ AICapability[AICapability["Text"] = 1] = "Text";
124
+ AICapability[AICapability["Speech"] = 2] = "Speech";
125
+ AICapability[AICapability["Image"] = 3] = "Image";
126
+ AICapability[AICapability["Video"] = 4] = "Video";
127
+ AICapability[AICapability["Balance"] = 5] = "Balance";
128
+ })(AICapability || (exports.AICapability = AICapability = {}));
@@ -0,0 +1,8 @@
1
+ import type { AIPlatform } from "./index.js";
2
+ export interface OpenAIPlatformProps {
3
+ openaiAPIKey: string;
4
+ openaiProjectKey: string;
5
+ openaiOrgID: string;
6
+ }
7
+ export declare function OpenAIPlatform(userId: string, props: OpenAIPlatformProps): Promise<AIPlatform>;
8
+ //# sourceMappingURL=openai.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/platform/openai.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,UAAU,EAQX,MAAM,YAAY,CAAC;AAGpB,MAAM,WAAW,mBAAmB;IAClC,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,wBAAsB,cAAc,CAClC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,mBAAmB,GACzB,OAAO,CAAC,UAAU,CAAC,CAoGrB"}
@@ -0,0 +1,67 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.OpenAIPlatform = OpenAIPlatform;
7
+ const openai_1 = __importDefault(require("openai"));
8
+ const react_1 = require("react");
9
+ async function OpenAIPlatform(userId, props) {
10
+ const openai = new openai_1.default({
11
+ apiKey: props.openaiAPIKey,
12
+ project: props.openaiProjectKey,
13
+ organization: props.openaiOrgID,
14
+ dangerouslyAllowBrowser: false,
15
+ });
16
+ void openai;
17
+ function baseCompletionData(type, model, requestor, duration) {
18
+ return {
19
+ partitionKey: requestor,
20
+ id: crypto.randomUUID(),
21
+ type,
22
+ model,
23
+ createdAt: new Date().toISOString(),
24
+ durationMs: duration,
25
+ usage: {},
26
+ };
27
+ }
28
+ const chatWithAI = (userId, input, context, model) => {
29
+ void [input, context, model];
30
+ const base = baseCompletionData("chat", "model", userId, 0);
31
+ return Promise.resolve({ ...base, message: "Something", outputUser: "" });
32
+ };
33
+ const synthesizeSpeech = (userId, input, voice, context, model) => {
34
+ void [input, voice, context, model];
35
+ const base = baseCompletionData("speech", "model", userId, 0);
36
+ return Promise.resolve({ ...base, url: new URL("Something") });
37
+ };
38
+ const transcribeSpeech = (userId, input, context, model) => {
39
+ void [input, context, model];
40
+ const base = baseCompletionData("text", "model", userId, 0);
41
+ return Promise.resolve({ ...base, message: "Something" });
42
+ };
43
+ const generateImage = (userId, input, context, model) => {
44
+ void [input, context, model];
45
+ const base = baseCompletionData("image", "model", userId, 0);
46
+ return Promise.resolve({ ...base, url: new URL("Something") });
47
+ };
48
+ const produceVideo = (userId, input, image, context, model) => {
49
+ void [input, image, context, model];
50
+ const base = baseCompletionData("video", "model", userId, 0);
51
+ return Promise.resolve({ ...base, url: new URL("Something") });
52
+ };
53
+ const checkBalance = (userId) => {
54
+ const base = baseCompletionData("balanceCompletion", "", userId, 0);
55
+ return Promise.resolve({ ...base, balance: 0.0 });
56
+ };
57
+ const currentBalance = (await checkBalance(userId)).balance;
58
+ return {
59
+ chatWithAI,
60
+ synthesizeSpeech,
61
+ transcribeSpeech,
62
+ generateImage,
63
+ produceVideo,
64
+ checkBalance,
65
+ currentBalance,
66
+ };
67
+ }
@@ -0,0 +1,6 @@
1
+ import type { AIPlatform } from "./index.js";
2
+ export interface PixelVersePlatformProps {
3
+ pixelVerseAPIKey: string;
4
+ }
5
+ export declare function PixelVersePlatform(userId: string, props: PixelVersePlatformProps): Promise<AIPlatform>;
6
+ //# sourceMappingURL=pixelverse.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pixelverse.d.ts","sourceRoot":"","sources":["../../src/platform/pixelverse.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EACV,UAAU,EAQX,MAAM,YAAY,CAAC;AAmBpB,MAAM,WAAW,uBAAuB;IACtC,gBAAgB,EAAE,MAAM,CAAC;CAC1B;AAED,wBAAsB,kBAAkB,CACtC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,uBAAuB,GAC7B,OAAO,CAAC,UAAU,CAAC,CA8QrB"}
@@ -0,0 +1,199 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.PixelVersePlatform = PixelVersePlatform;
4
+ const uuid_1 = require("uuid");
5
+ const perf_hooks_1 = require("perf_hooks");
6
+ const react_1 = require("react");
7
+ async function PixelVersePlatform(userId, props) {
8
+ async function uploadImage(image, apiKey) {
9
+ const headers = new Headers();
10
+ headers.append("API-KEY", apiKey);
11
+ headers.append("Ai-trace-id", (0, uuid_1.v4)());
12
+ headers.append("Access-Control-Allow-Origin", "*");
13
+ const formData = new FormData();
14
+ if (image instanceof File) {
15
+ formData.append("image", image, "");
16
+ }
17
+ else {
18
+ const blob = await fetch(image.toString()).then((r) => r.blob());
19
+ formData.append("image", blob, "image-from-url");
20
+ }
21
+ // pixelapi is proxied through the vite.config.ts file
22
+ // to avoid CORS issues and to allow for local development
23
+ const response = await fetch("/pixelapi/openapi/v2/image/upload", {
24
+ method: "POST",
25
+ headers,
26
+ body: formData,
27
+ redirect: "follow",
28
+ });
29
+ const data = (await response.json());
30
+ return data;
31
+ }
32
+ async function generateVideo(imgId, prompt, apiKey, seed, template_id, negative_prompt) {
33
+ const headers = new Headers();
34
+ headers.append("API-KEY", apiKey);
35
+ headers.append("Ai-trace-id", (0, uuid_1.v4)());
36
+ headers.append("Content-Type", "application/json");
37
+ headers.append("Access-Control-Allow-Origin", "*");
38
+ headers.append("Accept", "application/json");
39
+ const values = {
40
+ duration: 5,
41
+ img_id: imgId,
42
+ model: "v3.5",
43
+ motion_mode: "normal",
44
+ prompt: prompt,
45
+ quality: "720p",
46
+ water_mark: false,
47
+ };
48
+ if (seed) {
49
+ values.seed = seed;
50
+ }
51
+ if (template_id) {
52
+ values.template_id = template_id;
53
+ }
54
+ if (negative_prompt) {
55
+ values.negative_prompt = negative_prompt;
56
+ }
57
+ const body = JSON.stringify(values);
58
+ // pixelapi is proxied through the vite.config.ts file
59
+ // to avoid CORS issues and to allow for local development
60
+ const response = await fetch("/pixelapi/openapi/v2/video/img/generate", {
61
+ method: "POST",
62
+ headers: headers,
63
+ referrerPolicy: "no-referrer",
64
+ body,
65
+ });
66
+ const data = (await response.json());
67
+ return data;
68
+ }
69
+ async function checkVideoStatus(id, apiKey) {
70
+ const headers = new Headers();
71
+ headers.append("API-KEY", apiKey);
72
+ headers.append("Ai-trace-id", (0, uuid_1.v4)());
73
+ headers.append("Access-Control-Allow-Origin", "*");
74
+ headers.append("Accept", "application/json");
75
+ // pixelapi is proxied through the vite.config.ts file
76
+ // to avoid CORS issues and to allow for local development
77
+ const response = await fetch(`/pixelapi/openapi/v2/video/result/${id}`, {
78
+ method: "GET",
79
+ headers,
80
+ referrerPolicy: "no-referrer",
81
+ });
82
+ const data = (await response.json());
83
+ return data;
84
+ }
85
+ function baseCompletionData(type, model, requestor, duration) {
86
+ return {
87
+ partitionKey: requestor,
88
+ id: crypto.randomUUID(),
89
+ type,
90
+ model,
91
+ createdAt: new Date().toISOString(),
92
+ durationMs: duration,
93
+ usage: {},
94
+ };
95
+ }
96
+ const chatWithAI = (_userId, _input, _context, _model) => {
97
+ void [_userId, _input, _context, _model];
98
+ return Promise.reject(new Error("Not implemented"));
99
+ };
100
+ const synthesizeSpeech = (_userId, _input, _voice, _context, _model) => {
101
+ void [_userId, _input, _voice, _context, _model];
102
+ return Promise.reject(new Error("Not implemented"));
103
+ };
104
+ const transcribeSpeech = (_userId, _input, _context, _model) => {
105
+ void [_userId, _input, _context, _model];
106
+ return Promise.reject(new Error("Not implemented"));
107
+ };
108
+ const generateImage = (_userId, _input, _context, _model) => {
109
+ void [_userId, _input, _context, _model];
110
+ return Promise.reject(new Error("Not implemented"));
111
+ };
112
+ const produceVideo = (userId, input, image, context, model) => {
113
+ const start = perf_hooks_1.performance.now();
114
+ return uploadImage(image, props.pixelVerseAPIKey)
115
+ .then((uploadResult) => {
116
+ const imageId = uploadResult?.Resp?.id;
117
+ if (!imageId)
118
+ throw new Error("Invalid image upload response.");
119
+ return generateVideo(imageId, input, props.pixelVerseAPIKey);
120
+ })
121
+ .then((generated) => {
122
+ const videoId = generated?.Resp?.id;
123
+ if (!videoId)
124
+ throw new Error("Video generation did not return a valid ID.");
125
+ return waitForVideoCompletion(videoId, props.pixelVerseAPIKey);
126
+ })
127
+ .then((videoUrl) => {
128
+ const duration = perf_hooks_1.performance.now() - start;
129
+ const base = baseCompletionData("video", model, userId, duration);
130
+ return {
131
+ ...base,
132
+ url: new URL(videoUrl),
133
+ };
134
+ })
135
+ .catch((err) => {
136
+ // Optional: log or re-throw error for upstream handling
137
+ throw new Error(`produceVideo failed: ${err.message}`);
138
+ });
139
+ };
140
+ async function waitForVideoCompletion(videoId, apiKey, maxRetries = 20, delayMs = 3000) {
141
+ for (let attempt = 0; attempt < maxRetries; attempt++) {
142
+ await new Promise((res) => setTimeout(res, delayMs));
143
+ try {
144
+ const videoCheck = await checkVideoStatus(videoId, apiKey);
145
+ if (videoCheck?.Resp?.status === 1) {
146
+ const url = videoCheck?.Resp?.url;
147
+ if (!url)
148
+ throw new Error("Video marked complete but no URL returned.");
149
+ return url;
150
+ }
151
+ }
152
+ catch (err) {
153
+ console.warn(`Attempt ${attempt + 1} failed: ${err.message}`);
154
+ }
155
+ }
156
+ throw new Error("Timed out waiting for video to complete.");
157
+ }
158
+ const checkBalance = (userId) => {
159
+ const start = perf_hooks_1.performance.now();
160
+ const headers = new Headers();
161
+ headers.append("API-KEY", props.pixelVerseAPIKey);
162
+ headers.append("AI-trace-ID", (0, uuid_1.v4)());
163
+ headers.append("Access-Control-Allow-Origin", "*");
164
+ headers.append("Accept", "application/json");
165
+ headers.append("Content-Type", "application/json");
166
+ return fetch("/pixelapi/openapi/v2/account/balance", {
167
+ method: "GET",
168
+ headers,
169
+ referrerPolicy: "no-referrer",
170
+ })
171
+ .then(async (res) => (await res.json()))
172
+ .then((data) => {
173
+ if (!data?.Resp) {
174
+ throw new Error("Invalid balance response");
175
+ }
176
+ const duration = perf_hooks_1.performance.now() - start;
177
+ const base = baseCompletionData("balanceCompletion", "", userId, duration);
178
+ const monthly = data.Resp.credit_monthly ?? 0;
179
+ const pkg = data.Resp.credit_package ?? 0;
180
+ return {
181
+ ...base,
182
+ balance: monthly + pkg,
183
+ };
184
+ })
185
+ .catch((err) => {
186
+ throw new Error(`checkBalance failed: ${err.message}`);
187
+ });
188
+ };
189
+ const currentBalance = (await checkBalance(userId)).balance ?? 0;
190
+ return {
191
+ chatWithAI,
192
+ synthesizeSpeech,
193
+ transcribeSpeech,
194
+ generateImage,
195
+ produceVideo,
196
+ checkBalance,
197
+ currentBalance,
198
+ };
199
+ }
@@ -0,0 +1,21 @@
1
+ # ADR-0001: Standalone @plasius/ai Package Scope
2
+
3
+ - Date: 2026-02-11
4
+ - Status: Accepted
5
+
6
+ ## Context
7
+
8
+ This package was previously maintained as a workspace-only module inside
9
+ `plasius-ltd-site`. External consumers and remote builds require it to be
10
+ installable from npm without monorepo-local links.
11
+
12
+ ## Decision
13
+
14
+ Move `@plasius/ai` to a standalone root package with independent build,
15
+ test, governance, CI, and publish workflows.
16
+
17
+ ## Consequences
18
+
19
+ - The package can be versioned and released independently.
20
+ - `plasius-ltd-site` and other repositories can depend on npm-published versions.
21
+ - Build and lint rules must no longer rely on monorepo-relative tsconfig paths.
@@ -0,0 +1,24 @@
1
+ # ADR-0002: Public Repository Governance Baseline
2
+
3
+ - Date: 2026-02-11
4
+ - Status: Accepted
5
+
6
+ ## Context
7
+
8
+ Public npm distribution requires transparent contributor and security policy
9
+ artifacts and consistent release automation.
10
+
11
+ ## Decision
12
+
13
+ Include these baseline governance assets:
14
+
15
+ - `CODE_OF_CONDUCT.md`
16
+ - `CONTRIBUTORS.md`
17
+ - `SECURITY.md`
18
+ - `legal/` CLA documents
19
+ - CI/CD GitHub Actions workflows
20
+
21
+ ## Consequences
22
+
23
+ - Public contributors and consumers can follow a predictable governance process.
24
+ - Release quality gates (build, test, coverage, publish) are standardized.
@@ -0,0 +1,25 @@
1
+ # ADR-0003: Contracts-First Documentation Baseline
2
+
3
+ - Date: 2026-02-11
4
+ - Status: Accepted
5
+
6
+ ## Context
7
+
8
+ As `@plasius/ai` moves to public npm usage, consumers need clear guidance on
9
+ what is stable API versus what is provisional runtime code. Previous
10
+ documentation did not clearly separate these concerns.
11
+
12
+ ## Decision
13
+
14
+ Document `@plasius/ai` around the current stable contract surface:
15
+
16
+ - Publish architecture guidance for contracts-first usage.
17
+ - Provide explicit API reference for `AICapability`, `AIPlatform`, completion
18
+ models, and schemas.
19
+ - Add provider guidance for host-app adapter composition.
20
+ - Record known limitations and provisional runtime areas in README/docs.
21
+
22
+ ## Consequences
23
+
24
+ - Integrators can safely adopt stable exports without relying on unfinished internals.
25
+ - Migration to future provider packages can happen with less ambiguity.
@@ -0,0 +1,35 @@
1
+ # Architectural Decision Record (ADR)
2
+
3
+ ## Title
4
+
5
+ > Concise, descriptive title of the decision.
6
+
7
+ ## Status
8
+
9
+ - Proposed | Accepted | Rejected | Superseded | Deprecated
10
+ - Date: YYYY-MM-DD
11
+ - Version: 1.0
12
+
13
+ ## Context
14
+
15
+ Describe the problem, constraints, and relevant background.
16
+
17
+ ## Decision
18
+
19
+ Clear statement of the selected approach.
20
+
21
+ ## Alternatives Considered
22
+
23
+ - Option A
24
+ - Option B
25
+ - Option C
26
+
27
+ ## Consequences
28
+
29
+ - Positive outcomes
30
+ - Negative outcomes / trade-offs
31
+ - Follow-up work
32
+
33
+ ## References
34
+
35
+ - https://adr.github.io/
@@ -0,0 +1,64 @@
1
+ # API Reference
2
+
3
+ ## Enums
4
+
5
+ ### `AICapability`
6
+
7
+ Routing capability enum:
8
+
9
+ - `Chat`
10
+ - `Text`
11
+ - `Speech`
12
+ - `Image`
13
+ - `Video`
14
+ - `Balance`
15
+
16
+ ## Core Interfaces
17
+
18
+ ### `AIPlatform`
19
+
20
+ Contract for runtime adapters:
21
+
22
+ - `chatWithAI(userId, input, context, model)`
23
+ - `synthesizeSpeech(userId, input, voice, context, model)`
24
+ - `transcribeSpeech(userId, input, context, model)`
25
+ - `generateImage(userId, input, context, model)`
26
+ - `produceVideo(userId, input, image, context, model)`
27
+ - `checkBalance(userId)`
28
+ - `currentBalance`
29
+
30
+ Optional:
31
+
32
+ - `canHandle(userId, capabilities)`
33
+
34
+ ## Completion Models
35
+
36
+ Base type:
37
+
38
+ - `Completion`
39
+ - `id`
40
+ - `partitionKey`
41
+ - `type`
42
+ - `model`
43
+ - `durationMs`
44
+ - `createdAt`
45
+ - `usage?`
46
+
47
+ Specialized variants:
48
+
49
+ - `ChatCompletion`
50
+ - `TextCompletion`
51
+ - `ImageCompletion`
52
+ - `SpeechCompletion`
53
+ - `VideoCompletion`
54
+ - `BalanceCompletion`
55
+
56
+ ## Exported Schemas
57
+
58
+ - `completionSchema`
59
+ - `chatCompletionSchema`
60
+ - `textCompletionSchema`
61
+ - `imageCompletionSchema`
62
+ - `speechCompletionSchema`
63
+ - `videoCompletionSchema`
64
+ - `balanceCompletionSchema`