@elizaos/plugin-elizacloud 1.7.0-alpha.0 → 1.7.2

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
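The caller-facing change most visible below is that the IMAGE handler now reads `count` instead of `n`. As a minimal sketch of a 1.7.2 caller (the `generateSunset` helper name is hypothetical; the `runtime.useModel` call mirrors the plugin's own test in this diff):

    const { ModelType } = require("@elizaos/core");

    // Hypothetical helper: request one image via the plugin's IMAGE handler.
    // After 1.7.2 the handler reads `count` (formerly `n`) alongside `size`.
    async function generateSunset(runtime) {
      return runtime.useModel(ModelType.IMAGE, {
        prompt: "A beautiful sunset over a calm ocean",
        count: 1, // was `n: 1` in 1.7.0-alpha.0
        size: "1024x1024"
      });
    }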
@@ -42,40 +42,13 @@ var __export = (target, all) => {
  // src/index.node.ts
  var exports_index_node = {};
  __export(exports_index_node, {
- worldTable: () => worldTable,
- taskTable: () => taskTable,
- serverTable: () => serverTable,
- serverAgentsTable: () => serverAgentsTable,
- roomTable: () => roomTable,
- relationshipTable: () => relationshipTable,
- pluginSql: () => import_node3.default,
- participantTable: () => participantTable,
- messageTable: () => messageTable,
- messageServerTable: () => messageServerTable,
- messageServerAgentsTable: () => messageServerAgentsTable,
- memoryTable: () => memoryTable,
- logTable: () => logTable,
- getCloudStorage: () => getCloudStorage,
- entityTable: () => entityTable,
- embeddingTable: () => embeddingTable,
  elizaOSCloudPlugin: () => elizaOSCloudPlugin,
- default: () => src_default,
- createDirectDatabaseAdapter: () => createDirectDatabaseAdapter,
- createDatabaseAdapter: () => createDatabaseAdapter,
- createCloudStorageService: () => createCloudStorageService,
- createCloudDatabaseAdapter: () => createCloudDatabaseAdapter,
- componentTable: () => componentTable,
- channelTable: () => channelTable,
- channelParticipantsTable: () => channelParticipantsTable,
- cacheTable: () => cacheTable,
- agentTable: () => agentTable,
- CloudStorageService: () => CloudStorageService,
- CloudDatabaseAdapter: () => CloudDatabaseAdapter
+ default: () => src_default
  });
  module.exports = __toCommonJS(exports_index_node);

  // src/index.ts
- var import_core15 = require("@elizaos/core");
+ var import_core10 = require("@elizaos/core");

  // src/init.ts
  var import_core2 = require("@elizaos/core");
@@ -83,7 +56,11 @@ var import_core2 = require("@elizaos/core");
  // src/utils/config.ts
  var import_core = require("@elizaos/core");
  function getSetting(runtime, key, defaultValue) {
- return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;
+ const value = runtime.getSetting(key);
+ if (value !== undefined && value !== null) {
+ return String(value);
+ }
+ return process.env[key] ?? defaultValue;
  }
  function isBrowser() {
  return typeof globalThis !== "undefined" && typeof globalThis.document !== "undefined";
@@ -194,33 +171,36 @@ function createOpenAIClient(runtime) {
  // src/utils/events.ts
  var import_core3 = require("@elizaos/core");
  function emitModelUsageEvent(runtime, type, prompt, usage) {
+ const truncatedPrompt = typeof prompt === "string" ? prompt.length > 200 ? `${prompt.slice(0, 200)}…` : prompt : "";
+ const inputTokens = Number(usage.inputTokens || 0);
+ const outputTokens = Number(usage.outputTokens || 0);
+ const totalTokens = Number(usage.totalTokens != null ? usage.totalTokens : inputTokens + outputTokens);
  runtime.emitEvent(import_core3.EventType.MODEL_USED, {
- provider: "openai",
+ runtime,
+ source: "elizacloud",
+ provider: "elizacloud",
  type,
- prompt,
+ prompt: truncatedPrompt,
  tokens: {
- prompt: usage.inputTokens,
- completion: usage.outputTokens,
- total: usage.totalTokens
+ prompt: inputTokens,
+ completion: outputTokens,
+ total: totalTokens
  }
  });
  }

  // src/models/text.ts
- async function handleTextSmall(runtime, {
- prompt,
- stopSequences = [],
- maxTokens = 8192,
- temperature = 0.7,
- frequencyPenalty = 0.7,
- presencePenalty = 0.7
- }) {
+ function buildGenerateParams(runtime, modelType, params) {
+ const { prompt, stopSequences = [] } = params;
+ const temperature = params.temperature ?? 0.7;
+ const frequencyPenalty = params.frequencyPenalty ?? 0.7;
+ const presencePenalty = params.presencePenalty ?? 0.7;
+ const maxTokens = params.maxOutputTokens ?? params.maxTokens ?? 8192;
  const openai = createOpenAIClient(runtime);
- const modelName = getSmallModel(runtime);
+ const modelName = modelType === import_core4.ModelType.TEXT_SMALL ? getSmallModel(runtime) : getLargeModel(runtime);
+ const modelLabel = modelType === import_core4.ModelType.TEXT_SMALL ? "TEXT_SMALL" : "TEXT_LARGE";
  const experimentalTelemetry = getExperimentalTelemetry(runtime);
- import_core4.logger.log(`[ELIZAOS_CLOUD] Using TEXT_SMALL model: ${modelName}`);
- import_core4.logger.log(prompt);
- const { text: openaiResponse, usage } = await import_ai.generateText({
+ const generateParams = {
  model: openai.languageModel(modelName),
  prompt,
  system: runtime.character.system ?? undefined,
@@ -232,42 +212,50 @@ async function handleTextSmall(runtime, {
  experimental_telemetry: {
  isEnabled: experimentalTelemetry
  }
- });
- if (usage) {
- emitModelUsageEvent(runtime, import_core4.ModelType.TEXT_SMALL, prompt, usage);
- }
- return openaiResponse;
+ };
+ return { generateParams, modelName, modelLabel, prompt };
+ }
+ function handleStreamingGeneration(runtime, modelType, generateParams, prompt, modelLabel) {
+ import_core4.logger.debug(`[ELIZAOS_CLOUD] Streaming text with ${modelLabel} model`);
+ const streamResult = import_ai.streamText(generateParams);
+ return {
+ textStream: streamResult.textStream,
+ text: streamResult.text,
+ usage: streamResult.usage.then((usage) => {
+ if (usage) {
+ emitModelUsageEvent(runtime, modelType, prompt, usage);
+ const inputTokens = usage.inputTokens ?? 0;
+ const outputTokens = usage.outputTokens ?? 0;
+ return {
+ promptTokens: inputTokens,
+ completionTokens: outputTokens,
+ totalTokens: inputTokens + outputTokens
+ };
+ }
+ return;
+ }),
+ finishReason: streamResult.finishReason
+ };
  }
- async function handleTextLarge(runtime, {
- prompt,
- stopSequences = [],
- maxTokens = 8192,
- temperature = 0.7,
- frequencyPenalty = 0.7,
- presencePenalty = 0.7
- }) {
- const openai = createOpenAIClient(runtime);
- const modelName = getLargeModel(runtime);
- const experimentalTelemetry = getExperimentalTelemetry(runtime);
- import_core4.logger.log(`[ELIZAOS_CLOUD] Using TEXT_LARGE model: ${modelName}`);
+ async function generateTextWithModel(runtime, modelType, params) {
+ const { generateParams, modelName, modelLabel, prompt } = buildGenerateParams(runtime, modelType, params);
+ import_core4.logger.debug(`[ELIZAOS_CLOUD] Generating text with ${modelLabel} model: ${modelName}`);
+ if (params.stream) {
+ return handleStreamingGeneration(runtime, modelType, generateParams, prompt, modelLabel);
+ }
+ import_core4.logger.log(`[ELIZAOS_CLOUD] Using ${modelLabel} model: ${modelName}`);
  import_core4.logger.log(prompt);
- const { text: openaiResponse, usage } = await import_ai.generateText({
- model: openai.languageModel(modelName),
- prompt,
- system: runtime.character.system ?? undefined,
- temperature,
- maxOutputTokens: maxTokens,
- frequencyPenalty,
- presencePenalty,
- stopSequences,
- experimental_telemetry: {
- isEnabled: experimentalTelemetry
- }
- });
- if (usage) {
- emitModelUsageEvent(runtime, import_core4.ModelType.TEXT_LARGE, prompt, usage);
+ const response = await import_ai.generateText(generateParams);
+ if (response.usage) {
+ emitModelUsageEvent(runtime, modelType, prompt, response.usage);
  }
- return openaiResponse;
+ return response.text;
+ }
+ async function handleTextSmall(runtime, params) {
+ return generateTextWithModel(runtime, import_core4.ModelType.TEXT_SMALL, params);
+ }
+ async function handleTextLarge(runtime, params) {
+ return generateTextWithModel(runtime, import_core4.ModelType.TEXT_LARGE, params);
  }
  // src/models/object.ts
  var import_core6 = require("@elizaos/core");
@@ -292,31 +280,6 @@ function getJsonRepairFunction() {
  }
  };
  }
- function detectAudioMimeType(buffer) {
- if (buffer.length < 12) {
- return "application/octet-stream";
- }
- if (buffer[0] === 82 && buffer[1] === 73 && buffer[2] === 70 && buffer[3] === 70 && buffer[8] === 87 && buffer[9] === 65 && buffer[10] === 86 && buffer[11] === 69) {
- return "audio/wav";
- }
- if (buffer[0] === 73 && buffer[1] === 68 && buffer[2] === 51 || buffer[0] === 255 && (buffer[1] & 224) === 224) {
- return "audio/mpeg";
- }
- if (buffer[0] === 79 && buffer[1] === 103 && buffer[2] === 103 && buffer[3] === 83) {
- return "audio/ogg";
- }
- if (buffer[0] === 102 && buffer[1] === 76 && buffer[2] === 97 && buffer[3] === 67) {
- return "audio/flac";
- }
- if (buffer[4] === 102 && buffer[5] === 116 && buffer[6] === 121 && buffer[7] === 112) {
- return "audio/mp4";
- }
- if (buffer[0] === 26 && buffer[1] === 69 && buffer[2] === 223 && buffer[3] === 163) {
- return "audio/webm";
- }
- import_core5.logger.warn("Could not detect audio format from buffer, using generic binary type");
- return "application/octet-stream";
- }
  async function webStreamToNodeStream(webStream) {
  try {
  const { Readable } = await import("node:stream");
@@ -489,7 +452,7 @@ async function handleTextEmbedding(runtime, params) {
  // src/models/image.ts
  var import_core8 = require("@elizaos/core");
  async function handleImageGeneration(runtime, params) {
- const numImages = params.n || 1;
+ const numImages = params.count || 1;
  const size = params.size || "1024x1024";
  const prompt = params.prompt;
  const modelName = getImageGenerationModel(runtime);
@@ -585,10 +548,6 @@ async function handleImageDescription(runtime, params) {
  description: "No response from API"
  };
  }
- const isCustomPrompt = typeof params === "object" && params.prompt && params.prompt !== "Please analyze this image and provide a title and detailed description.";
- if (isCustomPrompt) {
- return content;
- }
  const processedResult = parseImageDescriptionResponse(content);
  return processedResult;
  } catch (error) {
@@ -600,89 +559,8 @@ async function handleImageDescription(runtime, params) {
  };
  }
  }
- // src/models/transcription.ts
- var import_core9 = require("@elizaos/core");
- async function handleTranscription(runtime, input) {
- let modelName = getSetting(runtime, "ELIZAOS_CLOUD_TRANSCRIPTION_MODEL", "gpt-4o-mini-transcribe");
- import_core9.logger.log(`[ELIZAOS_CLOUD] Using TRANSCRIPTION model: ${modelName}`);
- const baseURL = getBaseURL(runtime);
- let blob;
- let extraParams = null;
- if (input instanceof Blob || input instanceof File) {
- blob = input;
- } else if (Buffer.isBuffer(input)) {
- const detectedMimeType = detectAudioMimeType(input);
- import_core9.logger.debug(`Auto-detected audio MIME type: ${detectedMimeType}`);
- blob = new Blob([input], { type: detectedMimeType });
- } else if (typeof input === "object" && input !== null && "audio" in input && input.audio != null) {
- const params = input;
- if (!(params.audio instanceof Blob) && !(params.audio instanceof File) && !Buffer.isBuffer(params.audio)) {
- throw new Error("TRANSCRIPTION param 'audio' must be a Blob/File/Buffer.");
- }
- if (Buffer.isBuffer(params.audio)) {
- let mimeType = params.mimeType;
- if (!mimeType) {
- mimeType = detectAudioMimeType(params.audio);
- import_core9.logger.debug(`Auto-detected audio MIME type: ${mimeType}`);
- } else {
- import_core9.logger.debug(`Using provided MIME type: ${mimeType}`);
- }
- blob = new Blob([params.audio], { type: mimeType });
- } else {
- blob = params.audio;
- }
- extraParams = params;
- if (typeof params.model === "string" && params.model) {
- modelName = params.model;
- }
- } else {
- throw new Error("TRANSCRIPTION expects a Blob/File/Buffer or an object { audio: Blob/File/Buffer, mimeType?, language?, response_format?, timestampGranularities?, prompt?, temperature?, model? }");
- }
- const mime = blob.type || "audio/webm";
- const filename = blob.name || (mime.includes("mp3") || mime.includes("mpeg") ? "recording.mp3" : mime.includes("ogg") ? "recording.ogg" : mime.includes("wav") ? "recording.wav" : mime.includes("webm") ? "recording.webm" : "recording.bin");
- const formData = new FormData;
- formData.append("file", blob, filename);
- formData.append("model", String(modelName));
- if (extraParams) {
- if (typeof extraParams.language === "string") {
- formData.append("language", String(extraParams.language));
- }
- if (typeof extraParams.response_format === "string") {
- formData.append("response_format", String(extraParams.response_format));
- }
- if (typeof extraParams.prompt === "string") {
- formData.append("prompt", String(extraParams.prompt));
- }
- if (typeof extraParams.temperature === "number") {
- formData.append("temperature", String(extraParams.temperature));
- }
- if (Array.isArray(extraParams.timestampGranularities)) {
- for (const g of extraParams.timestampGranularities) {
- formData.append("timestamp_granularities[]", String(g));
- }
- }
- }
- try {
- const response = await fetch(`${baseURL}/audio/transcriptions`, {
- method: "POST",
- headers: {
- ...getAuthHeader(runtime)
- },
- body: formData
- });
- if (!response.ok) {
- throw new Error(`Failed to transcribe audio: ${response.status} ${response.statusText}`);
- }
- const data = await response.json();
- return data.text || "";
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- import_core9.logger.error(`TRANSCRIPTION error: ${message}`);
- throw error;
- }
- }
  // src/models/speech.ts
- var import_core10 = require("@elizaos/core");
+ var import_core9 = require("@elizaos/core");
  async function fetchTextToSpeech(runtime, options) {
  const defaultModel = getSetting(runtime, "ELIZAOS_CLOUD_TTS_MODEL", "gpt-4o-mini-tts");
  const defaultVoice = getSetting(runtime, "ELIZAOS_CLOUD_TTS_VOICE", "nova");
@@ -724,367 +602,10 @@ async function fetchTextToSpeech(runtime, options) {
  throw new Error(`Failed to fetch speech from ElizaOS Cloud TTS: ${message}`);
  }
  }
- async function handleTextToSpeech(runtime, input) {
- const options = typeof input === "string" ? { text: input } : input;
- const resolvedModel = options.model || getSetting(runtime, "ELIZAOS_CLOUD_TTS_MODEL", "gpt-4o-mini-tts");
- import_core10.logger.log(`[ELIZAOS_CLOUD] Using TEXT_TO_SPEECH model: ${resolvedModel}`);
- try {
- const speechStream = await fetchTextToSpeech(runtime, options);
- return speechStream;
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- import_core10.logger.error(`Error in TEXT_TO_SPEECH: ${message}`);
- throw error;
- }
- }
- // src/models/tokenization.ts
- var import_core11 = require("@elizaos/core");
- var import_js_tiktoken = require("js-tiktoken");
- async function tokenizeText(model, prompt) {
- const modelName = model === import_core11.ModelType.TEXT_SMALL ? process.env.ELIZAOS_CLOUD_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-5-nano" : process.env.LARGE_MODEL ?? "gpt-5-mini";
- const tokens = import_js_tiktoken.encodingForModel(modelName).encode(prompt);
- return tokens;
- }
- async function detokenizeText(model, tokens) {
- const modelName = model === import_core11.ModelType.TEXT_SMALL ? process.env.ELIZAOS_CLOUD_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-5-nano" : process.env.ELIZAOS_CLOUD_LARGE_MODEL ?? process.env.LARGE_MODEL ?? "gpt-5-mini";
- return import_js_tiktoken.encodingForModel(modelName).decode(tokens);
- }
- async function handleTokenizerEncode(_runtime, { prompt, modelType = import_core11.ModelType.TEXT_LARGE }) {
- return await tokenizeText(modelType ?? import_core11.ModelType.TEXT_LARGE, prompt);
- }
- async function handleTokenizerDecode(_runtime, { tokens, modelType = import_core11.ModelType.TEXT_LARGE }) {
- return await detokenizeText(modelType ?? import_core11.ModelType.TEXT_LARGE, tokens);
- }
- // src/database/adapter.ts
- var import_core12 = require("@elizaos/core");
- var import_node = __toESM(require("@elizaos/plugin-sql/node"));
- var DEFAULT_CLOUD_URL = "https://www.elizacloud.ai";
- async function createCloudDatabaseAdapter(config) {
- const baseUrl = config.baseUrl || DEFAULT_CLOUD_URL;
- import_core12.logger.info({ src: "plugin:elizacloud", agentId: config.agentId }, "Provisioning cloud database");
- const response = await provisionCloudDatabase(config.apiKey, baseUrl, config.agentId);
- if (!response.success || !response.connectionUrl) {
- import_core12.logger.error({
- src: "plugin:elizacloud",
- error: response.error,
- agentId: config.agentId
- }, "Failed to provision cloud database");
- return null;
- }
- import_core12.logger.info({ src: "plugin:elizacloud", agentId: config.agentId }, "Cloud database provisioned successfully");
- const adapter = import_node.default.createDatabaseAdapter({ postgresUrl: response.connectionUrl }, config.agentId);
- import_core12.logger.info({ src: "plugin:elizacloud", agentId: config.agentId }, "Cloud database adapter created using PostgreSQL connection");
- return adapter;
- }
- async function provisionCloudDatabase(apiKey, baseUrl, agentId) {
- try {
- const response = await fetch(`${baseUrl}/api/v1/database/provision`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${apiKey}`,
- "Content-Type": "application/json"
- },
- body: JSON.stringify({
- agentId,
- type: "postgresql"
- })
- });
- if (!response.ok) {
- const errorText = await response.text();
- return {
- success: false,
- error: `Cloud database provisioning failed: ${response.status} ${errorText}`
- };
- }
- const data = await response.json();
- return {
- success: true,
- connectionUrl: data.connectionUrl,
- expiresAt: data.expiresAt
- };
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- return {
- success: false,
- error: `Network error during database provisioning: ${message}`
- };
- }
- }
-
- class CloudDatabaseAdapter {
- config;
- adapter = null;
- constructor(config) {
- this.config = config;
- }
- async initialize() {
- if (this.adapter) {
- return this.adapter;
- }
- this.adapter = await createCloudDatabaseAdapter(this.config);
- return this.adapter;
- }
- getAdapter() {
- return this.adapter;
- }
- }
-
- // src/storage/service.ts
- var import_core13 = require("@elizaos/core");
- var DEFAULT_CLOUD_URL2 = "https://www.elizacloud.ai";
- var STORAGE_ENDPOINT = "/api/v1/storage/files";
- function createCloudStorageService(config) {
- return new CloudStorageService(config);
- }
-
- class CloudStorageService {
- apiKey;
- baseUrl;
- constructor(config) {
- this.apiKey = config.apiKey;
- this.baseUrl = config.baseUrl || DEFAULT_CLOUD_URL2;
- }
- async upload(file, options = {}) {
- try {
- const formData = new FormData;
- let blob;
- if (Buffer.isBuffer(file)) {
- blob = new Blob([file], {
- type: options.contentType || "application/octet-stream"
- });
- } else {
- blob = file;
- }
- const filename = options.filename || (file instanceof File ? file.name : "file") || "upload";
- formData.append("file", blob, filename);
- if (options.metadata) {
- formData.append("metadata", JSON.stringify(options.metadata));
- }
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- },
- body: formData
- });
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({}));
- if (response.status === 402) {
- return {
- success: false,
- error: `Insufficient credits. Required: ${errorData.required || "unknown"}, Available: ${errorData.available || "unknown"}. Top up at ${errorData.topUpUrl || "/dashboard/billing"}`
- };
- }
- return {
- success: false,
- error: `Upload failed: ${response.status} ${errorData.error || "Unknown error"}`
- };
- }
- const data = await response.json();
- import_core13.logger.info({ src: "plugin:elizacloud", cost: data.cost, remaining: data.creditsRemaining }, "Storage upload successful");
- return {
- success: true,
- id: data.id,
- url: data.url,
- pathname: data.pathname,
- contentType: data.contentType,
- size: data.size,
- cost: data.cost,
- creditsRemaining: data.creditsRemaining
- };
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- import_core13.logger.error({ src: "plugin:elizacloud", error }, "Storage upload failed");
- return {
- success: false,
- error: `Upload error: ${message}`
- };
- }
- }
- async download(id, url) {
- if (url) {
- try {
- const response = await fetch(url);
- if (!response.ok) {
- import_core13.logger.error({ src: "plugin:elizacloud", status: response.status, url }, "Storage direct download failed");
- return null;
- }
- const arrayBuffer = await response.arrayBuffer();
- return Buffer.from(arrayBuffer);
- } catch (error) {
- import_core13.logger.error({ src: "plugin:elizacloud", error }, "Storage direct download error");
- return null;
- }
- }
- try {
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}/${id}?download=true`, {
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- },
- redirect: "follow"
- });
- if (!response.ok) {
- import_core13.logger.error({ src: "plugin:elizacloud", status: response.status }, "Storage download failed");
- return null;
- }
- const arrayBuffer = await response.arrayBuffer();
- return Buffer.from(arrayBuffer);
- } catch (error) {
- import_core13.logger.error({ src: "plugin:elizacloud", error }, "Storage download error");
- return null;
- }
- }
- async list(options = {}) {
- try {
- const params = new URLSearchParams;
- if (options.prefix)
- params.set("prefix", options.prefix);
- if (options.limit)
- params.set("limit", String(options.limit));
- if (options.cursor)
- params.set("cursor", options.cursor);
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}?${params.toString()}`, {
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- }
- });
- if (!response.ok) {
- import_core13.logger.error({ src: "plugin:elizacloud", status: response.status }, "Storage list failed");
- return { items: [], hasMore: false };
- }
- const data = await response.json();
- return {
- items: data.items || [],
- cursor: data.cursor,
- hasMore: data.hasMore || false
- };
- } catch (error) {
- import_core13.logger.error({ src: "plugin:elizacloud", error }, "Storage list error");
- return { items: [], hasMore: false };
- }
- }
- async delete(id, url) {
- if (!url) {
- import_core13.logger.error({ src: "plugin:elizacloud" }, "Storage delete requires file URL");
- return false;
- }
- try {
- const params = new URLSearchParams({ url });
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}/${id}?${params.toString()}`, {
- method: "DELETE",
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- }
- });
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({}));
- import_core13.logger.error({ src: "plugin:elizacloud", status: response.status, error: errorData.error }, "Storage delete failed");
- return false;
- }
- return true;
- } catch (error) {
- import_core13.logger.error({ src: "plugin:elizacloud", error }, "Storage delete error");
- return false;
- }
- }
- async getStats() {
- try {
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}?stats=true`, {
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- }
- });
- if (!response.ok) {
- return null;
- }
- const data = await response.json();
- return {
- totalFiles: data.stats?.totalFiles || 0,
- totalSize: data.stats?.totalSize || 0,
- totalSizeGB: data.stats?.totalSizeGB || 0,
- pricing: data.pricing || {}
- };
- } catch (error) {
- import_core13.logger.error({ src: "plugin:elizacloud", error }, "Storage stats error");
- return null;
- }
- }
- }
- // src/database/direct-adapter.ts
- var import_core14 = require("@elizaos/core");
- var import_node2 = __toESM(require("@elizaos/plugin-sql/node"));
- function createDatabaseAdapter(config, agentId) {
- const adapter = import_node2.default.createDatabaseAdapter({ postgresUrl: config.postgresUrl }, agentId);
- import_core14.logger.info({ src: "plugin:elizacloud", agentId }, "Direct database adapter created");
- return adapter;
- }
- async function createDirectDatabaseAdapter(config, agentId) {
- return createDatabaseAdapter(config, agentId);
- }
- // src/database/schema.ts
- var import_node3 = __toESM(require("@elizaos/plugin-sql/node"));
- var {
- agentTable,
- roomTable,
- participantTable,
- memoryTable,
- embeddingTable,
- entityTable,
- relationshipTable,
- componentTable,
- taskTable,
- logTable,
- cacheTable,
- worldTable,
- serverTable,
- messageTable,
- messageServerTable,
- messageServerAgentsTable,
- channelTable,
- channelParticipantsTable
- } = import_node3.default.schema;
- var serverAgentsTable = serverTable;
  // src/index.ts
- var cloudStorageInstance = null;
- function getCloudStorage() {
- return cloudStorageInstance;
- }
- async function initializeCloudDatabase(runtime) {
- const apiKey = getApiKey(runtime);
- const baseUrl = getBaseURL(runtime);
- if (!apiKey) {
- import_core15.logger.warn({ src: "plugin:elizacloud" }, "Cloud database enabled but no API key found - skipping database initialization");
- return;
- }
- import_core15.logger.info({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Initializing cloud database");
- const adapter = await createCloudDatabaseAdapter({
- apiKey,
- baseUrl,
- agentId: runtime.agentId
- });
- if (adapter) {
- runtime.registerDatabaseAdapter(adapter);
- import_core15.logger.info({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Cloud database adapter registered successfully");
- } else {
- import_core15.logger.error({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Failed to initialize cloud database adapter");
- }
- }
- function initializeCloudStorage(runtime) {
- const apiKey = getApiKey(runtime);
- const baseUrl = getBaseURL(runtime);
- if (!apiKey) {
- import_core15.logger.warn({ src: "plugin:elizacloud" }, "No API key found - cloud storage will not be available");
- return;
- }
- cloudStorageInstance = new CloudStorageService({
- apiKey,
- baseUrl
- });
- import_core15.logger.info({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Cloud storage service initialized");
- }
  var elizaOSCloudPlugin = {
  name: "elizaOSCloud",
- description: "ElizaOS Cloud plugin - Complete AI, storage, and database solution. Provides multi-model inference (GPT-4, Claude, Gemini), embeddings, image generation, transcription, TTS, managed PostgreSQL database, and cloud file storage. A single plugin that replaces all other AI and database plugins.",
+ description: "ElizaOS Cloud plugin - Multi-model AI generation with text, image, and video support",
  config: {
  ELIZAOS_CLOUD_API_KEY: process.env.ELIZAOS_CLOUD_API_KEY,
  ELIZAOS_CLOUD_BASE_URL: process.env.ELIZAOS_CLOUD_BASE_URL,
@@ -1098,39 +619,20 @@ var elizaOSCloudPlugin = {
  ELIZAOS_CLOUD_EMBEDDING_DIMENSIONS: process.env.ELIZAOS_CLOUD_EMBEDDING_DIMENSIONS,
  ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MODEL: process.env.ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MODEL,
  ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MAX_TOKENS: process.env.ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MAX_TOKENS,
- ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL: process.env.ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL,
- ELIZAOS_CLOUD_TTS_MODEL: process.env.ELIZAOS_CLOUD_TTS_MODEL,
- ELIZAOS_CLOUD_TTS_VOICE: process.env.ELIZAOS_CLOUD_TTS_VOICE,
- ELIZAOS_CLOUD_TRANSCRIPTION_MODEL: process.env.ELIZAOS_CLOUD_TRANSCRIPTION_MODEL,
- ELIZAOS_CLOUD_DATABASE: process.env.ELIZAOS_CLOUD_DATABASE,
- ELIZAOS_CLOUD_STORAGE: process.env.ELIZAOS_CLOUD_STORAGE,
- ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY: process.env.ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY
+ ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY: process.env.ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY,
+ ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL: process.env.ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL
  },
- priority: -1,
  async init(config, runtime) {
  initializeOpenAI(config, runtime);
- if (!isBrowser()) {
- initializeCloudStorage(runtime);
- }
- const cloudDatabaseEnabled = runtime.getSetting("ELIZAOS_CLOUD_DATABASE") === "true" || process.env.ELIZAOS_CLOUD_DATABASE === "true";
- if (cloudDatabaseEnabled && !isBrowser()) {
- await initializeCloudDatabase(runtime);
- }
  },
  models: {
- [import_core15.ModelType.TEXT_SMALL]: handleTextSmall,
- [import_core15.ModelType.TEXT_LARGE]: handleTextLarge,
- [import_core15.ModelType.TEXT_REASONING_SMALL]: handleTextSmall,
- [import_core15.ModelType.TEXT_REASONING_LARGE]: handleTextLarge,
- [import_core15.ModelType.OBJECT_SMALL]: handleObjectSmall,
- [import_core15.ModelType.OBJECT_LARGE]: handleObjectLarge,
- [import_core15.ModelType.TEXT_EMBEDDING]: handleTextEmbedding,
- [import_core15.ModelType.TEXT_TOKENIZER_ENCODE]: handleTokenizerEncode,
- [import_core15.ModelType.TEXT_TOKENIZER_DECODE]: handleTokenizerDecode,
- [import_core15.ModelType.IMAGE]: handleImageGeneration,
- [import_core15.ModelType.IMAGE_DESCRIPTION]: handleImageDescription,
- [import_core15.ModelType.TRANSCRIPTION]: handleTranscription,
- [import_core15.ModelType.TEXT_TO_SPEECH]: handleTextToSpeech
+ [import_core10.ModelType.TEXT_EMBEDDING]: handleTextEmbedding,
+ [import_core10.ModelType.TEXT_SMALL]: handleTextSmall,
+ [import_core10.ModelType.TEXT_LARGE]: handleTextLarge,
+ [import_core10.ModelType.IMAGE]: handleImageGeneration,
+ [import_core10.ModelType.IMAGE_DESCRIPTION]: handleImageDescription,
+ [import_core10.ModelType.OBJECT_SMALL]: handleObjectSmall,
+ [import_core10.ModelType.OBJECT_LARGE]: handleObjectLarge
  },
  tests: [
  {
@@ -1146,7 +648,7 @@ var elizaOSCloudPlugin = {
  }
  });
  const data = await response.json();
- import_core15.logger.log({ data: data?.data?.length ?? "N/A" }, "Models Available");
+ import_core10.logger.log({ data: data?.data?.length ?? "N/A" }, "Models Available");
  if (!response.ok) {
  throw new Error(`Failed to validate OpenAI API key: ${response.statusText}`);
  }
@@ -1156,13 +658,13 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_embedding",
  fn: async (runtime) => {
  try {
- const embedding = await runtime.useModel(import_core15.ModelType.TEXT_EMBEDDING, {
+ const embedding = await runtime.useModel(import_core10.ModelType.TEXT_EMBEDDING, {
  text: "Hello, world!"
  });
- import_core15.logger.log({ embedding }, "embedding");
+ import_core10.logger.log({ embedding }, "embedding");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- import_core15.logger.error(`Error in test_text_embedding: ${message}`);
+ import_core10.logger.error(`Error in test_text_embedding: ${message}`);
  throw error;
  }
  }
@@ -1171,16 +673,16 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_large",
  fn: async (runtime) => {
  try {
- const text = await runtime.useModel(import_core15.ModelType.TEXT_LARGE, {
+ const text = await runtime.useModel(import_core10.ModelType.TEXT_LARGE, {
  prompt: "What is the nature of reality in 10 words?"
  });
  if (text.length === 0) {
  throw new Error("Failed to generate text");
  }
- import_core15.logger.log({ text }, "generated with test_text_large");
+ import_core10.logger.log({ text }, "generated with test_text_large");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- import_core15.logger.error(`Error in test_text_large: ${message}`);
+ import_core10.logger.error(`Error in test_text_large: ${message}`);
  throw error;
  }
  }
@@ -1189,16 +691,16 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_small",
  fn: async (runtime) => {
  try {
- const text = await runtime.useModel(import_core15.ModelType.TEXT_SMALL, {
+ const text = await runtime.useModel(import_core10.ModelType.TEXT_SMALL, {
  prompt: "What is the nature of reality in 10 words?"
  });
  if (text.length === 0) {
  throw new Error("Failed to generate text");
  }
- import_core15.logger.log({ text }, "generated with test_text_small");
+ import_core10.logger.log({ text }, "generated with test_text_small");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- import_core15.logger.error(`Error in test_text_small: ${message}`);
+ import_core10.logger.error(`Error in test_text_small: ${message}`);
  throw error;
  }
  }
@@ -1206,17 +708,17 @@ var elizaOSCloudPlugin = {
  {
  name: "ELIZAOS_CLOUD_test_image_generation",
  fn: async (runtime) => {
- import_core15.logger.log("ELIZAOS_CLOUD_test_image_generation");
+ import_core10.logger.log("ELIZAOS_CLOUD_test_image_generation");
  try {
- const image = await runtime.useModel(import_core15.ModelType.IMAGE, {
+ const image = await runtime.useModel(import_core10.ModelType.IMAGE, {
  prompt: "A beautiful sunset over a calm ocean",
- n: 1,
+ count: 1,
  size: "1024x1024"
  });
- import_core15.logger.log({ image }, "generated with test_image_generation");
+ import_core10.logger.log({ image }, "generated with test_image_generation");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- import_core15.logger.error(`Error in test_image_generation: ${message}`);
+ import_core10.logger.error(`Error in test_image_generation: ${message}`);
  throw error;
  }
  }
@@ -1225,36 +727,36 @@ var elizaOSCloudPlugin = {
  name: "image-description",
  fn: async (runtime) => {
  try {
- import_core15.logger.log("ELIZAOS_CLOUD_test_image_description");
+ import_core10.logger.log("ELIZAOS_CLOUD_test_image_description");
  try {
- const result = await runtime.useModel(import_core15.ModelType.IMAGE_DESCRIPTION, "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg");
+ const result = await runtime.useModel(import_core10.ModelType.IMAGE_DESCRIPTION, "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg");
  if (result && typeof result === "object" && "title" in result && "description" in result) {
- import_core15.logger.log({ result }, "Image description");
+ import_core10.logger.log({ result }, "Image description");
  } else {
- import_core15.logger.error("Invalid image description result format:", result);
+ import_core10.logger.error("Invalid image description result format:", result);
  }
  } catch (e) {
  const message = e instanceof Error ? e.message : String(e);
- import_core15.logger.error(`Error in image description test: ${message}`);
+ import_core10.logger.error(`Error in image description test: ${message}`);
  }
  } catch (e) {
  const message = e instanceof Error ? e.message : String(e);
- import_core15.logger.error(`Error in ELIZAOS_CLOUD_test_image_description: ${message}`);
+ import_core10.logger.error(`Error in ELIZAOS_CLOUD_test_image_description: ${message}`);
  }
  }
  },
  {
  name: "ELIZAOS_CLOUD_test_transcription",
  fn: async (runtime) => {
- import_core15.logger.log("ELIZAOS_CLOUD_test_transcription");
+ import_core10.logger.log("ELIZAOS_CLOUD_test_transcription");
  try {
  const response = await fetch("https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg");
  const arrayBuffer = await response.arrayBuffer();
- const transcription = await runtime.useModel(import_core15.ModelType.TRANSCRIPTION, Buffer.from(new Uint8Array(arrayBuffer)));
- import_core15.logger.log({ transcription }, "generated with test_transcription");
+ const transcription = await runtime.useModel(import_core10.ModelType.TRANSCRIPTION, Buffer.from(new Uint8Array(arrayBuffer)));
+ import_core10.logger.log({ transcription }, "generated with test_transcription");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- import_core15.logger.error(`Error in test_transcription: ${message}`);
+ import_core10.logger.error(`Error in test_transcription: ${message}`);
  throw error;
  }
  }
@@ -1263,23 +765,23 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_tokenizer_encode",
  fn: async (runtime) => {
  const prompt = "Hello tokenizer encode!";
- const tokens = await runtime.useModel(import_core15.ModelType.TEXT_TOKENIZER_ENCODE, { prompt });
+ const tokens = await runtime.useModel(import_core10.ModelType.TEXT_TOKENIZER_ENCODE, { prompt, modelType: import_core10.ModelType.TEXT_SMALL });
  if (!Array.isArray(tokens) || tokens.length === 0) {
  throw new Error("Failed to tokenize text: expected non-empty array of tokens");
  }
- import_core15.logger.log({ tokens }, "Tokenized output");
+ import_core10.logger.log({ tokens }, "Tokenized output");
  }
  },
  {
  name: "ELIZAOS_CLOUD_test_text_tokenizer_decode",
  fn: async (runtime) => {
  const prompt = "Hello tokenizer decode!";
- const tokens = await runtime.useModel(import_core15.ModelType.TEXT_TOKENIZER_ENCODE, { prompt });
- const decodedText = await runtime.useModel(import_core15.ModelType.TEXT_TOKENIZER_DECODE, { tokens });
+ const tokens = await runtime.useModel(import_core10.ModelType.TEXT_TOKENIZER_ENCODE, { prompt, modelType: import_core10.ModelType.TEXT_SMALL });
+ const decodedText = await runtime.useModel(import_core10.ModelType.TEXT_TOKENIZER_DECODE, { tokens, modelType: import_core10.ModelType.TEXT_SMALL });
  if (decodedText !== prompt) {
  throw new Error(`Decoded text does not match original. Expected "${prompt}", got "${decodedText}"`);
  }
- import_core15.logger.log({ decodedText }, "Decoded text");
+ import_core10.logger.log({ decodedText }, "Decoded text");
  }
  },
  {
@@ -1292,10 +794,10 @@ var elizaOSCloudPlugin = {
  if (!response) {
  throw new Error("Failed to generate speech");
  }
- import_core15.logger.log("Generated speech successfully");
+ import_core10.logger.log("Generated speech successfully");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- import_core15.logger.error(`Error in ELIZAOS_CLOUD_test_text_to_speech: ${message}`);
+ import_core10.logger.error(`Error in ELIZAOS_CLOUD_test_text_to_speech: ${message}`);
  throw error;
  }
  }
@@ -1306,4 +808,4 @@ var elizaOSCloudPlugin = {
  };
  var src_default = elizaOSCloudPlugin;

- //# debugId=7F91E5A78C775CA264756E2164756E21
+ //# debugId=811C63D22E2DA02364756E2164756E21