@elizaos/plugin-elizacloud 1.7.0-alpha.0 → 1.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ var __toESM = (mod, isNodeMode, target) => {
  var __require = /* @__PURE__ */ createRequire(import.meta.url);

  // src/index.ts
- import { logger as logger13, ModelType as ModelType6 } from "@elizaos/core";
+ import { logger as logger9, ModelType as ModelType5 } from "@elizaos/core";

  // src/init.ts
  import { logger as logger2 } from "@elizaos/core";
@@ -26,7 +26,11 @@ import { logger as logger2 } from "@elizaos/core";
  // src/utils/config.ts
  import { logger } from "@elizaos/core";
  function getSetting(runtime, key, defaultValue) {
- return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;
+ const value = runtime.getSetting(key);
+ if (value !== undefined && value !== null) {
+ return String(value);
+ }
+ return process.env[key] ?? defaultValue;
  }
  function isBrowser() {
  return typeof globalThis !== "undefined" && typeof globalThis.document !== "undefined";
@@ -124,7 +128,7 @@ function initializeOpenAI(_config, runtime) {

  // src/models/text.ts
  import { logger as logger3, ModelType } from "@elizaos/core";
- import { generateText } from "ai";
+ import { generateText, streamText } from "ai";

  // src/providers/openai.ts
  import { createOpenAI } from "@ai-sdk/openai";
@@ -139,33 +143,36 @@ import {
  EventType
  } from "@elizaos/core";
  function emitModelUsageEvent(runtime, type, prompt, usage) {
+ const truncatedPrompt = typeof prompt === "string" ? prompt.length > 200 ? `${prompt.slice(0, 200)}…` : prompt : "";
+ const inputTokens = Number(usage.inputTokens || 0);
+ const outputTokens = Number(usage.outputTokens || 0);
+ const totalTokens = Number(usage.totalTokens != null ? usage.totalTokens : inputTokens + outputTokens);
  runtime.emitEvent(EventType.MODEL_USED, {
- provider: "openai",
+ runtime,
+ source: "elizacloud",
+ provider: "elizacloud",
  type,
- prompt,
+ prompt: truncatedPrompt,
  tokens: {
- prompt: usage.inputTokens,
- completion: usage.outputTokens,
- total: usage.totalTokens
+ prompt: inputTokens,
+ completion: outputTokens,
+ total: totalTokens
  }
  });
  }

  // src/models/text.ts
- async function handleTextSmall(runtime, {
- prompt,
- stopSequences = [],
- maxTokens = 8192,
- temperature = 0.7,
- frequencyPenalty = 0.7,
- presencePenalty = 0.7
- }) {
+ function buildGenerateParams(runtime, modelType, params) {
+ const { prompt, stopSequences = [] } = params;
+ const temperature = params.temperature ?? 0.7;
+ const frequencyPenalty = params.frequencyPenalty ?? 0.7;
+ const presencePenalty = params.presencePenalty ?? 0.7;
+ const maxTokens = params.maxOutputTokens ?? params.maxTokens ?? 8192;
  const openai = createOpenAIClient(runtime);
- const modelName = getSmallModel(runtime);
+ const modelName = modelType === ModelType.TEXT_SMALL ? getSmallModel(runtime) : getLargeModel(runtime);
+ const modelLabel = modelType === ModelType.TEXT_SMALL ? "TEXT_SMALL" : "TEXT_LARGE";
  const experimentalTelemetry = getExperimentalTelemetry(runtime);
- logger3.log(`[ELIZAOS_CLOUD] Using TEXT_SMALL model: ${modelName}`);
- logger3.log(prompt);
- const { text: openaiResponse, usage } = await generateText({
+ const generateParams = {
  model: openai.languageModel(modelName),
  prompt,
  system: runtime.character.system ?? undefined,
@@ -177,42 +184,50 @@ async function handleTextSmall(runtime, {
  experimental_telemetry: {
  isEnabled: experimentalTelemetry
  }
- });
- if (usage) {
- emitModelUsageEvent(runtime, ModelType.TEXT_SMALL, prompt, usage);
- }
- return openaiResponse;
+ };
+ return { generateParams, modelName, modelLabel, prompt };
+ }
+ function handleStreamingGeneration(runtime, modelType, generateParams, prompt, modelLabel) {
+ logger3.debug(`[ELIZAOS_CLOUD] Streaming text with ${modelLabel} model`);
+ const streamResult = streamText(generateParams);
+ return {
+ textStream: streamResult.textStream,
+ text: streamResult.text,
+ usage: streamResult.usage.then((usage) => {
+ if (usage) {
+ emitModelUsageEvent(runtime, modelType, prompt, usage);
+ const inputTokens = usage.inputTokens ?? 0;
+ const outputTokens = usage.outputTokens ?? 0;
+ return {
+ promptTokens: inputTokens,
+ completionTokens: outputTokens,
+ totalTokens: inputTokens + outputTokens
+ };
+ }
+ return;
+ }),
+ finishReason: streamResult.finishReason
+ };
  }
- async function handleTextLarge(runtime, {
- prompt,
- stopSequences = [],
- maxTokens = 8192,
- temperature = 0.7,
- frequencyPenalty = 0.7,
- presencePenalty = 0.7
- }) {
- const openai = createOpenAIClient(runtime);
- const modelName = getLargeModel(runtime);
- const experimentalTelemetry = getExperimentalTelemetry(runtime);
- logger3.log(`[ELIZAOS_CLOUD] Using TEXT_LARGE model: ${modelName}`);
+ async function generateTextWithModel(runtime, modelType, params) {
+ const { generateParams, modelName, modelLabel, prompt } = buildGenerateParams(runtime, modelType, params);
+ logger3.debug(`[ELIZAOS_CLOUD] Generating text with ${modelLabel} model: ${modelName}`);
+ if (params.stream) {
+ return handleStreamingGeneration(runtime, modelType, generateParams, prompt, modelLabel);
+ }
+ logger3.log(`[ELIZAOS_CLOUD] Using ${modelLabel} model: ${modelName}`);
  logger3.log(prompt);
- const { text: openaiResponse, usage } = await generateText({
- model: openai.languageModel(modelName),
- prompt,
- system: runtime.character.system ?? undefined,
- temperature,
- maxOutputTokens: maxTokens,
- frequencyPenalty,
- presencePenalty,
- stopSequences,
- experimental_telemetry: {
- isEnabled: experimentalTelemetry
- }
- });
- if (usage) {
- emitModelUsageEvent(runtime, ModelType.TEXT_LARGE, prompt, usage);
+ const response = await generateText(generateParams);
+ if (response.usage) {
+ emitModelUsageEvent(runtime, modelType, prompt, response.usage);
  }
- return openaiResponse;
+ return response.text;
+ }
+ async function handleTextSmall(runtime, params) {
+ return generateTextWithModel(runtime, ModelType.TEXT_SMALL, params);
+ }
+ async function handleTextLarge(runtime, params) {
+ return generateTextWithModel(runtime, ModelType.TEXT_LARGE, params);
  }
  // src/models/object.ts
  import { logger as logger5, ModelType as ModelType2 } from "@elizaos/core";
@@ -237,31 +252,6 @@ function getJsonRepairFunction() {
  }
  };
  }
- function detectAudioMimeType(buffer) {
- if (buffer.length < 12) {
- return "application/octet-stream";
- }
- if (buffer[0] === 82 && buffer[1] === 73 && buffer[2] === 70 && buffer[3] === 70 && buffer[8] === 87 && buffer[9] === 65 && buffer[10] === 86 && buffer[11] === 69) {
- return "audio/wav";
- }
- if (buffer[0] === 73 && buffer[1] === 68 && buffer[2] === 51 || buffer[0] === 255 && (buffer[1] & 224) === 224) {
- return "audio/mpeg";
- }
- if (buffer[0] === 79 && buffer[1] === 103 && buffer[2] === 103 && buffer[3] === 83) {
- return "audio/ogg";
- }
- if (buffer[0] === 102 && buffer[1] === 76 && buffer[2] === 97 && buffer[3] === 67) {
- return "audio/flac";
- }
- if (buffer[4] === 102 && buffer[5] === 116 && buffer[6] === 121 && buffer[7] === 112) {
- return "audio/mp4";
- }
- if (buffer[0] === 26 && buffer[1] === 69 && buffer[2] === 223 && buffer[3] === 163) {
- return "audio/webm";
- }
- logger4.warn("Could not detect audio format from buffer, using generic binary type");
- return "application/octet-stream";
- }
  async function webStreamToNodeStream(webStream) {
  try {
  const { Readable } = await import("node:stream");
@@ -434,7 +424,7 @@ async function handleTextEmbedding(runtime, params) {
  // src/models/image.ts
  import { logger as logger7, ModelType as ModelType4 } from "@elizaos/core";
  async function handleImageGeneration(runtime, params) {
- const numImages = params.n || 1;
+ const numImages = params.count || 1;
  const size = params.size || "1024x1024";
  const prompt = params.prompt;
  const modelName = getImageGenerationModel(runtime);
@@ -530,10 +520,6 @@ async function handleImageDescription(runtime, params) {
  description: "No response from API"
  };
  }
- const isCustomPrompt = typeof params === "object" && params.prompt && params.prompt !== "Please analyze this image and provide a title and detailed description.";
- if (isCustomPrompt) {
- return content;
- }
  const processedResult = parseImageDescriptionResponse(content);
  return processedResult;
  } catch (error) {
@@ -545,89 +531,8 @@ async function handleImageDescription(runtime, params) {
  };
  }
  }
- // src/models/transcription.ts
- import { logger as logger8 } from "@elizaos/core";
- async function handleTranscription(runtime, input) {
- let modelName = getSetting(runtime, "ELIZAOS_CLOUD_TRANSCRIPTION_MODEL", "gpt-4o-mini-transcribe");
- logger8.log(`[ELIZAOS_CLOUD] Using TRANSCRIPTION model: ${modelName}`);
- const baseURL = getBaseURL(runtime);
- let blob;
- let extraParams = null;
- if (input instanceof Blob || input instanceof File) {
- blob = input;
- } else if (Buffer.isBuffer(input)) {
- const detectedMimeType = detectAudioMimeType(input);
- logger8.debug(`Auto-detected audio MIME type: ${detectedMimeType}`);
- blob = new Blob([input], { type: detectedMimeType });
- } else if (typeof input === "object" && input !== null && "audio" in input && input.audio != null) {
- const params = input;
- if (!(params.audio instanceof Blob) && !(params.audio instanceof File) && !Buffer.isBuffer(params.audio)) {
- throw new Error("TRANSCRIPTION param 'audio' must be a Blob/File/Buffer.");
- }
- if (Buffer.isBuffer(params.audio)) {
- let mimeType = params.mimeType;
- if (!mimeType) {
- mimeType = detectAudioMimeType(params.audio);
- logger8.debug(`Auto-detected audio MIME type: ${mimeType}`);
- } else {
- logger8.debug(`Using provided MIME type: ${mimeType}`);
- }
- blob = new Blob([params.audio], { type: mimeType });
- } else {
- blob = params.audio;
- }
- extraParams = params;
- if (typeof params.model === "string" && params.model) {
- modelName = params.model;
- }
- } else {
- throw new Error("TRANSCRIPTION expects a Blob/File/Buffer or an object { audio: Blob/File/Buffer, mimeType?, language?, response_format?, timestampGranularities?, prompt?, temperature?, model? }");
- }
- const mime = blob.type || "audio/webm";
- const filename = blob.name || (mime.includes("mp3") || mime.includes("mpeg") ? "recording.mp3" : mime.includes("ogg") ? "recording.ogg" : mime.includes("wav") ? "recording.wav" : mime.includes("webm") ? "recording.webm" : "recording.bin");
- const formData = new FormData;
- formData.append("file", blob, filename);
- formData.append("model", String(modelName));
- if (extraParams) {
- if (typeof extraParams.language === "string") {
- formData.append("language", String(extraParams.language));
- }
- if (typeof extraParams.response_format === "string") {
- formData.append("response_format", String(extraParams.response_format));
- }
- if (typeof extraParams.prompt === "string") {
- formData.append("prompt", String(extraParams.prompt));
- }
- if (typeof extraParams.temperature === "number") {
- formData.append("temperature", String(extraParams.temperature));
- }
- if (Array.isArray(extraParams.timestampGranularities)) {
- for (const g of extraParams.timestampGranularities) {
- formData.append("timestamp_granularities[]", String(g));
- }
- }
- }
- try {
- const response = await fetch(`${baseURL}/audio/transcriptions`, {
- method: "POST",
- headers: {
- ...getAuthHeader(runtime)
- },
- body: formData
- });
- if (!response.ok) {
- throw new Error(`Failed to transcribe audio: ${response.status} ${response.statusText}`);
- }
- const data = await response.json();
- return data.text || "";
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- logger8.error(`TRANSCRIPTION error: ${message}`);
- throw error;
- }
- }
  // src/models/speech.ts
- import { logger as logger9 } from "@elizaos/core";
+ import { logger as logger8 } from "@elizaos/core";
  async function fetchTextToSpeech(runtime, options) {
  const defaultModel = getSetting(runtime, "ELIZAOS_CLOUD_TTS_MODEL", "gpt-4o-mini-tts");
  const defaultVoice = getSetting(runtime, "ELIZAOS_CLOUD_TTS_VOICE", "nova");
@@ -669,367 +574,10 @@ async function fetchTextToSpeech(runtime, options) {
  throw new Error(`Failed to fetch speech from ElizaOS Cloud TTS: ${message}`);
  }
  }
- async function handleTextToSpeech(runtime, input) {
- const options = typeof input === "string" ? { text: input } : input;
- const resolvedModel = options.model || getSetting(runtime, "ELIZAOS_CLOUD_TTS_MODEL", "gpt-4o-mini-tts");
- logger9.log(`[ELIZAOS_CLOUD] Using TEXT_TO_SPEECH model: ${resolvedModel}`);
- try {
- const speechStream = await fetchTextToSpeech(runtime, options);
- return speechStream;
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- logger9.error(`Error in TEXT_TO_SPEECH: ${message}`);
- throw error;
- }
- }
- // src/models/tokenization.ts
- import { ModelType as ModelType5 } from "@elizaos/core";
- import { encodingForModel } from "js-tiktoken";
- async function tokenizeText(model, prompt) {
- const modelName = model === ModelType5.TEXT_SMALL ? process.env.ELIZAOS_CLOUD_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-5-nano" : process.env.LARGE_MODEL ?? "gpt-5-mini";
- const tokens = encodingForModel(modelName).encode(prompt);
- return tokens;
- }
- async function detokenizeText(model, tokens) {
- const modelName = model === ModelType5.TEXT_SMALL ? process.env.ELIZAOS_CLOUD_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-5-nano" : process.env.ELIZAOS_CLOUD_LARGE_MODEL ?? process.env.LARGE_MODEL ?? "gpt-5-mini";
- return encodingForModel(modelName).decode(tokens);
- }
- async function handleTokenizerEncode(_runtime, { prompt, modelType = ModelType5.TEXT_LARGE }) {
- return await tokenizeText(modelType ?? ModelType5.TEXT_LARGE, prompt);
- }
- async function handleTokenizerDecode(_runtime, { tokens, modelType = ModelType5.TEXT_LARGE }) {
- return await detokenizeText(modelType ?? ModelType5.TEXT_LARGE, tokens);
- }
- // src/database/adapter.ts
- import { logger as logger10 } from "@elizaos/core";
- import pluginSql from "@elizaos/plugin-sql/node";
- var DEFAULT_CLOUD_URL = "https://www.elizacloud.ai";
- async function createCloudDatabaseAdapter(config) {
- const baseUrl = config.baseUrl || DEFAULT_CLOUD_URL;
- logger10.info({ src: "plugin:elizacloud", agentId: config.agentId }, "Provisioning cloud database");
- const response = await provisionCloudDatabase(config.apiKey, baseUrl, config.agentId);
- if (!response.success || !response.connectionUrl) {
- logger10.error({
- src: "plugin:elizacloud",
- error: response.error,
- agentId: config.agentId
- }, "Failed to provision cloud database");
- return null;
- }
- logger10.info({ src: "plugin:elizacloud", agentId: config.agentId }, "Cloud database provisioned successfully");
- const adapter = pluginSql.createDatabaseAdapter({ postgresUrl: response.connectionUrl }, config.agentId);
- logger10.info({ src: "plugin:elizacloud", agentId: config.agentId }, "Cloud database adapter created using PostgreSQL connection");
- return adapter;
- }
- async function provisionCloudDatabase(apiKey, baseUrl, agentId) {
- try {
- const response = await fetch(`${baseUrl}/api/v1/database/provision`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${apiKey}`,
- "Content-Type": "application/json"
- },
- body: JSON.stringify({
- agentId,
- type: "postgresql"
- })
- });
- if (!response.ok) {
- const errorText = await response.text();
- return {
- success: false,
- error: `Cloud database provisioning failed: ${response.status} ${errorText}`
- };
- }
- const data = await response.json();
- return {
- success: true,
- connectionUrl: data.connectionUrl,
- expiresAt: data.expiresAt
- };
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- return {
- success: false,
- error: `Network error during database provisioning: ${message}`
- };
- }
- }
-
- class CloudDatabaseAdapter {
- config;
- adapter = null;
- constructor(config) {
- this.config = config;
- }
- async initialize() {
- if (this.adapter) {
- return this.adapter;
- }
- this.adapter = await createCloudDatabaseAdapter(this.config);
- return this.adapter;
- }
- getAdapter() {
- return this.adapter;
- }
- }
-
- // src/storage/service.ts
- import { logger as logger11 } from "@elizaos/core";
- var DEFAULT_CLOUD_URL2 = "https://www.elizacloud.ai";
- var STORAGE_ENDPOINT = "/api/v1/storage/files";
- function createCloudStorageService(config) {
- return new CloudStorageService(config);
- }
-
- class CloudStorageService {
- apiKey;
- baseUrl;
- constructor(config) {
- this.apiKey = config.apiKey;
- this.baseUrl = config.baseUrl || DEFAULT_CLOUD_URL2;
- }
- async upload(file, options = {}) {
- try {
- const formData = new FormData;
- let blob;
- if (Buffer.isBuffer(file)) {
- blob = new Blob([file], {
- type: options.contentType || "application/octet-stream"
- });
- } else {
- blob = file;
- }
- const filename = options.filename || (file instanceof File ? file.name : "file") || "upload";
- formData.append("file", blob, filename);
- if (options.metadata) {
- formData.append("metadata", JSON.stringify(options.metadata));
- }
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- },
- body: formData
- });
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({}));
- if (response.status === 402) {
- return {
- success: false,
- error: `Insufficient credits. Required: ${errorData.required || "unknown"}, Available: ${errorData.available || "unknown"}. Top up at ${errorData.topUpUrl || "/dashboard/billing"}`
- };
- }
- return {
- success: false,
- error: `Upload failed: ${response.status} ${errorData.error || "Unknown error"}`
- };
- }
- const data = await response.json();
- logger11.info({ src: "plugin:elizacloud", cost: data.cost, remaining: data.creditsRemaining }, "Storage upload successful");
- return {
- success: true,
- id: data.id,
- url: data.url,
- pathname: data.pathname,
- contentType: data.contentType,
- size: data.size,
- cost: data.cost,
- creditsRemaining: data.creditsRemaining
- };
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- logger11.error({ src: "plugin:elizacloud", error }, "Storage upload failed");
- return {
- success: false,
- error: `Upload error: ${message}`
- };
- }
- }
- async download(id, url) {
- if (url) {
- try {
- const response = await fetch(url);
- if (!response.ok) {
- logger11.error({ src: "plugin:elizacloud", status: response.status, url }, "Storage direct download failed");
- return null;
- }
- const arrayBuffer = await response.arrayBuffer();
- return Buffer.from(arrayBuffer);
- } catch (error) {
- logger11.error({ src: "plugin:elizacloud", error }, "Storage direct download error");
- return null;
- }
- }
- try {
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}/${id}?download=true`, {
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- },
- redirect: "follow"
- });
- if (!response.ok) {
- logger11.error({ src: "plugin:elizacloud", status: response.status }, "Storage download failed");
- return null;
- }
- const arrayBuffer = await response.arrayBuffer();
- return Buffer.from(arrayBuffer);
- } catch (error) {
- logger11.error({ src: "plugin:elizacloud", error }, "Storage download error");
- return null;
- }
- }
- async list(options = {}) {
- try {
- const params = new URLSearchParams;
- if (options.prefix)
- params.set("prefix", options.prefix);
- if (options.limit)
- params.set("limit", String(options.limit));
- if (options.cursor)
- params.set("cursor", options.cursor);
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}?${params.toString()}`, {
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- }
- });
- if (!response.ok) {
- logger11.error({ src: "plugin:elizacloud", status: response.status }, "Storage list failed");
- return { items: [], hasMore: false };
- }
- const data = await response.json();
- return {
- items: data.items || [],
- cursor: data.cursor,
- hasMore: data.hasMore || false
- };
- } catch (error) {
- logger11.error({ src: "plugin:elizacloud", error }, "Storage list error");
- return { items: [], hasMore: false };
- }
- }
- async delete(id, url) {
- if (!url) {
- logger11.error({ src: "plugin:elizacloud" }, "Storage delete requires file URL");
- return false;
- }
- try {
- const params = new URLSearchParams({ url });
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}/${id}?${params.toString()}`, {
- method: "DELETE",
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- }
- });
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({}));
- logger11.error({ src: "plugin:elizacloud", status: response.status, error: errorData.error }, "Storage delete failed");
- return false;
- }
- return true;
- } catch (error) {
- logger11.error({ src: "plugin:elizacloud", error }, "Storage delete error");
- return false;
- }
- }
- async getStats() {
- try {
- const response = await fetch(`${this.baseUrl}${STORAGE_ENDPOINT}?stats=true`, {
- headers: {
- Authorization: `Bearer ${this.apiKey}`
- }
- });
- if (!response.ok) {
- return null;
- }
- const data = await response.json();
- return {
- totalFiles: data.stats?.totalFiles || 0,
- totalSize: data.stats?.totalSize || 0,
- totalSizeGB: data.stats?.totalSizeGB || 0,
- pricing: data.pricing || {}
- };
- } catch (error) {
- logger11.error({ src: "plugin:elizacloud", error }, "Storage stats error");
- return null;
- }
- }
- }
- // src/database/direct-adapter.ts
- import { logger as logger12 } from "@elizaos/core";
- import pluginSql2 from "@elizaos/plugin-sql/node";
- function createDatabaseAdapter(config, agentId) {
- const adapter = pluginSql2.createDatabaseAdapter({ postgresUrl: config.postgresUrl }, agentId);
- logger12.info({ src: "plugin:elizacloud", agentId }, "Direct database adapter created");
- return adapter;
- }
- async function createDirectDatabaseAdapter(config, agentId) {
- return createDatabaseAdapter(config, agentId);
- }
- // src/database/schema.ts
- import pluginSql3 from "@elizaos/plugin-sql/node";
- var {
- agentTable,
- roomTable,
- participantTable,
- memoryTable,
- embeddingTable,
- entityTable,
- relationshipTable,
- componentTable,
- taskTable,
- logTable,
- cacheTable,
- worldTable,
- serverTable,
- messageTable,
- messageServerTable,
- messageServerAgentsTable,
- channelTable,
- channelParticipantsTable
- } = pluginSql3.schema;
- var serverAgentsTable = serverTable;
  // src/index.ts
- var cloudStorageInstance = null;
- function getCloudStorage() {
- return cloudStorageInstance;
- }
- async function initializeCloudDatabase(runtime) {
- const apiKey = getApiKey(runtime);
- const baseUrl = getBaseURL(runtime);
- if (!apiKey) {
- logger13.warn({ src: "plugin:elizacloud" }, "Cloud database enabled but no API key found - skipping database initialization");
- return;
- }
- logger13.info({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Initializing cloud database");
- const adapter = await createCloudDatabaseAdapter({
- apiKey,
- baseUrl,
- agentId: runtime.agentId
- });
- if (adapter) {
- runtime.registerDatabaseAdapter(adapter);
- logger13.info({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Cloud database adapter registered successfully");
- } else {
- logger13.error({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Failed to initialize cloud database adapter");
- }
- }
- function initializeCloudStorage(runtime) {
- const apiKey = getApiKey(runtime);
- const baseUrl = getBaseURL(runtime);
- if (!apiKey) {
- logger13.warn({ src: "plugin:elizacloud" }, "No API key found - cloud storage will not be available");
- return;
- }
- cloudStorageInstance = new CloudStorageService({
- apiKey,
- baseUrl
- });
- logger13.info({ src: "plugin:elizacloud", agentId: runtime.agentId }, "Cloud storage service initialized");
- }
  var elizaOSCloudPlugin = {
  name: "elizaOSCloud",
- description: "ElizaOS Cloud plugin - Complete AI, storage, and database solution. Provides multi-model inference (GPT-4, Claude, Gemini), embeddings, image generation, transcription, TTS, managed PostgreSQL database, and cloud file storage. A single plugin that replaces all other AI and database plugins.",
+ description: "ElizaOS Cloud plugin - Multi-model AI generation with text, image, and video support",
  config: {
  ELIZAOS_CLOUD_API_KEY: process.env.ELIZAOS_CLOUD_API_KEY,
  ELIZAOS_CLOUD_BASE_URL: process.env.ELIZAOS_CLOUD_BASE_URL,
@@ -1043,39 +591,20 @@ var elizaOSCloudPlugin = {
  ELIZAOS_CLOUD_EMBEDDING_DIMENSIONS: process.env.ELIZAOS_CLOUD_EMBEDDING_DIMENSIONS,
  ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MODEL: process.env.ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MODEL,
  ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MAX_TOKENS: process.env.ELIZAOS_CLOUD_IMAGE_DESCRIPTION_MAX_TOKENS,
- ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL: process.env.ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL,
- ELIZAOS_CLOUD_TTS_MODEL: process.env.ELIZAOS_CLOUD_TTS_MODEL,
- ELIZAOS_CLOUD_TTS_VOICE: process.env.ELIZAOS_CLOUD_TTS_VOICE,
- ELIZAOS_CLOUD_TRANSCRIPTION_MODEL: process.env.ELIZAOS_CLOUD_TRANSCRIPTION_MODEL,
- ELIZAOS_CLOUD_DATABASE: process.env.ELIZAOS_CLOUD_DATABASE,
- ELIZAOS_CLOUD_STORAGE: process.env.ELIZAOS_CLOUD_STORAGE,
- ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY: process.env.ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY
+ ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY: process.env.ELIZAOS_CLOUD_EXPERIMENTAL_TELEMETRY,
+ ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL: process.env.ELIZAOS_CLOUD_IMAGE_GENERATION_MODEL
  },
- priority: -1,
  async init(config, runtime) {
  initializeOpenAI(config, runtime);
- if (!isBrowser()) {
- initializeCloudStorage(runtime);
- }
- const cloudDatabaseEnabled = runtime.getSetting("ELIZAOS_CLOUD_DATABASE") === "true" || process.env.ELIZAOS_CLOUD_DATABASE === "true";
- if (cloudDatabaseEnabled && !isBrowser()) {
- await initializeCloudDatabase(runtime);
- }
  },
  models: {
- [ModelType6.TEXT_SMALL]: handleTextSmall,
- [ModelType6.TEXT_LARGE]: handleTextLarge,
- [ModelType6.TEXT_REASONING_SMALL]: handleTextSmall,
- [ModelType6.TEXT_REASONING_LARGE]: handleTextLarge,
- [ModelType6.OBJECT_SMALL]: handleObjectSmall,
- [ModelType6.OBJECT_LARGE]: handleObjectLarge,
- [ModelType6.TEXT_EMBEDDING]: handleTextEmbedding,
- [ModelType6.TEXT_TOKENIZER_ENCODE]: handleTokenizerEncode,
- [ModelType6.TEXT_TOKENIZER_DECODE]: handleTokenizerDecode,
- [ModelType6.IMAGE]: handleImageGeneration,
- [ModelType6.IMAGE_DESCRIPTION]: handleImageDescription,
- [ModelType6.TRANSCRIPTION]: handleTranscription,
- [ModelType6.TEXT_TO_SPEECH]: handleTextToSpeech
+ [ModelType5.TEXT_EMBEDDING]: handleTextEmbedding,
+ [ModelType5.TEXT_SMALL]: handleTextSmall,
+ [ModelType5.TEXT_LARGE]: handleTextLarge,
+ [ModelType5.IMAGE]: handleImageGeneration,
+ [ModelType5.IMAGE_DESCRIPTION]: handleImageDescription,
+ [ModelType5.OBJECT_SMALL]: handleObjectSmall,
+ [ModelType5.OBJECT_LARGE]: handleObjectLarge
  },
  tests: [
  {
@@ -1091,7 +620,7 @@ var elizaOSCloudPlugin = {
  }
  });
  const data = await response.json();
- logger13.log({ data: data?.data?.length ?? "N/A" }, "Models Available");
+ logger9.log({ data: data?.data?.length ?? "N/A" }, "Models Available");
  if (!response.ok) {
  throw new Error(`Failed to validate OpenAI API key: ${response.statusText}`);
  }
@@ -1101,13 +630,13 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_embedding",
  fn: async (runtime) => {
  try {
- const embedding = await runtime.useModel(ModelType6.TEXT_EMBEDDING, {
+ const embedding = await runtime.useModel(ModelType5.TEXT_EMBEDDING, {
  text: "Hello, world!"
  });
- logger13.log({ embedding }, "embedding");
+ logger9.log({ embedding }, "embedding");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- logger13.error(`Error in test_text_embedding: ${message}`);
+ logger9.error(`Error in test_text_embedding: ${message}`);
  throw error;
  }
  }
@@ -1116,16 +645,16 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_large",
  fn: async (runtime) => {
  try {
- const text = await runtime.useModel(ModelType6.TEXT_LARGE, {
+ const text = await runtime.useModel(ModelType5.TEXT_LARGE, {
  prompt: "What is the nature of reality in 10 words?"
  });
  if (text.length === 0) {
  throw new Error("Failed to generate text");
  }
- logger13.log({ text }, "generated with test_text_large");
+ logger9.log({ text }, "generated with test_text_large");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- logger13.error(`Error in test_text_large: ${message}`);
+ logger9.error(`Error in test_text_large: ${message}`);
  throw error;
  }
  }
@@ -1134,16 +663,16 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_small",
  fn: async (runtime) => {
  try {
- const text = await runtime.useModel(ModelType6.TEXT_SMALL, {
+ const text = await runtime.useModel(ModelType5.TEXT_SMALL, {
  prompt: "What is the nature of reality in 10 words?"
  });
  if (text.length === 0) {
  throw new Error("Failed to generate text");
  }
- logger13.log({ text }, "generated with test_text_small");
+ logger9.log({ text }, "generated with test_text_small");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- logger13.error(`Error in test_text_small: ${message}`);
+ logger9.error(`Error in test_text_small: ${message}`);
  throw error;
  }
  }
@@ -1151,17 +680,17 @@ var elizaOSCloudPlugin = {
  {
  name: "ELIZAOS_CLOUD_test_image_generation",
  fn: async (runtime) => {
- logger13.log("ELIZAOS_CLOUD_test_image_generation");
+ logger9.log("ELIZAOS_CLOUD_test_image_generation");
  try {
- const image = await runtime.useModel(ModelType6.IMAGE, {
+ const image = await runtime.useModel(ModelType5.IMAGE, {
  prompt: "A beautiful sunset over a calm ocean",
- n: 1,
+ count: 1,
  size: "1024x1024"
  });
- logger13.log({ image }, "generated with test_image_generation");
+ logger9.log({ image }, "generated with test_image_generation");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- logger13.error(`Error in test_image_generation: ${message}`);
+ logger9.error(`Error in test_image_generation: ${message}`);
  throw error;
  }
  }
@@ -1170,36 +699,36 @@ var elizaOSCloudPlugin = {
  name: "image-description",
  fn: async (runtime) => {
  try {
- logger13.log("ELIZAOS_CLOUD_test_image_description");
+ logger9.log("ELIZAOS_CLOUD_test_image_description");
  try {
- const result = await runtime.useModel(ModelType6.IMAGE_DESCRIPTION, "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg");
+ const result = await runtime.useModel(ModelType5.IMAGE_DESCRIPTION, "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg");
  if (result && typeof result === "object" && "title" in result && "description" in result) {
- logger13.log({ result }, "Image description");
+ logger9.log({ result }, "Image description");
  } else {
- logger13.error("Invalid image description result format:", result);
+ logger9.error("Invalid image description result format:", result);
  }
  } catch (e) {
  const message = e instanceof Error ? e.message : String(e);
- logger13.error(`Error in image description test: ${message}`);
+ logger9.error(`Error in image description test: ${message}`);
  }
  } catch (e) {
  const message = e instanceof Error ? e.message : String(e);
- logger13.error(`Error in ELIZAOS_CLOUD_test_image_description: ${message}`);
+ logger9.error(`Error in ELIZAOS_CLOUD_test_image_description: ${message}`);
  }
  }
  },
  {
  name: "ELIZAOS_CLOUD_test_transcription",
  fn: async (runtime) => {
- logger13.log("ELIZAOS_CLOUD_test_transcription");
+ logger9.log("ELIZAOS_CLOUD_test_transcription");
  try {
  const response = await fetch("https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg");
  const arrayBuffer = await response.arrayBuffer();
- const transcription = await runtime.useModel(ModelType6.TRANSCRIPTION, Buffer.from(new Uint8Array(arrayBuffer)));
- logger13.log({ transcription }, "generated with test_transcription");
+ const transcription = await runtime.useModel(ModelType5.TRANSCRIPTION, Buffer.from(new Uint8Array(arrayBuffer)));
+ logger9.log({ transcription }, "generated with test_transcription");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- logger13.error(`Error in test_transcription: ${message}`);
+ logger9.error(`Error in test_transcription: ${message}`);
  throw error;
  }
  }
@@ -1208,23 +737,23 @@ var elizaOSCloudPlugin = {
  name: "ELIZAOS_CLOUD_test_text_tokenizer_encode",
  fn: async (runtime) => {
  const prompt = "Hello tokenizer encode!";
- const tokens = await runtime.useModel(ModelType6.TEXT_TOKENIZER_ENCODE, { prompt });
+ const tokens = await runtime.useModel(ModelType5.TEXT_TOKENIZER_ENCODE, { prompt, modelType: ModelType5.TEXT_SMALL });
  if (!Array.isArray(tokens) || tokens.length === 0) {
  throw new Error("Failed to tokenize text: expected non-empty array of tokens");
  }
- logger13.log({ tokens }, "Tokenized output");
+ logger9.log({ tokens }, "Tokenized output");
  }
  },
  {
  name: "ELIZAOS_CLOUD_test_text_tokenizer_decode",
  fn: async (runtime) => {
  const prompt = "Hello tokenizer decode!";
- const tokens = await runtime.useModel(ModelType6.TEXT_TOKENIZER_ENCODE, { prompt });
- const decodedText = await runtime.useModel(ModelType6.TEXT_TOKENIZER_DECODE, { tokens });
+ const tokens = await runtime.useModel(ModelType5.TEXT_TOKENIZER_ENCODE, { prompt, modelType: ModelType5.TEXT_SMALL });
+ const decodedText = await runtime.useModel(ModelType5.TEXT_TOKENIZER_DECODE, { tokens, modelType: ModelType5.TEXT_SMALL });
  if (decodedText !== prompt) {
  throw new Error(`Decoded text does not match original. Expected "${prompt}", got "${decodedText}"`);
  }
- logger13.log({ decodedText }, "Decoded text");
+ logger9.log({ decodedText }, "Decoded text");
  }
  },
  {
@@ -1237,10 +766,10 @@ var elizaOSCloudPlugin = {
  if (!response) {
  throw new Error("Failed to generate speech");
  }
- logger13.log("Generated speech successfully");
+ logger9.log("Generated speech successfully");
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
- logger13.error(`Error in ELIZAOS_CLOUD_test_text_to_speech: ${message}`);
+ logger9.error(`Error in ELIZAOS_CLOUD_test_text_to_speech: ${message}`);
  throw error;
  }
  }
@@ -1251,35 +780,8 @@ var elizaOSCloudPlugin = {
  };
  var src_default = elizaOSCloudPlugin;
  export {
- worldTable,
- taskTable,
- serverTable,
- serverAgentsTable,
- roomTable,
- relationshipTable,
- pluginSql3 as pluginSql,
- participantTable,
- messageTable,
- messageServerTable,
- messageServerAgentsTable,
- memoryTable,
- logTable,
- getCloudStorage,
- entityTable,
- embeddingTable,
  elizaOSCloudPlugin,
- src_default as default,
- createDirectDatabaseAdapter,
- createDatabaseAdapter,
- createCloudStorageService,
- createCloudDatabaseAdapter,
- componentTable,
- channelTable,
- channelParticipantsTable,
- cacheTable,
- agentTable,
- CloudStorageService,
- CloudDatabaseAdapter
+ src_default as default
  };

- //# debugId=1EA438C51CA2A03164756E2164756E21
+ //# debugId=8EE90E4B202F55B064756E2164756E21