apexify.js 4.7.95 → 4.7.96

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/dist/cjs/ai/ApexAI.d.ts.map +1 -1
  2. package/dist/cjs/ai/ApexAI.js +41 -22
  3. package/dist/cjs/ai/ApexAI.js.map +1 -1
  4. package/dist/cjs/ai/buttons/tools.js +8 -8
  5. package/dist/cjs/ai/buttons/tools.js.map +1 -1
  6. package/dist/cjs/ai/modals-chat/groq/whisper.d.ts.map +1 -1
  7. package/dist/cjs/ai/modals-chat/groq/whisper.js +34 -33
  8. package/dist/cjs/ai/modals-chat/groq/whisper.js.map +1 -1
  9. package/dist/cjs/canvas/ApexPainter.d.ts +19 -14
  10. package/dist/cjs/canvas/ApexPainter.d.ts.map +1 -1
  11. package/dist/cjs/canvas/ApexPainter.js +132 -15
  12. package/dist/cjs/canvas/ApexPainter.js.map +1 -1
  13. package/dist/cjs/canvas/utils/types.d.ts +20 -12
  14. package/dist/cjs/canvas/utils/types.d.ts.map +1 -1
  15. package/dist/cjs/canvas/utils/types.js.map +1 -1
  16. package/dist/cjs/canvas/utils/utils.d.ts +2 -2
  17. package/dist/cjs/canvas/utils/utils.d.ts.map +1 -1
  18. package/dist/esm/ai/ApexAI.d.ts.map +1 -1
  19. package/dist/esm/ai/ApexAI.js +41 -22
  20. package/dist/esm/ai/ApexAI.js.map +1 -1
  21. package/dist/esm/ai/buttons/tools.js +8 -8
  22. package/dist/esm/ai/buttons/tools.js.map +1 -1
  23. package/dist/esm/ai/modals-chat/groq/whisper.d.ts.map +1 -1
  24. package/dist/esm/ai/modals-chat/groq/whisper.js +34 -33
  25. package/dist/esm/ai/modals-chat/groq/whisper.js.map +1 -1
  26. package/dist/esm/canvas/ApexPainter.d.ts +19 -14
  27. package/dist/esm/canvas/ApexPainter.d.ts.map +1 -1
  28. package/dist/esm/canvas/ApexPainter.js +132 -15
  29. package/dist/esm/canvas/ApexPainter.js.map +1 -1
  30. package/dist/esm/canvas/utils/types.d.ts +20 -12
  31. package/dist/esm/canvas/utils/types.d.ts.map +1 -1
  32. package/dist/esm/canvas/utils/types.js.map +1 -1
  33. package/dist/esm/canvas/utils/utils.d.ts +2 -2
  34. package/dist/esm/canvas/utils/utils.d.ts.map +1 -1
  35. package/lib/ai/ApexAI.ts +68 -47
  36. package/lib/ai/buttons/tools.ts +8 -8
  37. package/lib/ai/modals-chat/groq/whisper.ts +84 -83
  38. package/lib/canvas/ApexPainter.ts +180 -24
  39. package/lib/canvas/utils/types.ts +20 -13
  40. package/lib/canvas/utils/utils.ts +4 -2
  41. package/package.json +2 -2
@@ -7,7 +7,7 @@
  * @param drawBackgroundGradient The function for drawing a gradient background on the canvas.
  * @param customBackground The function for drawing a custom background image on the canvas.
  */
- import { OutputFormat, CanvasConfig, ImageProperties, TextObject, GIFOptions, GIFResults, CustomOptions, cropOptions, GradientConfig, Frame, PatternOptions, ExtractFramesOptions, ResizeOptions, CropOptions } from "./types";
+ import { OutputFormat, CanvasConfig, ImageProperties, TextObject, GIFOptions, GIFResults, CustomOptions, cropOptions, GradientConfig, Frame, PatternOptions, ExtractFramesOptions, ResizeOptions, CropOptions, MaskOptions, BlendOptions } from "./types";
  import { drawBackgroundColor, drawBackgroundGradient, customBackground, backgroundRadius } from "./Background/bg";
  import { applyRotation, imageRadius, applyStroke, applyZoom, applyShadow, objectRadius, drawShape, applyPerspective } from './Image/imageProperties';
  import { drawText, WrappedText } from "./Texts/textProperties";
@@ -15,5 +15,5 @@ import { loadImages, resizingImg, converter, applyColorFilters, imgEffects, crop
  import { customLines } from "./Custom/customLines";
  import { verticalBarChart, pieChart, lineChart } from './Charts/charts';
  import { url, arrayBuffer, base64, dataURL, blob } from "./General/conversion";
- export { url, OutputFormat, arrayBuffer, base64, dataURL, blob, CanvasConfig, ImageProperties, TextObject, GIFOptions, GIFResults, CustomOptions, cropOptions, customLines, drawBackgroundColor, drawBackgroundGradient, customBackground, applyRotation, imageRadius, applyStroke, applyZoom, applyShadow, objectRadius, backgroundRadius, drawShape, drawText, WrappedText, loadImages, resizingImg, converter, applyColorFilters, imgEffects, verticalBarChart, pieChart, lineChart, cropInner, cropOuter, detectColors, removeColor, bgRemoval, GradientConfig, Frame, PatternOptions, ExtractFramesOptions, ResizeOptions, CropOptions, applyPerspective };
+ export { url, OutputFormat, arrayBuffer, base64, dataURL, blob, CanvasConfig, ImageProperties, TextObject, GIFOptions, GIFResults, CustomOptions, cropOptions, customLines, drawBackgroundColor, drawBackgroundGradient, customBackground, applyRotation, imageRadius, applyStroke, applyZoom, applyShadow, objectRadius, backgroundRadius, drawShape, drawText, WrappedText, loadImages, resizingImg, converter, applyColorFilters, imgEffects, verticalBarChart, pieChart, lineChart, cropInner, cropOuter, detectColors, removeColor, bgRemoval, GradientConfig, Frame, PatternOptions, ExtractFramesOptions, ResizeOptions, CropOptions, applyPerspective, MaskOptions, BlendOptions };
  //# sourceMappingURL=utils.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../lib/canvas/utils/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAIH,OAAO,EAAE,YAAY,EAAE,YAAY,EAAE,eAAe,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,aAAa,EAAE,WAAW,EAAE,cAAc,EAAE,KAAK,EAAE,cAAc,EAAE,oBAAoB,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC;AAC/N,OAAO,EAAE,mBAAmB,EAAE,sBAAsB,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AAClH,OAAO,EAAE,aAAa,EAAE,WAAW,EAAE,WAAW,EAAE,SAAS,EAAE,WAAW,EAAE,YAAY,EAAE,SAAS,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAA;AACpJ,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC/D,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS,EAAE,iBAAiB,EAAE,UAAU,EAAE,SAAS,EAAE,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,6BAA6B,CAAC;AAC5K,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACnD,OAAO,EAAE,gBAAgB,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AACvE,OAAO,EAAE,GAAG,EAAE,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAG,MAAM,sBAAsB,CAAC;AAEhF,OAAO,EACH,GAAG,EACH,YAAY,EACZ,WAAW,EACX,MAAM,EACN,OAAO,EACP,IAAI,EACJ,YAAY,EACZ,eAAe,EACf,UAAU,EACV,UAAU,EACV,UAAU,EACV,aAAa,EACb,WAAW,EACX,WAAW,EACX,mBAAmB,EACnB,sBAAsB,EACtB,gBAAgB,EAChB,aAAa,EACb,WAAW,EACX,WAAW,EACX,SAAS,EACT,WAAW,EACX,YAAY,EACZ,gBAAgB,EAChB,SAAS,EACT,QAAQ,EACR,WAAW,EACX,UAAU,EACV,WAAW,EACX,SAAS,EACT,iBAAiB,EACjB,UAAU,EACV,gBAAgB,EAChB,QAAQ,EACR,SAAS,EACT,SAAS,EACT,SAAS,EACT,YAAY,EACZ,WAAW,EACX,SAAS,EACT,cAAc,EACd,KAAK,EACL,cAAc,EACd,oBAAoB,EACpB,aAAa,EACb,WAAW,EACX,gBAAgB,EACnB,CAAC"}
+ {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../lib/canvas/utils/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAIH,OAAO,EAAE,YAAY,EAAE,YAAY,EAAE,eAAe,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,aAAa,EAAE,WAAW,EAAE,cAAc,EAAE,KAAK,EAAE,cAAc,EAAE,oBAAoB,EAAE,aAAa,EAAE,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAC1P,OAAO,EAAE,mBAAmB,EAAE,sBAAsB,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AAClH,OAAO,EAAE,aAAa,EAAE,WAAW,EAAE,WAAW,EAAE,SAAS,EAAE,WAAW,EAAE,YAAY,EAAE,SAAS,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAA;AACpJ,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC/D,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS,EAAE,iBAAiB,EAAE,UAAU,EAAE,SAAS,EAAE,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,6BAA6B,CAAC;AAC5K,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACnD,OAAO,EAAE,gBAAgB,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AACvE,OAAO,EAAE,GAAG,EAAE,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAG,MAAM,sBAAsB,CAAC;AAEhF,OAAO,EACH,GAAG,EACH,YAAY,EACZ,WAAW,EACX,MAAM,EACN,OAAO,EACP,IAAI,EACJ,YAAY,EACZ,eAAe,EACf,UAAU,EACV,UAAU,EACV,UAAU,EACV,aAAa,EACb,WAAW,EACX,WAAW,EACX,mBAAmB,EACnB,sBAAsB,EACtB,gBAAgB,EAChB,aAAa,EACb,WAAW,EACX,WAAW,EACX,SAAS,EACT,WAAW,EACX,YAAY,EACZ,gBAAgB,EAChB,SAAS,EACT,QAAQ,EACR,WAAW,EACX,UAAU,EACV,WAAW,EACX,SAAS,EACT,iBAAiB,EACjB,UAAU,EACV,gBAAgB,EAChB,QAAQ,EACR,SAAS,EACT,SAAS,EACT,SAAS,EACT,YAAY,EACZ,WAAW,EACX,SAAS,EACT,cAAc,EACd,KAAK,EACL,cAAc,EACd,oBAAoB,EACpB,aAAa,EACb,WAAW,EACX,gBAAgB,EAChB,WAAW,EACX,YAAY,EACf,CAAC"}
package/lib/ai/ApexAI.ts CHANGED
@@ -8,7 +8,7 @@ import {
  } from "discord.js";
  import {
  joinVoiceChannel, createAudioPlayer, createAudioResource, EndBehaviorType,
- VoiceConnection, DiscordGatewayAdapterCreator, AudioPlayerStatus
+ VoiceConnection, AudioPlayerStatus
  } from "@discordjs/voice";

  import { filters } from "./buttons/tools";
@@ -568,11 +568,12 @@ export async function ApexAI(message: Message, ApexOptions: Options) {

  };

- const voiceQueue: { userId: string; text: string }[] = [];
+ let voiceQueue: { userId: string; text: string }[] = [];
  let isProcessing = false;
  let voiceConnection: VoiceConnection | null = null;
  let activeUser: string | null = null;
  let isRecording = false;
+ let silenceTimer: NodeJS.Timeout | null = null;

  const recordingsDir = path.join(process.cwd(), "recordings");
  if (!fs.existsSync(recordingsDir)) {
@@ -581,17 +582,21 @@ if (!fs.existsSync(recordingsDir)) {

  try {
  execSync("ffmpeg -version > nul 2>&1");
- } catch (err) {
+ } catch (err) {
  console.error("🚨 FFmpeg is NOT installed or not in PATH! Install it first.");
  }

+ function resetSilenceTimer() {
+ if (silenceTimer) clearTimeout(silenceTimer);
+ }
+
  export async function handleVoiceAI(message: any, voiceChannelId: string, chat: any, modelName?: string, personality?: string) {
  const guild = message.guild;
  if (!guild) return;

  const channel = guild.channels.cache.get(voiceChannelId);
  if (!channel || channel.type !== 2) {
- return await message.reply(`🚫 Invalid voice channel ID: ${voiceChannelId}`);
+ return await message.reply(`🚫 Invalid voice channel ID: ${voiceChannelId}`);
  }

  const botMember = guild.members.me;
@@ -608,7 +613,7 @@ export async function handleVoiceAI(message: any, voiceChannelId: string, chat:
  }

  if (voiceConnection) {
- return await message.reply("⚠️ AI is already in a voice channel.");
+ return await message.reply("⚠️ AI is already in a voice channel.");
  }

  voiceConnection = joinVoiceChannel({
@@ -620,7 +625,6 @@ export async function handleVoiceAI(message: any, voiceChannelId: string, chat:
  });

  activeUser = message.author.id;
-
  captureAudio(voiceConnection, chat, modelName, personality);
  }

@@ -628,15 +632,18 @@ function captureAudio(connection: VoiceConnection, chat: any, modelName?: string
  const receiver = connection.receiver;

  receiver.speaking.on("start", async (userId) => {
- if (userId !== activeUser || isRecording) return;
- isRecording = true;
+ if (userId !== activeUser) {
+ activeUser = userId;
+ isRecording = false;
+ }

+ resetSilenceTimer();

  const rawFilePath = path.join(recordingsDir, `${userId}.pcm`);
  const wavFilePath = path.join(recordingsDir, `${userId}.wav`);

  const opusStream = receiver.subscribe(userId, {
- end: { behavior: EndBehaviorType.AfterSilence, duration: 2000 }
+ end: { behavior: EndBehaviorType.AfterSilence, duration: 2000 }
  });

  const pcmStream = new prism.opus.Decoder({
@@ -647,35 +654,55 @@ function captureAudio(connection: VoiceConnection, chat: any, modelName?: string

  const writeStream = fs.createWriteStream(rawFilePath);
  pipeline(opusStream, pcmStream, writeStream, (err) => {
- isRecording = false;
  if (err) {
  console.error("❌ Error writing PCM file:", err);
  return;
  }
-
  convertPCMtoWAV(rawFilePath, wavFilePath, chat, modelName, personality);
  });
  });
+
+ receiver.speaking.on("end", async (userId) => {
+ if (userId === activeUser) {
+ startSilenceTimer(chat, modelName, personality);
+ }
+ });
+ }
+
+ function startSilenceTimer(chat: any, modelName?: string, personality?: string) {
+ resetSilenceTimer();
+ silenceTimer = setTimeout(() => {
+ if (voiceQueue.length > 0) {
+ const nextUser = voiceQueue.shift();
+ if (nextUser) {
+ activeUser = nextUser.userId;
+ processQueue(chat, modelName, personality);
+ }
+ } else {
+ leaveVoiceChannel();
+ }
+ }, 5000);
  }

+
  function convertPCMtoWAV(inputPCM: string, outputWAV: string, chat: any, modelName?: string, personality?: string) {
- if (!fs.existsSync(inputPCM) || fs.statSync(inputPCM).size === 0) {
- return;
- }
+ if (!fs.existsSync(inputPCM) || fs.statSync(inputPCM).size === 0) {
+ return;
+ }

- try {
- execSync(`ffmpeg -y -f s16le -ar 48000 -ac 1 -i "${inputPCM}" -acodec pcm_s16le "${outputWAV}" > nul 2>&1`);
+ try {
+ execSync(`ffmpeg -y -f s16le -ar 48000 -ac 1 -i "${inputPCM}" -acodec pcm_s16le "${outputWAV}" > nul 2>&1`);

- if (fs.existsSync(outputWAV)) {
- transcribeAudio(outputWAV, chat, modelName, personality);
- }
+ if (fs.existsSync(outputWAV)) {
+ transcribeAudio(outputWAV, chat, modelName, personality);
+ }

- } catch (error) {
- console.error("❌ FFmpeg failed:", error);
- }
+ } catch (error) {
+ console.error("❌ FFmpeg failed:", error);
+ }
  }

- // 🛠 **Transcribe Audio using ApexListener**
+
  async function transcribeAudio(filePath: string, chat: any, modelName?: string, personality?: string) {
  try {
  const transcribedText = await ApexListener({
@@ -689,47 +716,42 @@ async function transcribeAudio(filePath: string, chat: any, modelName?: string,
  processQueue(chat, modelName, personality);
  }

- const pcmFile = filePath.replace(".wav", ".pcm");
- if (fs.existsSync(pcmFile)) fs.unlinkSync(pcmFile);
- if (fs.existsSync(filePath)) fs.unlinkSync(filePath);
-
+ fs.unlinkSync(filePath);
  } catch (error) {
  console.error("❌ Error in transcription:", error);
  }
  }

  async function processQueue(chat: any, modelName?: string, personality?: string) {
- if (isProcessing || voiceQueue.length === 0) {
- if (voiceQueue.length === 0) {
- leaveVoiceChannel();
- }
- return;
- }
+ if (isProcessing || voiceQueue.length === 0) return;

  isProcessing = true;
  const { userId, text } = voiceQueue.shift()!;
+ activeUser = userId;
+ resetSilenceTimer();

  try {
+ const aiResponse = await ApexChat(chat?.chatModel || "gpt-4o", text, {
+ instruction: chat.instruction,
+ memory: chat?.memory?.memoryOn,
+ userId,
+ limit: chat?.memory?.limit,
+ threshold: chat?.memory?.threshold
+ });

- const aiResponse = await ApexChat(chat?.chatModel as string || "gpt-4o", text, {
- instruction: chat.instruction,
- memory: chat?.memory?.memoryOn,
- userId: userId,
- limit: chat?.memory?.limit,
- threshold: chat?.memory?.threshold
- });
- const audioBuffer = await ApexText2Speech({ inputText: aiResponse, modelName, personality });
+ const audioBuffer = await ApexText2Speech({ inputText: aiResponse, modelName, personality });

  if (voiceConnection) {
  const player = createAudioPlayer();
- const audioStream = Readable.from(audioBuffer);
- const resource = createAudioResource(audioStream);
+ const resource = createAudioResource(Readable.from(audioBuffer));
  voiceConnection.subscribe(player);
  player.play(resource);

  player.on(AudioPlayerStatus.Idle, () => {
  isProcessing = false;
- processQueue(chat);
+ if (voiceQueue.length > 0) {
+ processQueue(chat, modelName, personality);
+ }
  });
  }
  } catch (error) {
@@ -738,12 +760,11 @@ async function processQueue(chat: any, modelName?: string, personality?: string)
  }
  }

- // 🔄 **Leave Voice Channel When Done**
  function leaveVoiceChannel() {
  if (voiceConnection) {
- console.log("👋 AI is leaving the voice channel...");
  voiceConnection.destroy();
  voiceConnection = null;
  activeUser = null;
+ resetSilenceTimer();
  }
- }
+ }
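For context, the new startSilenceTimer/resetSilenceTimer pair adds an idle timeout on top of the voice queue: five seconds after the active user stops speaking, the bot either promotes the next queued speaker or leaves the channel. A minimal sketch of that pattern in isolation (the onNextUser/onIdle callbacks are placeholders, not part of the package):

    let silenceTimer: NodeJS.Timeout | null = null;
    const voiceQueue: { userId: string; text: string }[] = [];

    function resetSilenceTimer(): void {
        if (silenceTimer) clearTimeout(silenceTimer);
        silenceTimer = null;
    }

    function startSilenceTimer(onNextUser: (userId: string) => void, onIdle: () => void): void {
        resetSilenceTimer();
        silenceTimer = setTimeout(() => {
            const next = voiceQueue.shift();
            if (next) onNextUser(next.userId); // promote the next queued speaker
            else onIdle();                     // queue empty: disconnect from the channel
        }, 5000);
    }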
@@ -175,7 +175,7 @@ async function filters(Apex: any) {
  }

  try {
- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  { type: "brightness", value: parseFloat(brightnessDegree) },
  ]);

@@ -217,7 +217,7 @@ async function filters(Apex: any) {
  const imageURL = selectedAttachment.url;

  try {
- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  { type: "sepia" },
  ]);
  await a.editReply({
@@ -283,7 +283,7 @@ async function filters(Apex: any) {
  const [x, y] = xyArray;
  const [width, height] = whArray;

- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  {
  type: "pixelate",
  size: parseInt(size, 10),
@@ -340,7 +340,7 @@ async function filters(Apex: any) {
  }

  try {
- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  { type: "blur", radius: parseFloat(blurRadius) },
  ]);
  await a.editReply({
@@ -390,7 +390,7 @@ async function filters(Apex: any) {
  }

  try {
- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  { type: "fade", factor: parseFloat(fadeFactor) },
  ]);
  await a.editReply({
@@ -431,7 +431,7 @@ async function filters(Apex: any) {
  const imageURL = selectedAttachment.url;

  try {
- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  { type: "opaque" },
  ]);
  await a.editReply({
@@ -472,7 +472,7 @@ async function filters(Apex: any) {
  const imageURL = selectedAttachment.url;

  try {
- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  { type: "greyscale" },
  ]);
  await a.editReply({
@@ -522,7 +522,7 @@ async function filters(Apex: any) {
  }

  try {
- const processedBuffer = await apexPainter.processImage(imageURL, [
+ const processedBuffer = await apexPainter.effects(imageURL, [
  { type: "contrast", value: parseFloat(contrastValue) },
  ]);
  await a.editReply({
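All eight hunks in this file make the same change: apexPainter.processImage(imageURL, [...]) becomes apexPainter.effects(imageURL, [...]) with an unchanged effects-array argument. A hedged usage sketch of the renamed call, assuming ApexPainter is instantiated from the package root as in earlier versions (the image URL and effect values are placeholders):

    import { ApexPainter } from "apexify.js";

    const apexPainter = new ApexPainter();

    async function brighten(imageURL: string) {
        // Same call shape as the hunks above: an image source plus an array of effect objects.
        return apexPainter.effects(imageURL, [
            { type: "brightness", value: 1.2 },
        ]);
    }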
@@ -1,113 +1,114 @@
- import Groq from 'groq-sdk';
- import fs from 'fs';
- import path from 'path';
- import { URL } from 'url';
- import https from 'https';
- import { Uploadable } from 'groq-sdk/uploads.mjs';
+ import Groq from "groq-sdk";
+ import fs from "fs";
+ import path from "path";
+ import { URL } from "url";
+ import https from "https";

  /**
  * Gets the file size of a given path.
  */
  function getFileSize(filePath: string): number {
- const stats = fs.statSync(filePath);
- return stats.size;
+ return fs.statSync(filePath).size;
  }

  /**
  * Creates a readable stream from a local file or a remote URL.
  */
- async function createReadableStream(filepathOrUrl: string): Promise<NodeJS.ReadableStream | string> {
- const maxFileSizeBytes = 25 * 1024 * 1024;
+ async function createReadableStream(filepathOrUrl: string): Promise<{ stream: fs.ReadStream | null; tempFilePath?: string; error?: string }> {
+ const maxFileSizeBytes = 25 * 1024 * 1024; // 25MB limit

- if (filepathOrUrl.startsWith('http')) {
- const parsedUrl = new URL(filepathOrUrl);
- const fileExtension = path.extname(parsedUrl.pathname);
- const tempFilePath = `temp-file-${Date.now()}${fileExtension}`;
+ if (filepathOrUrl.startsWith("http")) {
+ const parsedUrl = new URL(filepathOrUrl);
+ const fileExtension = path.extname(parsedUrl.pathname);
+ const tempFilePath = `temp-file-${Date.now()}${fileExtension}`;

- return new Promise((resolve, reject) => {
- const file = fs.createWriteStream(tempFilePath);
- const request = https.get(filepathOrUrl, (response) => {
- let fileSize = 0;
+ return new Promise((resolve, reject) => {
+ const file = fs.createWriteStream(tempFilePath);
+ const request = https.get(filepathOrUrl, (response) => {
+ let fileSize = 0;

- response.on('data', (chunk) => {
- fileSize += chunk.length;
- if (fileSize > maxFileSizeBytes) {
- request.destroy(); // Stop downloading
- file.close();
- fs.unlink(tempFilePath, () => {
- resolve('File size exceeds the limit (25MB)');
- });
- return; // Ensure function stops here
- }
- });
+ response.on("data", (chunk) => {
+ fileSize += chunk.length;
+ if (fileSize > maxFileSizeBytes) {
+ request.destroy();
+ file.close();
+ fs.unlink(tempFilePath, () => resolve({ stream: null, error: "File size exceeds the limit (25MB)" }));
+ return;
+ }
+ });

- response.pipe(file);
-
- file.on('finish', () => {
- file.close(() => {
- if (fileSize <= maxFileSizeBytes) {
- resolve(fs.createReadStream(tempFilePath));
- }
- });
- });
- });
+ response.pipe(file);
+ file.on("finish", () => {
+ file.close();
+ resolve({ stream: fs.createReadStream(tempFilePath), tempFilePath }); // ✅ Ensure fs.ReadStream
+ });
+ });

- request.on('error', (err) => {
- fs.unlink(tempFilePath, () => {
- reject(err);
+ request.on("error", (err) => {
+ fs.unlink(tempFilePath, () => reject(err));
+ });
  });
- });
- });
- } else {
- if (getFileSize(filepathOrUrl) > maxFileSizeBytes) {
- return 'File size exceeds the limit (25MB)';
+ } else {
+ if (getFileSize(filepathOrUrl) > maxFileSizeBytes) {
+ return { stream: null, error: "File size exceeds the limit (25MB)" };
+ }
+ return { stream: fs.createReadStream(filepathOrUrl) }; // ✅ Ensure fs.ReadStream
  }
- return fs.createReadStream(filepathOrUrl);
- }
  }

  /**
  * Transcribes audio using Groq Whisper API.
  */
  export async function whisper(prompt: string, filepath: string, lang?: string, API_KEY?: string) {
- try {
- const groq = new Groq({
- apiKey: API_KEY || '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',
- });
+ try {

- // Ensure filepath is properly resolved
- const resolvedFilePath = filepath.startsWith('http') || path.isAbsolute(filepath)
- ? filepath
- : path.join(process.cwd(), filepath);

- const fileStream = await createReadableStream(resolvedFilePath);
+ const groq = new Groq({
+ apiKey: API_KEY || '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',
+ });
+
+
+ // Ensure filepath is properly resolved
+ const resolvedFilePath = filepath.startsWith("http") || path.isAbsolute(filepath)
+ ? filepath
+ : path.join(process.cwd(), filepath);

- if (typeof fileStream === 'string') {
- return fileStream;
- }
+ const { stream: fileStream, tempFilePath, error } = await createReadableStream(resolvedFilePath);
+
+ if (error) return error;
+ if (!fileStream) return "Failed to create a readable file stream.";

- const transcription = await groq.audio.transcriptions.create({
- file: fileStream as Uploadable,
- model: "whisper-large-v3-turbo",
- prompt: prompt,
- temperature: 1,
- language: lang || 'eng',
- response_format: "verbose_json",
- });
+ // Ensure `fileStream` is a valid `fs.ReadStream`
+ const transcription = await groq.audio.transcriptions.create({
+ file: fileStream as fs.ReadStream, // ✅ Ensure correct type
+ model: "whisper-large-v3-turbo",
+ prompt: prompt,
+ temperature: 1,
+ language: lang || "eng",
+ response_format: "verbose_json",
+ });
+
+ // ✅ Delete the temp file if it was downloaded from a URL
+ if (tempFilePath && fs.existsSync(tempFilePath)) {
+ fs.unlinkSync(tempFilePath);
+ }

- return transcription.text;
- } catch (err: any) {
- if (err?.status) {
- switch (err.status) {
- case 400: return 'Bad request, try again after a minute please.';
- case 429: return 'Rate limit exceeded, try again later or provide your own API key.';
- case 401: return 'Invalid API key provided.';
- default:
- console.error(err);
- return "An unknown error occurred.";
- }
+ return transcription.text;
+ } catch (err: any) {
+ if (err?.status) {
+ switch (err.status) {
+ case 400:
+ return "Bad request, try again after a minute.";
+ case 429:
+ return "Rate limit exceeded, try again later or provide your own API key.";
+ case 401:
+ return "Invalid API key provided.";
+ default:
+ console.error(err);
+ return "An unknown error occurred.";
+ }
+ }
+ console.error(err);
+ return "An unknown error occurred.";
  }
- console.error(err);
- return 'An unknown error occurred.';
- }
  }
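The exported whisper(prompt, filepath, lang?, API_KEY?) signature is unchanged; the rewrite only moves the 25MB guard and temp-file cleanup into a { stream, tempFilePath, error } result from createReadableStream. A minimal caller sketch under that signature (the file path and environment variable are placeholders, and the import assumes whisper is re-exported from the package root; otherwise import it from its module path):

    import { whisper } from "apexify.js";

    async function transcribeSample(): Promise<void> {
        // Resolves to the transcription text, or to a human-readable error string
        // (e.g. the 25MB-limit or rate-limit messages above), so check before using it.
        const result = await whisper(
            "Voice note from Discord",     // prompt forwarded to the Whisper model
            "./recordings/sample.wav",     // local path or http(s) URL, max 25MB
            "eng",
            process.env.GROQ_API_KEY,
        );
        console.log(result);
    }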