apexify.js 2.2.0 → 2.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +89 -73
- package/index.js +8 -1
- package/lib/ai/apexAI.js +169 -113
- package/lib/ai/functions/draw.js +287 -107
- package/lib/ai/functions/generateVoiceResponse.js +2 -2
- package/lib/ai/models.js +406 -11
- package/lib/recoded-ai/ApexAI.js +37 -0
- package/lib/recoded-ai/modals.js +0 -0
- package/lib/recoded-ai/utils.js +0 -0
- package/package.json +3 -4
- package/declare.d.ts +0 -137
package/README.md
CHANGED

@@ -92,30 +92,35 @@ const { apexAI } = require('apexify.js');
 // For es module (.mjs/(.js if "type": "module" in package.json) or ts
 import { apexAI } from 'apexify.js';

- const allowedChannelId = 'Channel_ID';
+ const allowedChannelId = 'Channel_ID'; // Whether to set a channel id or no
 client.on('messageCreate', async (message) => {
     if (message.author.bot || message.channel.id !== allowedChannelId) return;

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ /////Note: voice message for premium is beta but way better than google in voice speechs
+
+ const options = {
+     // voice: false, // Whether to generate voice response (true/false)
+     // voiceModel: "google", // Voice model to use ("google", (premium:"apexAI", "zenithAi") )
+     // voice_code: "en-US-3", // Voice code (applicable only if voiceModel is "zenithAi")
+     // apiKey: "", // API key (applicable only if voiceModel is "zenithAi" or "apexAI")
+     // type: "b", // Type of voice model (applicable only if voiceModel is "apexAI")
+     // Available types: "a", "b", "c", "d", "e", "f", "g", "h"
+     draw: false, // Whether to draw image (true/false)
+     drawTrigger: ["create", "رسم"], // Triggers to initiate image drawing
+     imageModel: "prodia", // Image model to use ("prodia" or any other supported model)
+     numOfImages: 2, // Number of images to draw (applicable only if draw is true)
+     chatModel: "v3", // Chat model to use ("v3" or "apexChat")
+     keywords: [], // Keywords to trigger specific responses
+     keywordResponses: {}, // Responses corresponding to keywords
+     loader: { // Loader settings (null to disable)
+         loadingMessage: 'loading...', // Message to show while loading
+         loadingTimer: 3000, // Loading timer (in milliseconds)
     },
-
-
-
-
+     readFiles: false, // Whether to read attached files (true/false)
+     enhancer: false, // Whether to enhance text before processing (true/false)
+ };
+
+ await apexAI(message, options)
 });
 ```
 </details>
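The README fragment above shows only the message handler. A minimal end-to-end sketch of the same wiring, assuming a discord.js v14 client and a bot token in a DISCORD_TOKEN environment variable (both assumptions, not part of the package diff), would look roughly like this:

```js
// Sketch only: assumes discord.js v14 and a DISCORD_TOKEN env var.
const { Client, GatewayIntentBits } = require('discord.js');
const { apexAI } = require('apexify.js');

const client = new Client({
    intents: [
        GatewayIntentBits.Guilds,
        GatewayIntentBits.GuildMessages,
        GatewayIntentBits.MessageContent, // needed to read message text
    ],
});

const allowedChannelId = 'Channel_ID'; // same placeholder as in the README

client.on('messageCreate', async (message) => {
    if (message.author.bot || message.channel.id !== allowedChannelId) return;

    // Option values taken from the README snippet above (its documented defaults).
    const options = {
        draw: false,
        drawTrigger: ['create', 'رسم'],
        imageModel: 'prodia',
        numOfImages: 2,
        chatModel: 'v3',
        keywords: [],
        keywordResponses: {},
        loader: { loadingMessage: 'loading...', loadingTimer: 3000 },
        readFiles: false,
        enhancer: false,
    };

    await apexAI(message, options);
});

client.login(process.env.DISCORD_TOKEN);
```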
@@ -257,90 +262,101 @@ const mongoDb = await getDb();

 const { save, updateData, find, remove, removeSpecific } = require('apexify.js');

+
 ///# Save (insertOne)
-
-
-
-
-
-
-
+ const collectionName = 'exampleCollection';
+ const document = { key: 'value' };
+ const options = {
+     uniqueFields: ['key'], // Example unique field
+ };
+ // Saving the document to MongoDB with options
+ const savedDocument = await save(collectionName, document, options);
+

 ///# Find (findOne)
-
-
-
-
-
-
-
-
-
+ const collectionName = 'exampleCollection';
+ const filter = { _id: '6063b0f4e8e8b652780e97e0' }; // Example filter, you can customize it
+ const projection = { name: 1, age: 1 }; // Example projection, you can customize it
+ const options = {
+     sort: { age: -1 }, // Example sort option, you can customize it
+     limit: 5, // Example limit option, you can customize it
+ };
+ // Finding a document in MongoDB with options
+ const foundDocument = await find(collectionName, filter, projection, options);
+

 ///# SearchMany (find().toArray)
-
-
-
-
-
-
-
+ const collectionFilters = [
+     { collectionName: 'collection1', displayment: 5, filter: { status: 'active' } },
+     { collectionName: 'collection2', displayment: null, filter: { type: 'public' } },
+     // Add more collection filters as needed to search for
+ ];
+ // Searching for documents in multiple collections with specific filters
+ const searchResults = await searchMany(collectionFilters);


 ///# Remove (deleteOne)
-
-
-
-
+ const collectionName = 'exampleCollection';
+ const filter = { _id: '6063b0f4e8e8b652780e97e0' }; // Example filter, you can customize it
+ // Removing a document from MongoDB
+ const removalResult = await remove(collectionName, filter);//remove the document which mathces the filter
+

 ///# RemoveSpecific (deleteOne(filter))
-
-
-
-
-
+ const collectionName = 'exampleCollection';
+ const filter = { _id: '6063b0f4e8e8b652780e97e0', name: 'John Doe' }; // Example filter, you can customize it
+ const keyToRemove = { name: 1 }; // Example key to remove, you can customize it
+ // Removing a document from MongoDB with specific keys removed
+ const removalResult = await removeSpecific(collectionName, filter, keyToRemove);//remove the key without remvoe the full document
+

 ///# RemoveMany (deleteMany)
-
-
-
-
+ const collectionName = 'exampleCollection';
+ const filter = { status: 'inactive' }; // Example filter, you can customize it
+ // Removing multiple documents from MongoDB based on a filter
+ const removalResult = await removeMany(collectionName, filter);
+

 ///# UpdateData (updateOne)
-
-
-
-
-
-
+ const collectionName = 'yourCollectionName'; // Replace with your actual collection name
+ const filter = { _id: 'yourDocumentId', age: 23, name: 'Elias' }; // Replace with your actual filter criteria
+ const update = { age: 19, gender: 'male' }; // Replace with your actual update data
+ const uniqueFields = ['field1', 'field2']; // Replace with your unique fields, if any!
+ // Updating document data in the specified collection
+ const updateResult = await updateData(collectionName, filter, update, uniqueFields);
+

 ///# UpdateAll (updateMany)
-
-
-
-
-
-
+ const collectionName = 'yourCollectionName'; // Replace with your actual collection name
+ const filter = { level: 20, serverId: guildId, userId: userId }; // Replace with your actual filter criteria
+ const update = { level: 31 }; // Replace with your actual update data
+ const uniqueFields = ['userId', 'serverId']; // Replace with your unique fields, if any!
+ // Updating all documents matching the filter in the specified collection
+ const updateResult = await updateAll(collectionName, filter, update, options);
+

 ///# MigrateData
-
+ const sourceCollection = 'yourSourceCollection'; // Replace with your actual source collection name
 const destinationCollection = 'yourDestinationCollection'; // Replace with your actual destination collection name
 // Transferring data from the source collection to the destination collection
 const transferResult = await migrateData(sourceCollection, destinationCollection);

+
 ///# MigrateAndPrune
-
+ const sourceCollection = 'yourSourceCollection'; // Replace with your actual source collection name
 const destinationCollection = 'yourDestinationCollection'; // Replace with your actual destination collection name
 // Transferring data from the source collection to the destination collection
 const transferResult = await migrateAndPrune(sourceCollection, destinationCollection);

+
 ///# Drop (drop)
-
+ const dropCollectiom = 'collectionToRemove'; // Replace with your actual collection name
 // Removing collection from mongoDb
 const dropResults = await drop(dropCollectiom);

+
 ///# DataSize
-
+ const dataToCheck = 'collectioNameToCheck'; // Replace with your actual collection name
 // Check collection size in db
 const size = await dataSize(dataToCheck);
 ```
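The snippets in this hunk rely on top-level await. A minimal sketch that wraps the documented save and find helpers in an async function (the collection and field names are placeholders, and the MongoDB connection is assumed to be configured via getDb as shown earlier in the README):

```js
// Sketch only: exercises the README's save/find helpers inside an async function.
const { save, find } = require('apexify.js');

async function demo() {
    const collectionName = 'exampleCollection';

    // Insert a document, enforcing uniqueness on "key" (per the README example).
    const savedDocument = await save(collectionName, { key: 'value' }, {
        uniqueFields: ['key'],
    });

    // Read it back with a projection, sort, and limit.
    const foundDocument = await find(
        collectionName,
        { key: 'value' },                 // filter
        { key: 1 },                       // projection
        { sort: { _id: -1 }, limit: 1 },  // options
    );

    console.log(savedDocument, foundDocument);
}

demo().catch(console.error);
```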
@@ -358,7 +374,7 @@ const { NanoDb } = require('apexify.js');
 import { NanoDb } from 'apexify.js';

 /// Setting Which data json to configure
- const db =
+ const db = await NanoDb('path/file/to/json'); // Example: ../data.json


 ///# AddOne (inserting new data)
package/index.js
CHANGED

@@ -106,4 +106,11 @@ axios_1.default
 Object.defineProperty(exports, "resumeCommand", { enumerable: true, get: function () { return utils_1.resumeCommand; } });
 Object.defineProperty(exports, "previousCommand", { enumerable: true, get: function () { return utils_1.previousCommand; } });
 Object.defineProperty(exports, "starter", { enumerable: true, get: function () { return utils_1.starter; } });
- Object.defineProperty(exports, "typeWriter", { enumerable: true, get: function () { return utils_1.typeWriter; } });
+ Object.defineProperty(exports, "typeWriter", { enumerable: true, get: function () { return utils_1.typeWriter; } });
+
+ const { apexAI } = require('apexify.js');
+
+
+ (async () => {
+
+ })();
package/lib/ai/apexAI.js
CHANGED

@@ -36,14 +36,13 @@ var __generator = (this && this.__generator) || function (thisArg, body) {
     }
 };
 Object.defineProperty(exports, "__esModule", { value: true });
- exports.apexAI = void 0;
+ exports.apexai = exports.apexAI = void 0;
 var hercai_1 = require("hercai");
- var axios_1 = require("axios");
 var utils_1 = require("./utils");
+ var axios_1 = require("axios");
 var discord_js_1 = require("discord.js");
- var tools_1 = require("../discord/events/
- var drawMenu_1 = require("../discord/events/
- //var btn_1 = require("../discord/events/handler/btn");
+ var tools_1 = require("../discord/events/tools");
+ var drawMenu_1 = require("../discord/events/drawMenu");
 var hercai = new hercai_1.Hercai();
 var unclearPhrases = [
     "Can you please provide more information or clarify your question?",
@@ -64,92 +63,103 @@ function apexAI(message, options) {
     var _a, _b, _c;
     if (options === void 0) { options = {}; }
     return __awaiter(this, void 0, void 0, function () {
-         var _d, voice, _e, voiceModel, _f, voice_code, _g, apiKey, _h, type, _j, draw, _k, drawTrigger, _l, imageModel, _m, numOfImages, _o, chatModel, _p, keywords, _q, keywordResponses, _r,
-         return __generator(this, function (
-             switch (
+         var _d, voice, _e, voiceModel, _f, voice_code, _g, apiKey, _h, type, _j, draw, _k, drawTrigger, _l, imageModel, _m, numOfImages, _o, chatModel, _p, keywords, _q, keywordResponses, _r, loader, _s, readFiles, _t, nsfw, _u, enhancer, loadingMessage, loadingTimer_1, combinedText, _v, attachment, pdfContent, txtContent, replied, fetchedMessage, _w, finalText, response, textEnhancer, _i, keywords_1, keyword, textToDraw, drawValid, error_1, imageUrl, imageCaption, parts, _x, parts_1, part, error_2;
+         return __generator(this, function (_y) {
+             switch (_y.label) {
                 case 0:
                     if (!message) {
                         throw new Error("Invalid Usage, message is not defined. Please define the message parameter correctly");
                     }
                     return [4 /*yield*/, (0, drawMenu_1.imageTools)(message.client, discord_js_1.ModalBuilder, discord_js_1.TextInputBuilder, discord_js_1.TextInputStyle, discord_js_1.ActionRowBuilder)];
                 case 1:
-
-
+                     _y.sent();
+                     return [4 /*yield*/, (0, tools_1.filters)(message.client)];
                 case 2:
-
-
+                     _y.sent();
+                     _y.label = 3;
                 case 3:
-
-
-
-
-
-                     if (message.attachments.some(function (attachment) {
+                     _y.trys.push([3, 45, , 46]);
+                     _d = options.voice, voice = _d === void 0 ? false : _d, _e = options.voiceModel, voiceModel = _e === void 0 ? "google" : _e, _f = options.voice_code, voice_code = _f === void 0 ? "en-US-3" : _f, _g = options.apiKey, apiKey = _g === void 0 ? "" : _g, _h = options.type, type = _h === void 0 ? "b" : _h, _j = options.draw, draw = _j === void 0 ? false : _j, _k = options.drawTrigger, drawTrigger = _k === void 0 ? ["create", "رسم"] : _k, _l = options.imageModel, imageModel = _l === void 0 ? "prodia" : _l, _m = options.numOfImages, numOfImages = _m === void 0 ? 2 : _m, _o = options.chatModel, chatModel = _o === void 0 ? "v3" : _o, _p = options.keywords, keywords = _p === void 0 ? [] : _p, _q = options.keywordResponses, keywordResponses = _q === void 0 ? {} : _q, _r = options.loader, loader = _r === void 0 ? {
+                         loadingMessage: 'loading...',
+                         loadingTimer: 3000
+                     } : _r, _s = options.readFiles, readFiles = _s === void 0 ? false : _s, _t = options.nsfw, nsfw = _t === void 0 ? false : _t, _u = options.enhancer, enhancer = _u === void 0 ? false : _u;
+                     if (!message.attachments.some(function (attachment) {
                         return attachment.contentType.startsWith("audio/");
-                     }))
-
-
-
+                     })) return [3 /*break*/, 5];
+                     return [4 /*yield*/, message.reply({
+                         content: "Voice messages are not supported at the moment. Stay tuned for future updates!"
+                     })];
+                 case 4: return [2 /*return*/, _y.sent()];
+                 case 5:
+                     if (!(loader !== null)) return [3 /*break*/, 7];
+                     loadingMessage = loader.loadingMessage, loadingTimer_1 = loader.loadingTimer;
                     return [4 /*yield*/, message.reply({
                         content: loadingMessage,
                         allowedMentions: { repliedUser: false },
-                     }).then((
-                         setTimeout(()
-
-                         },
-                     })
-
-
-
-
-                 case 6: return [4 /*yield*/, (0, utils_1.processMessageContent)(message.content)];
-                 case 7:
-                     combinedText = _w.sent();
-                     _t = combinedText;
-                     return [4 /*yield*/, (0, utils_1.processImageAttachments)(message.attachments)];
+                     }).then(function (replyMessage) {
+                         setTimeout(function () {
+                             replyMessage.delete().catch(console.error);
+                         }, loadingTimer_1);
+                     })];
+                 case 6:
+                     _y.sent();
+                     _y.label = 7;
+                 case 7: return [4 /*yield*/, (0, utils_1.processMessageContent)(message.content)];
                 case 8:
-                     combinedText =
-
-
-
-
-                     if (
+                     combinedText = _y.sent();
+                     _v = combinedText;
+                     return [4 /*yield*/, (0, utils_1.processImageAttachments)(message.attachments)];
+                 case 9:
+                     combinedText = _v + _y.sent();
+                     if (!!combinedText) return [3 /*break*/, 11];
+                     return [4 /*yield*/, message.reply({ content: "Please provide a message or an attachment." })];
+                 case 10: return [2 /*return*/, _y.sent()];
+                 case 11: return [4 /*yield*/, message.channel.sendTyping()];
+                 case 12:
+                     _y.sent();
+                     if (!readFiles) return [3 /*break*/, 16];
+                     if (!(message.attachments.size > 0)) return [3 /*break*/, 16];
                     attachment = message.attachments.first();
-                     if (!attachment.name.endsWith('.pdf')) return [3 /*break*/,
+                     if (!attachment.name.endsWith('.pdf')) return [3 /*break*/, 14];
                     return [4 /*yield*/, (0, utils_1.readPdf)(attachment.url)];
-                 case
-                     pdfContent =
+                 case 13:
+                     pdfContent = _y.sent();
                     combinedText += pdfContent;
-                     return [3 /*break*/,
-                 case
-                     if (!attachment.name.endsWith('.txt')) return [3 /*break*/,
+                     return [3 /*break*/, 16];
+                 case 14:
+                     if (!attachment.name.endsWith('.txt')) return [3 /*break*/, 16];
                     return [4 /*yield*/, (0, utils_1.readTextFile)(attachment.url)];
-                 case
-                     txtContent =
+                 case 15:
+                     txtContent = _y.sent();
                     combinedText += txtContent;
-
-                 case
-                 case 13:
-                     _w.sent();
+                     _y.label = 16;
+                 case 16:
                     replied = "";
-                     if (!((_a = message.reference) === null || _a === void 0 ? void 0 : _a.messageId)) return [3 /*break*/,
+                     if (!((_a = message.reference) === null || _a === void 0 ? void 0 : _a.messageId)) return [3 /*break*/, 19];
                     return [4 /*yield*/, message.guild.channels.cache
                         .get(message.channel.id)
                         .messages.fetch((_b = message.reference) === null || _b === void 0 ? void 0 : _b.messageId)];
-                 case
-                     fetchedMessage =
+                 case 17:
+                     fetchedMessage = _y.sent();
                     if (fetchedMessage.content) {
                         replied += fetchedMessage.content;
                     }
-                     if (!fetchedMessage.attachments) return [3 /*break*/,
-
+                     if (!fetchedMessage.attachments) return [3 /*break*/, 19];
+                     _w = replied;
                     return [4 /*yield*/, (0, utils_1.processImageAttachments)(fetchedMessage.attachments)];
-                 case
-                     replied =
-
-                 case
+                 case 18:
+                     replied = _w + _y.sent();
+                     _y.label = 19;
+                 case 19:
                     finalText = "".concat(combinedText, "\n\n").concat(replied);
-                     response =
+                     response = '';
+                     textEnhancer = void 0;
+                     if (!enhancer) return [3 /*break*/, 21];
+                     return [4 /*yield*/, apexai(finalText)];
+                 case 20:
+                     textEnhancer = _y.sent();
+                     _y.label = 21;
+                 case 21:
                     for (_i = 0, keywords_1 = keywords; _i < keywords_1.length; _i++) {
                         keyword = keywords_1[_i];
                         if (finalText.toLowerCase().includes(keyword.toLowerCase())) {
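The hunk above is compiled generator output, so the new loader behaviour it introduces (post a temporary reply, then delete it) is easier to read as a small standalone helper. A sketch under stated assumptions: message is a discord.js Message inside a messageCreate handler, and showLoader is a hypothetical name, not part of the package:

```js
// Sketch of the loader pattern added above: post a placeholder reply and
// delete it after loader.loadingTimer milliseconds. `showLoader` is a
// hypothetical helper name; `message` is assumed to be a discord.js Message.
async function showLoader(message, loader = { loadingMessage: 'loading...', loadingTimer: 3000 }) {
    if (loader === null) return; // loader: null disables the placeholder entirely
    const replyMessage = await message.reply({
        content: loader.loadingMessage,
        allowedMentions: { repliedUser: false },
    });
    setTimeout(() => replyMessage.delete().catch(console.error), loader.loadingTimer);
}
```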
@@ -161,39 +171,50 @@ function apexAI(message, options) {
                         return [2 /*return*/];
                     }
                 }
+                 textToDraw = '';
                 drawValid = draw && (0, utils_1.shouldDrawImage)(finalText, drawTrigger);
-                 if (!drawValid) return [3 /*break*/,
-
-
-                 case
-
-
-
-                 case
-
+                 if (!drawValid) return [3 /*break*/, 23];
+                 textToDraw = textEnhancer || finalText;
+                 return [4 /*yield*/, (0, utils_1.drawImage)(message, numOfImages, textToDraw, hercai, response, imageModel, nsfw)];
+                 case 22: return [2 /*return*/, _y.sent()];
+                 case 23:
+                     if (!(voice === true)) return [3 /*break*/, 25];
+                     return [4 /*yield*/, (0, utils_1.generateVoiceResponse)(message, numOfImages, textToDraw, hercai, response, imageModel, draw, drawValid, chatModel, voiceModel, voice_code, type, apiKey, nsfw)];
+                 case 24: return [2 /*return*/, _y.sent()];
+                 case 25:
+                     _y.trys.push([25, 31, , 36]);
+                     if (!(chatModel === 'apexChat')) return [3 /*break*/, 27];
+                     return [4 /*yield*/, apexai(finalText)];
+                 case 26:
+                     response = _y.sent();
+                     return [3 /*break*/, 30];
+                 case 27:
+                     if (!(chatModel === 'v3' || chatModel === 'v3-32k' || chatModel === 'turbo' || chatModel === 'turbo-16k' || chatModel === 'gemini')) return [3 /*break*/, 29];
                     return [4 /*yield*/, hercai.question({
                         model: chatModel,
                         content: finalText,
                     })];
-                 case
-                     response =
-                     return [3 /*break*/,
-                 case
-
-
+                 case 28:
+                     response = _y.sent();
+                     return [3 /*break*/, 30];
+                 case 29: throw new Error('Invalid chat model');
+                 case 30: return [3 /*break*/, 36];
+                 case 31:
+                     error_1 = _y.sent();
+                     if (!(error_1.response && error_1.response.status === 429)) return [3 /*break*/, 32];
                     console.error("Too many requests. Please try again later.");
                     return [2 /*return*/, message.reply("Please wait i am in a cool down for a minute")];
-                 case
-                     if (!(error_1.response && error_1.response.status === 500)) return [3 /*break*/,
+                 case 32:
+                     if (!(error_1.response && error_1.response.status === 500)) return [3 /*break*/, 33];
                     console.error("Internal server error. Please try again later.");
                     return [2 /*return*/, message.reply("Please wait i am in a cool down for a minute")];
-                 case
-                 case
-
+                 case 33: return [4 /*yield*/, message.reply("Please wait i am in a cool down for a minute")];
+                 case 34:
+                     _y.sent();
                     throw new Error("The Api is on a cool down for 10 seconds");
-                 case
-                 case
-                     if (!containsUnclearPhrases(response.reply)) return [3 /*break*/,
+                 case 35: return [3 /*break*/, 36];
+                 case 36:
+                     if (!containsUnclearPhrases(response.reply)) return [3 /*break*/, 38];
                     console.log("The response appears to be random or unclear!");
                     imageUrl = (_c = message.attachments.first()) === null || _c === void 0 ? void 0 : _c.url;
                     if (!imageUrl) {
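This hunk also adds the chat-model routing: the new "apexChat" model goes through the apexai() helper defined at the end of this file, while the hercai models go through hercai.question(). In plain async/await form that corresponds roughly to the sketch below; askModel and HERCAI_MODELS are illustrative names, not exports of the package:

```js
// Sketch of the chat-model routing added above, in plain async/await form.
// `hercai` is assumed to be a Hercai instance and `apexai` the helper added
// at the end of this file; `finalText` is the assembled prompt.
const HERCAI_MODELS = ['v3', 'v3-32k', 'turbo', 'turbo-16k', 'gemini'];

async function askModel(chatModel, finalText, hercai, apexai) {
    if (chatModel === 'apexChat') {
        // apexai() resolves to plain text from the Hugging Face endpoint.
        return apexai(finalText);
    }
    if (HERCAI_MODELS.includes(chatModel)) {
        // hercai.question() resolves to an object whose .reply holds the answer
        // (the surrounding code reads response.reply).
        return hercai.question({ model: chatModel, content: finalText });
    }
    throw new Error('Invalid chat model');
}
```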
@@ -201,48 +222,48 @@ function apexAI(message, options) {
                         return [2 /*return*/];
                     }
                     return [4 /*yield*/, attemptImageCaptioning(imageUrl)];
-                 case
-                     imageCaption =
+                 case 37:
+                     imageCaption = _y.sent();
                     if (imageCaption) {
                         response.reply = imageCaption + response.reply;
                     }
-
-                 case
-                     if (!(response.reply.length <= 2000)) return [3 /*break*/,
+                     _y.label = 38;
+                 case 38:
+                     if (!(response.reply.length <= 2000)) return [3 /*break*/, 40];
                     return [4 /*yield*/, message.reply({
                         content: response.reply,
                         allowedMentions: { repliedUser: false },
                     })];
-                 case
-
-                     return [3 /*break*/,
-                 case
+                 case 39:
+                     _y.sent();
+                     return [3 /*break*/, 44];
+                 case 40:
                     parts = [];
                     while (response.reply.length > 0) {
                         parts.push(response.reply.substring(0, 1999));
                         response.reply = response.reply.substring(1999);
                     }
-
-
-                 case
-                     if (!(
-                     part = parts_1[
+                     _x = 0, parts_1 = parts;
+                     _y.label = 41;
+                 case 41:
+                     if (!(_x < parts_1.length)) return [3 /*break*/, 44];
+                     part = parts_1[_x];
                     return [4 /*yield*/, message.reply({
                         content: part,
                         allowedMentions: { repliedUser: false },
                     })];
-                 case
-
-
-                 case
-
-                     return [3 /*break*/,
-                 case
-                 case
-                     error_2 =
+                 case 42:
+                     _y.sent();
+                     _y.label = 43;
+                 case 43:
+                     _x++;
+                     return [3 /*break*/, 41];
+                 case 44: return [3 /*break*/, 46];
+                 case 45:
+                     error_2 = _y.sent();
                     console.error("Error processing message in file");
                     throw error_2;
-                 case
+                 case 46: return [2 /*return*/];
             }
         });
     });
@@ -267,7 +288,7 @@ function attemptImageCaptioning(imageUrl) {
            switch (_a.label) {
                case 0:
                    _a.trys.push([0, 2, , 3]);
-                    return [4 /*yield*/, axios_1.post("https://api-inference.huggingface.co/models/Salesforce/blip-image-captioning-base", { image: imageUrl }, {
+                    return [4 /*yield*/, axios_1.default.post("https://api-inference.huggingface.co/models/Salesforce/blip-image-captioning-base", { image: imageUrl }, {
                        headers: {
                            "Content-Type": "application/json",
                            Authorization: "Bearer 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",
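The only change in this hunk is calling axios through its default export (axios_1.default.post instead of axios_1.post), which is what the compiled CommonJS interop actually exposes. A standalone sketch of the same captioning request, with the package's hard-coded token replaced by a caller-supplied hfToken (an assumption, not how the published file does it):

```js
// Sketch of the corrected BLIP image-captioning call via axios's default export.
// `hfToken` is an assumption; the published file embeds its own key.
const axios = require('axios');

async function caption(imageUrl, hfToken) {
    const res = await axios.post(
        'https://api-inference.huggingface.co/models/Salesforce/blip-image-captioning-base',
        { image: imageUrl },
        {
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${hfToken}`,
            },
        }
    );
    return res.data;
}
```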
@@ -314,4 +335,39 @@ function attemptImageCaptioning(imageUrl) {
            }
        });
    });
- }
+ }
+ function apexai(prompt) {
+     return __awaiter(this, void 0, void 0, function () {
+         var messages, formattedMessages, response, generatedText, lines, output, error_6;
+         return __generator(this, function (_a) {
+             switch (_a.label) {
+                 case 0:
+                     _a.trys.push([0, 2, , 3]);
+                     messages = [
+                         { "role": "user", "content": "".concat(prompt) }
+                     ];
+                     formattedMessages = messages.map(function (message) { return "[".concat(message.role, "] ").concat(message.content); }).join('\n');
+                     return [4 /*yield*/, axios_1.default.post("https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1", {
+                         inputs: formattedMessages
+                     }, {
+                         headers: {
+                             'Authorization': "Bearer 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",
+                             'Content-Type': 'application/json'
+                         }
+                     })];
+                 case 1:
+                     response = _a.sent();
+                     generatedText = response.data[0].generated_text;
+                     lines = generatedText.split('\n').slice(1);
+                     output = lines.join('\n');
+                     return [2 /*return*/, output];
+                 case 2:
+                     error_6 = _a.sent();
+                     console.error('Error:', error_6.response.data);
+                     return [2 /*return*/, 'Please wait i am on cooldown.'];
+                 case 3: return [2 /*return*/];
+             }
+         });
+     });
+ }
+ exports.apexai = apexai;