apexify.js 2.2.3 → 2.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/lib/ai/apexAI.js +61 -125
- package/lib/ai/functions/draw.js +2 -2
- package/package.json +1 -1
package/README.md
CHANGED

@@ -118,6 +118,7 @@ const options = {
     },
     readFiles: false, // Whether to read attached files (true/false)
     enhancer: false, // Whether to enhance text before processing (true/false)
+    nsfw: false, // Whether to prevent nsfw generated images (true/false)
 };

 await apexAI(message, options)

@@ -991,6 +992,7 @@ Keep experimenting, and feel free to contact me for assistance! Suggestions and
 - turbo
 - turbo-16k
 - gemini
+- apexChat
 </details>
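For context, a minimal sketch of how the new option surfaces in the README's options example (only the nsfw flag and the apexChat model name come from this diff; the other values and the chatModel mapping are illustrative assumptions):

    // Sketch based on the README snippet above; values other than nsfw are illustrative.
    const options = {
        readFiles: false,      // Whether to read attached files (true/false)
        enhancer: false,       // Whether to enhance text before processing (true/false)
        nsfw: false,           // New in 2.3.1: whether to prevent nsfw generated images (true/false)
        chatModel: "apexChat", // Assumed: the newly listed apexChat model used as a chatModel value
    };

    await apexAI(message, options);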
package/lib/ai/apexAI.js
CHANGED

@@ -44,26 +44,11 @@ var discord_js_1 = require("discord.js");
 var tools_1 = require("../discord/events/handler/tools");
 var drawMenu_1 = require("../discord/events/handler/drawMenu");
 var hercai = new hercai_1.Hercai();
-var unclearPhrases = [
-    "Can you please provide more information or clarify your question?",
-    "Could you please provide more context or clarify your question?",
-    "Could you please provide more context or clarify what you mean?",
-    "I'm not sure I understand the question.",
-    "Could you please provide more context or clarify your question",
-    "I'm not sure what you're asking for.",
-    "Could you please provide more context or rephrase your question?",
-    "I'm not sure what you're asking",
-    "Can you please rephrase or provide more context?",
-    "Could you please provide more context or clarify your request?",
-    "I couldn't understand your input",
-    "Could you please provide some more information or clarify what you mean?",
-    "Can you please provide more information or clarify your request?",
-];
 function apexAI(message, options) {
-    var _a, _b
+    var _a, _b;
     if (options === void 0) { options = {}; }
     return __awaiter(this, void 0, void 0, function () {
-        var
+        var _c, voice, _d, voiceModel, _e, voice_code, _f, apiKey, _g, type, _h, draw, _j, drawTrigger, _k, imageModel, _l, numOfImages, _m, chatModel, _o, keywords, _p, keywordResponses, _q, loader, _r, readFiles, _s, nsfw, _t, enhancer, loadingMessage, loadingTimer_1, combinedText, _u, attachment, pdfContent, txtContent, replied, fetchedMessage, _v, finalText, response, textEnhancer, _i, keywords_1, keyword, textToDraw, drawValid, error_1, parts, _w, parts_1, part, parts, _x, parts_2, part, error_2;
         return __generator(this, function (_y) {
             switch (_y.label) {
                 case 0:
@@ -78,11 +63,11 @@ function apexAI(message, options) {
                     _y.sent();
                     _y.label = 3;
                 case 3:
-                    _y.trys.push([3,
-
+                    _y.trys.push([3, 50, , 51]);
+                    _c = options.voice, voice = _c === void 0 ? false : _c, _d = options.voiceModel, voiceModel = _d === void 0 ? "google" : _d, _e = options.voice_code, voice_code = _e === void 0 ? "en-US-3" : _e, _f = options.apiKey, apiKey = _f === void 0 ? "" : _f, _g = options.type, type = _g === void 0 ? "b" : _g, _h = options.draw, draw = _h === void 0 ? false : _h, _j = options.drawTrigger, drawTrigger = _j === void 0 ? ["create", "رسم"] : _j, _k = options.imageModel, imageModel = _k === void 0 ? "prodia" : _k, _l = options.numOfImages, numOfImages = _l === void 0 ? 2 : _l, _m = options.chatModel, chatModel = _m === void 0 ? "v3" : _m, _o = options.keywords, keywords = _o === void 0 ? [] : _o, _p = options.keywordResponses, keywordResponses = _p === void 0 ? {} : _p, _q = options.loader, loader = _q === void 0 ? {
                         loadingMessage: 'loading...',
                         loadingTimer: 3000
-                    } :
+                    } : _q, _r = options.readFiles, readFiles = _r === void 0 ? false : _r, _s = options.nsfw, nsfw = _s === void 0 ? false : _s, _t = options.enhancer, enhancer = _t === void 0 ? false : _t;
                     if (!message.attachments.some(function (attachment) {
                         return attachment.contentType.startsWith("audio/");
                     })) return [3 /*break*/, 5];
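The long compiled line above only fills in option defaults. A rough source-level equivalent, shown only to make those defaults readable (the pre-compilation source is not part of this diff):

    // All option names and default values below are taken verbatim from the compiled line above.
    const {
        voice = false,
        voiceModel = "google",
        voice_code = "en-US-3",
        apiKey = "",
        type = "b",
        draw = false,
        drawTrigger = ["create", "رسم"],
        imageModel = "prodia",
        numOfImages = 2,
        chatModel = "v3",
        keywords = [],
        keywordResponses = {},
        loader = { loadingMessage: 'loading...', loadingTimer: 3000 },
        readFiles = false,
        nsfw = false,      // new option in 2.3.1
        enhancer = false,
    } = options;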
@@ -99,7 +84,7 @@ function apexAI(message, options) {
                     }).then(function (replyMessage) {
                         setTimeout(function () {
                             replyMessage.delete().catch(console.error);
-                        }, loadingTimer_1);
+                        }, loadingTimer_1 || 3000);
                     })];
                 case 6:
                     _y.sent();
@@ -107,10 +92,10 @@ function apexAI(message, options) {
                 case 7: return [4 /*yield*/, (0, utils_1.processMessageContent)(message.content)];
                 case 8:
                     combinedText = _y.sent();
-
+                    _u = combinedText;
                     return [4 /*yield*/, (0, utils_1.processImageAttachments)(message.attachments)];
                 case 9:
-                    combinedText =
+                    combinedText = _u + _y.sent();
                     if (!!combinedText) return [3 /*break*/, 11];
                     return [4 /*yield*/, message.reply({ content: "Please provide a message or an attachment." })];
                 case 10: return [2 /*return*/, _y.sent()];
@@ -145,10 +130,10 @@ function apexAI(message, options) {
                         replied += fetchedMessage.content;
                     }
                     if (!fetchedMessage.attachments) return [3 /*break*/, 19];
-
+                    _v = replied;
                     return [4 /*yield*/, (0, utils_1.processImageAttachments)(fetchedMessage.attachments)];
                 case 18:
-                    replied =
+                    replied = _v + _y.sent();
                     _y.label = 19;
                 case 19:
                     finalText = "".concat(combinedText, "\n\n").concat(replied);
@@ -214,131 +199,82 @@ function apexAI(message, options) {
                     throw new Error("The Api is on a cool down for 10 seconds");
                 case 35: return [3 /*break*/, 36];
                 case 36:
-                    if (!
-
-
-
-
-
-                    }
-                    return [4 /*yield*/, attemptImageCaptioning(imageUrl)];
+                    if (!(typeof response === 'string')) return [3 /*break*/, 43];
+                    if (!(response.length <= 2000)) return [3 /*break*/, 38];
+                    return [4 /*yield*/, message.reply({
+                            content: response,
+                            allowedMentions: { repliedUser: false },
+                        })];
                 case 37:
-
-
-                    response.reply = imageCaption + response.reply;
-                    }
-                    _y.label = 38;
+                    _y.sent();
+                    return [3 /*break*/, 42];
                 case 38:
-
+                    parts = [];
+                    while (response.length > 0) {
+                        parts.push(response.substring(0, 1999));
+                        response = response.substring(1999);
+                    }
+                    _w = 0, parts_1 = parts;
+                    _y.label = 39;
+                case 39:
+                    if (!(_w < parts_1.length)) return [3 /*break*/, 42];
+                    part = parts_1[_w];
+                    return [4 /*yield*/, message.reply({
+                            content: part,
+                            allowedMentions: { repliedUser: false },
+                        })];
+                case 40:
+                    _y.sent();
+                    _y.label = 41;
+                case 41:
+                    _w++;
+                    return [3 /*break*/, 39];
+                case 42: return [3 /*break*/, 49];
+                case 43:
+                    if (!(typeof response === 'object' && response.reply)) return [3 /*break*/, 49];
+                    if (!(response.reply.length <= 2000)) return [3 /*break*/, 45];
                     return [4 /*yield*/, message.reply({
                             content: response.reply,
                             allowedMentions: { repliedUser: false },
                         })];
-                case
+                case 44:
                     _y.sent();
-                    return [3 /*break*/,
-                case
+                    return [3 /*break*/, 49];
+                case 45:
                     parts = [];
                     while (response.reply.length > 0) {
                         parts.push(response.reply.substring(0, 1999));
                         response.reply = response.reply.substring(1999);
                     }
-                    _x = 0,
-                    _y.label =
-                case
-                    if (!(_x <
-                    part =
+                    _x = 0, parts_2 = parts;
+                    _y.label = 46;
+                case 46:
+                    if (!(_x < parts_2.length)) return [3 /*break*/, 49];
+                    part = parts_2[_x];
                     return [4 /*yield*/, message.reply({
                             content: part,
                             allowedMentions: { repliedUser: false },
                         })];
-                case
+                case 47:
                     _y.sent();
-                    _y.label =
-                case
+                    _y.label = 48;
+                case 48:
                     _x++;
-                    return [3 /*break*/,
-                case
-                case
+                    return [3 /*break*/, 46];
+                case 49: return [3 /*break*/, 51];
+                case 50:
                     error_2 = _y.sent();
                     console.error("Error processing message in file");
                     throw error_2;
-                case
+                case 51: return [2 /*return*/];
             }
         });
     });
 }
 exports.apexAI = apexAI;
-function containsUnclearPhrases(text) {
-    return unclearPhrases.some(function (phrase) { return text.includes(phrase); });
-}
-function attemptImageCaptioning(imageUrl) {
-    return __awaiter(this, void 0, void 0, function () {
-        var retryCount, maxRetries, fetchData, error_3, error_4;
-        var _this = this;
-        return __generator(this, function (_a) {
-            switch (_a.label) {
-                case 0:
-                    _a.trys.push([0, 7, , 8]);
-                    retryCount = 0;
-                    maxRetries = 3;
-                    fetchData = function () { return __awaiter(_this, void 0, void 0, function () {
-                        var response, error_5;
-                        return __generator(this, function (_a) {
-                            switch (_a.label) {
-                                case 0:
-                                    _a.trys.push([0, 2, , 3]);
-                                    return [4 /*yield*/, axios_1.default.post("https://api-inference.huggingface.co/models/Salesforce/blip-image-captioning-base", { image: imageUrl }, {
-                                            headers: {
-                                                "Content-Type": "application/json",
-                                                Authorization: "Bearer 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",
-                                            },
-                                        })];
-                                case 1:
-                                    response = _a.sent();
-                                    if (response.status === 200) {
-                                        return [2 /*return*/, response.data[0].generated_text];
-                                    }
-                                    else {
-                                        console.error("Failed to fetch image captioning API: ".concat(response.statusText));
-                                        return [2 /*return*/, null];
-                                    }
-                                    return [3 /*break*/, 3];
-                                case 2:
-                                    error_5 = _a.sent();
-                                    console.error("Error fetching data: ".concat(error_5.message));
-                                    throw error_5;
-                                case 3: return [2 /*return*/];
-                            }
-                        });
-                    }); };
-                    _a.label = 1;
-                case 1:
-                    if (!(retryCount < maxRetries)) return [3 /*break*/, 6];
-                    _a.label = 2;
-                case 2:
-                    _a.trys.push([2, 4, , 5]);
-                    return [4 /*yield*/, fetchData()];
-                case 3: return [2 /*return*/, _a.sent()];
-                case 4:
-                    error_3 = _a.sent();
-                    console.error("Error fetching data (Retry ".concat(retryCount + 1, "): ").concat(error_3.message));
-                    retryCount++;
-                    return [3 /*break*/, 5];
-                case 5: return [3 /*break*/, 1];
-                case 6: return [2 /*return*/, null];
-                case 7:
-                    error_4 = _a.sent();
-                    console.error("Error in attemptImageCaptioning: ".concat(error_4.message));
-                    return [2 /*return*/, null];
-                case 8: return [2 /*return*/];
-            }
-        });
-    });
-}
 function apexai(prompt) {
     return __awaiter(this, void 0, void 0, function () {
-        var messages, formattedMessages, response, generatedText, lines, output,
+        var messages, formattedMessages, response, generatedText, lines, output, error_3;
         return __generator(this, function (_a) {
             switch (_a.label) {
                 case 0:
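The reworked cases 36–49 drop the removed image-captioning path and handle the model reply directly: any reply longer than Discord's 2,000-character message limit is split into chunks before sending. A rough sketch of that logic (the helper name sendLongReply is hypothetical; message.reply, allowedMentions, and the 1999-character chunking come from the diff above):

    // Hypothetical helper illustrating the reply-splitting compiled above.
    async function sendLongReply(message, text) {
        if (text.length <= 2000) {
            return message.reply({ content: text, allowedMentions: { repliedUser: false } });
        }
        const parts = [];
        while (text.length > 0) {
            parts.push(text.substring(0, 1999)); // same chunk size as the compiled code
            text = text.substring(1999);
        }
        for (const part of parts) {
            await message.reply({ content: part, allowedMentions: { repliedUser: false } });
        }
    }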
@@ -347,7 +283,7 @@ function apexai(prompt) {
                         { "role": "user", "content": "".concat(prompt) }
                     ];
                     formattedMessages = messages.map(function (message) { return "[".concat(message.role, "] ").concat(message.content); }).join('\n');
-                    return [4 /*yield*/, axios_1.
+                    return [4 /*yield*/, axios_1.post("https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1", {
                             inputs: formattedMessages
                         }, {
                             headers: {
@@ -362,8 +298,8 @@ function apexai(prompt) {
                     output = lines.join('\n');
                     return [2 /*return*/, output];
                 case 2:
-
-                    console.error('Error:',
+                    error_3 = _a.sent();
+                    console.error('Error:', error_3.response.data);
                     return [2 /*return*/, 'Please wait i am on cooldown.'];
                 case 3: return [2 /*return*/];
             }
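The apexai helper that backs the new apexChat model posts the formatted conversation to the Hugging Face Inference API. A minimal sketch of that request (the model URL, the { inputs } payload, and the cooldown fallback string come from this diff; the token variable and the response handling are assumptions for illustration):

    const axios = require('axios');

    // Sketch only: HF_TOKEN and the exact response shape are assumptions.
    async function queryMixtral(formattedMessages) {
        try {
            const response = await axios.post(
                "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1",
                { inputs: formattedMessages },
                { headers: { "Content-Type": "application/json", Authorization: `Bearer ${process.env.HF_TOKEN}` } }
            );
            return response.data;
        } catch (error) {
            console.error('Error:', error.response && error.response.data);
            return 'Please wait i am on cooldown.';
        }
    }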
package/lib/ai/functions/draw.js
CHANGED

@@ -338,7 +338,7 @@ function drawImage(_0x2f79ae, numOfImages, _0x28baff, _0x5cea2c, _0x59b128, _0x1
                         "ass",
                     ];
                     if (nsfwWords.some(function (word) { return textToCheck_1.includes(word); })) {
-
+                        return _0x2f79ae.reply("Warning ⚠️. The generated image contatining nsfw content. Turn off nsfw to send nsfw images.");
                     }
                     _c.label = 17;
                 case 17:
@@ -398,7 +398,7 @@ function drawImage(_0x2f79ae, numOfImages, _0x28baff, _0x5cea2c, _0x59b128, _0x1
                         .setStyle(discord_js_1.ButtonStyle.Primary)
                         .setLabel("Redraw")
                         .setCustomId("redraw_button");
-                    row1 = (_a = new discord_js_1.ActionRowBuilder()).addComponents.apply(_a, __spreadArray(__spreadArray([], buttonsRow1, false)
+                    row1 = (_a = new discord_js_1.ActionRowBuilder()).addComponents.apply(_a, __spreadArray(__spreadArray([], buttonsRow1, false)));
                     row2 = new discord_js_1.ActionRowBuilder().addComponents(selectMenu_1);
                     return [4 /*yield*/, _0x2f79ae.reply({
                             files: _0x168f75,