@hyperbytes/wappler-all-in-one-ai-v2 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@hyperbytes/wappler-all-in-one-ai-v2",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.2",
|
|
4
4
|
"description": "Versitile interface to chatGPT, Gemini Claude with file analysis cababilities",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"author": {
|
|
@@ -19,7 +19,7 @@
|
|
|
19
19
|
"openai": "^6.15.0",
|
|
20
20
|
"sharp": "^0.34.5",
|
|
21
21
|
"xlsx": "^0.18.5",
|
|
22
|
-
"pdf-
|
|
22
|
+
"pdf-parse": "^2.4.5",
|
|
23
23
|
"path": "^0.12.7"
|
|
24
24
|
}
|
|
25
25
|
}
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
{
|
|
2
|
+
type: 'multiaiv2_multiaiv2',
|
|
3
|
+
module : 'multiaiv2',
|
|
4
|
+
action : 'multiaiv2',
|
|
5
|
+
groupTitle : 'AI Modules',
|
|
6
|
+
groupIcon : 'fas fa-solid fa-brain comp-loops',
|
|
7
|
+
title : 'Multi AI Module V2',
|
|
8
|
+
icon : 'fas fa-brands fa-brain comp-loops',
|
|
9
|
+
serverModel : ['node'],
|
|
10
|
+
dataPickObject: true,
|
|
11
|
+
dataScheme: [
|
|
12
|
+
{name: 'result', type: 'text'},
|
|
13
|
+
],
|
|
14
|
+
usedModules : {
|
|
15
|
+
node: {
|
|
16
|
+
'@anthropic-ai/sdk' : '^0.71.2',
|
|
17
|
+
'@google/generative-ai' : '^0.24.1',
|
|
18
|
+
'openai': '^6.15.0',
|
|
19
|
+
'sharp': '^0.34.5',
|
|
20
|
+
'xlsx': '^0.18.5',
|
|
21
|
+
'pdf-parse':'^2.4.5',
|
|
22
|
+
'path':'^0.12.7'
|
|
23
|
+
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
properties : [
|
|
27
|
+
{
|
|
28
|
+
group: 'Input Question',
|
|
29
|
+
variables: [
|
|
30
|
+
{ name: 'actionName',
|
|
31
|
+
optionName: 'name',
|
|
32
|
+
title: 'Name',
|
|
33
|
+
type: 'text',
|
|
34
|
+
required: true,
|
|
35
|
+
defaultValue: '',
|
|
36
|
+
baseName: "multiAI1"
|
|
37
|
+
},
|
|
38
|
+
{ name: 'mytraining',
|
|
39
|
+
optionName: 'mytraining',
|
|
40
|
+
title: 'Training Info',
|
|
41
|
+
type: 'textarea',
|
|
42
|
+
required: true,
|
|
43
|
+
defaultValue: '',
|
|
44
|
+
serverDataBindings: true,
|
|
45
|
+
help: 'Info about the user to train the AI on, use dynamic data if you can, to improve the UX'
|
|
46
|
+
},
|
|
47
|
+
{ name: 'question',
|
|
48
|
+
optionName: 'question',
|
|
49
|
+
title: 'AI Question',
|
|
50
|
+
type: 'text',
|
|
51
|
+
required: true,
|
|
52
|
+
defaultValue: '',
|
|
53
|
+
serverDataBindings: true,
|
|
54
|
+
help: 'The question you wish to ask'
|
|
55
|
+
},
|
|
56
|
+
{ name: 'maxtokens',
|
|
57
|
+
optionName: 'maxtokens',
|
|
58
|
+
title: 'Maximum tokens',
|
|
59
|
+
type: 'text',
|
|
60
|
+
required: false,
|
|
61
|
+
serverDataBindings: true,
|
|
62
|
+
help: 'The maximum number of tokens to use'
|
|
63
|
+
},
|
|
64
|
+
{ name: 'temperature',
|
|
65
|
+
optionName: 'temperature',
|
|
66
|
+
title: 'temperature',
|
|
67
|
+
type: 'text',
|
|
68
|
+
required: false,
|
|
69
|
+
defaultValue: ".5",
|
|
70
|
+
serverDataBindings: true,
|
|
71
|
+
help: 'The temperature'
|
|
72
|
+
},
|
|
73
|
+
{ name: 'engine', optionName: 'engine', title: 'Engine',
|
|
74
|
+
type: 'droplist',
|
|
75
|
+
values: [
|
|
76
|
+
{title: 'claude-3-5-sonnet-', value: 'claude-3-5-sonnet-latest' },
|
|
77
|
+
{title: 'claude-3-opus', value: 'claude-3-opus-20240229' },
|
|
78
|
+
{title: 'claude-3.5-haiku Latest', value: 'claude-3.5-haiku-latest' },
|
|
79
|
+
{title: 'claude-3.7-Sonnet', value: 'claude-3.7-sonnet' },
|
|
80
|
+
{title: 'claude-4-5-Sonnet', value: 'claude-sonnet-4-5' },
|
|
81
|
+
{title: 'claude-4-5-Haiku', value: 'claude-haiku-4-5' },
|
|
82
|
+
{title: 'claude-4-5-Opus', value: 'claude-opus-4-5' },
|
|
83
|
+
{title: 'claude-4-5-Sonnet-20250929', value: 'claude-sonnet-4-5' },
|
|
84
|
+
{title: 'Gemini 1.5 Pro', value: 'gemini-1.5-pro-latest' },
|
|
85
|
+
{title: 'Gemini 2.0 Flash', value: 'gemini-2.0-flash' },
|
|
86
|
+
{title: 'Gemini 2.5 Pro', value: 'gemini-2.0-pro' },
|
|
87
|
+
{title: 'Deepseek Reasoner', value: 'deepseek-reasoner' },
|
|
88
|
+
{title: 'Deepseek v3', value: 'deepseek-v3' },
|
|
89
|
+
{title: 'deepseek v3 vision', value: 'deepseek-v3-vision' },
|
|
90
|
+
{title: 'gpt-4o-mini', value: 'gpt-4o-mini' },
|
|
91
|
+
{title: 'gpt-4o ', value: 'gpt-4o' },
|
|
92
|
+
{title: 'gpt-4.1', value: 'gpt-4.1' },
|
|
93
|
+
{title: 'gpt-5', value: 'gpt-5' },
|
|
94
|
+
{title: 'gpt-5 mini', value: 'gpt-5-mini' },
|
|
95
|
+
{title: 'gpt-5 nano', value: 'gpt-5-nano' }
|
|
96
|
+
],
|
|
97
|
+
help: 'Choose your AI service'
|
|
98
|
+
},
|
|
99
|
+
{ name: 'image_url',
|
|
100
|
+
optionName: 'image_url',
|
|
101
|
+
title: 'Upload File',
|
|
102
|
+
type: 'file',
|
|
103
|
+
required: false,
|
|
104
|
+
serverDataBindings: true,
|
|
105
|
+
help: 'File to upload'
|
|
106
|
+
},
|
|
107
|
+
{ name: 'output',
|
|
108
|
+
optionName: 'output',
|
|
109
|
+
title: 'Output',
|
|
110
|
+
type: 'boolean',
|
|
111
|
+
defaultValue: false
|
|
112
|
+
}
|
|
113
|
+
]
|
|
114
|
+
},
|
|
115
|
+
|
|
116
|
+
]
|
|
117
|
+
}
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
const { OpenAI } = require('openai');
|
|
2
|
+
const { GoogleGenerativeAI } = require('@google/generative-ai');
|
|
3
|
+
const Anthropic = require('@anthropic-ai/sdk');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const XLSX = require('xlsx');
|
|
7
|
+
const pdf = require('pdf-parse');
|
|
8
|
+
const sharp = require('sharp');
|
|
9
|
+
|
|
10
|
+
// Restrict `val` to the range [min, max] (cap at max first, then floor at min,
// preserving the original min-precedence when min > max).
const clamp = (val, min, max) => {
  const capped = Math.min(max, val);
  return Math.max(min, capped);
};
|
|
11
|
+
|
|
12
|
+
/**
 * HELPER: Compresses and converts image to Base64.
 *
 * Resizes the image to fit inside 1500x1500 (never enlarging), re-encodes it
 * as JPEG at quality 80 and returns it as a `data:image/jpeg;base64,...` URI.
 *
 * @param {string} filePath - Image path; relative paths resolve against process.cwd().
 * @returns {Promise<string|null>} Data URI, or null when the file is missing
 *   or sharp fails (best-effort: callers treat null as "no image").
 */
async function getCompressedImageBase64(filePath) {
    try {
        // Fix: path.join(cwd, '/abs/file') prepends cwd onto absolute paths;
        // path.resolve handles both relative and absolute inputs correctly.
        const fullPath = path.resolve(process.cwd(), filePath);
        console.log("Checking for image at:", fullPath);

        if (!fs.existsSync(fullPath)) {
            console.error("FILE NOT FOUND:", fullPath);
            return null;
        }

        const buffer = await sharp(fullPath)
            .resize(1500, 1500, { fit: 'inside', withoutEnlargement: true })
            .jpeg({ quality: 80 })
            .toBuffer();

        console.log("Image compressed successfully. Size:", (buffer.length / 1024).toFixed(2), "KB");
        return `data:image/jpeg;base64,${buffer.toString('base64')}`;
    } catch (err) {
        console.error("Sharp Compression Error:", err.message);
        return null;
    }
}
|
37
|
+
|
|
38
|
+
/**
 * HELPER: Extract text from Docs.
 *
 * Supported: .txt/.csv (raw UTF-8 text), .xlsx/.xls (first sheet rendered as
 * CSV), .pdf (extracted text via pdf-parse).
 *
 * @param {string} filePath - Document path; relative paths resolve against process.cwd().
 * @returns {Promise<string|null>} Extracted text, or null for missing,
 *   unsupported or unreadable files (best-effort contract).
 */
async function getFileContent(filePath) {
    try {
        // Fix: path.join(cwd, '/abs/file') prepends cwd onto absolute paths;
        // path.resolve handles both relative and absolute inputs correctly.
        const fullPath = path.resolve(process.cwd(), filePath);
        if (!fs.existsSync(fullPath)) return null;
        const ext = path.extname(fullPath).toLowerCase();

        if (ext === '.txt' || ext === '.csv') return fs.readFileSync(fullPath, 'utf8');
        if (ext === '.xlsx' || ext === '.xls') {
            const wb = XLSX.readFile(fullPath);
            // Only the first sheet is extracted.
            return XLSX.utils.sheet_to_csv(wb.Sheets[wb.SheetNames[0]]);
        }
        if (ext === '.pdf') {
            const data = await pdf(fs.readFileSync(fullPath));
            return data.text;
        }
    } catch (e) {
        // Was a silent swallow; keep the best-effort null but log the cause
        // for diagnosis, matching the image helper's error logging.
        console.error("File extraction error:", e.message);
        return null;
    }
    return null; // unsupported extension
}
|
59
|
+
|
|
60
|
+
exports.multiaiv2 = async function (options) {
|
|
61
|
+
let question = this.parseRequired(options.question, "*", 'No Question passed');
|
|
62
|
+
const engine = this.parse(options.engine) || 'gpt-4o-mini';
|
|
63
|
+
const maxTokens = parseInt(this.parseOptional(options.maxtokens, "*", 2000));
|
|
64
|
+
const temp = parseFloat(this.parseOptional(options.temperature, "*", 0.7));
|
|
65
|
+
const fileInput = this.parse(options.image_url);
|
|
66
|
+
|
|
67
|
+
const modelName = engine.toLowerCase();
|
|
68
|
+
const isReasoningModel = modelName.includes('gpt-5') || modelName.startsWith('o1') || modelName.startsWith('o3');
|
|
69
|
+
|
|
70
|
+
console.log(`Starting Request - Model: ${engine}, File: ${fileInput}`);
|
|
71
|
+
|
|
72
|
+
// 1. Pre-process Docs (Injection)
|
|
73
|
+
if (fileInput && /\.(pdf|xlsx|xls|txt|csv)$/i.test(fileInput)) {
|
|
74
|
+
const text = await getFileContent(fileInput);
|
|
75
|
+
if (text) {
|
|
76
|
+
question += `\n\n[FILE CONTENT]:\n${text}`;
|
|
77
|
+
console.log("Document text injected into prompt.");
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
try {
|
|
82
|
+
// --- OpenAI (GPT-4o, GPT-5, o1) ---
|
|
83
|
+
if (modelName.includes('gpt') || modelName.includes('o1') || modelName.includes('o3')) {
|
|
84
|
+
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
85
|
+
|
|
86
|
+
let userContent = [{ type: "text", text: question }];
|
|
87
|
+
|
|
88
|
+
if (fileInput && /\.(jpg|jpeg|png|webp)$/i.test(fileInput) && !isReasoningModel) {
|
|
89
|
+
const b64 = await getCompressedImageBase64(fileInput);
|
|
90
|
+
if (b64) {
|
|
91
|
+
userContent.push({
|
|
92
|
+
type: "image_url",
|
|
93
|
+
image_url: { url: b64, detail: "high" }
|
|
94
|
+
});
|
|
95
|
+
console.log("Image successfully attached to OpenAI payload.");
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
const payload = {
|
|
100
|
+
model: engine,
|
|
101
|
+
messages: [{ role: 'user', content: userContent }]
|
|
102
|
+
};
|
|
103
|
+
|
|
104
|
+
if (isReasoningModel) {
|
|
105
|
+
payload.max_completion_tokens = Math.max(maxTokens, 5000);
|
|
106
|
+
} else {
|
|
107
|
+
payload.max_tokens = maxTokens;
|
|
108
|
+
payload.temperature = clamp(temp, 0, 2);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
const res = await client.chat.completions.create(payload);
|
|
112
|
+
return { 'result': res.choices[0].message.content };
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
// --- Gemini ---
|
|
116
|
+
if (modelName.includes('gemini')) {
|
|
117
|
+
const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY);
|
|
118
|
+
const model = genAI.getGenerativeModel({ model: engine });
|
|
119
|
+
const promptParts = [{ text: question }];
|
|
120
|
+
|
|
121
|
+
if (fileInput && /\.(jpg|jpeg|png|webp)$/i.test(fileInput)) {
|
|
122
|
+
const b64 = await getCompressedImageBase64(fileInput);
|
|
123
|
+
if (b64) {
|
|
124
|
+
promptParts.push({
|
|
125
|
+
inlineData: {
|
|
126
|
+
data: b64.split(',')[1],
|
|
127
|
+
mimeType: "image/jpeg"
|
|
128
|
+
}
|
|
129
|
+
});
|
|
130
|
+
console.log("Image successfully attached to Gemini payload.");
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
const result = await model.generateContent({ contents: [{ role: 'user', parts: promptParts }] });
|
|
135
|
+
return { 'result': result.response.text() };
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
// --- Claude ---
|
|
139
|
+
if (modelName.includes('claude')) {
|
|
140
|
+
const anthropic = new Anthropic({ apiKey: process.env.CLAUDE_API_KEY });
|
|
141
|
+
const messageParts = [{ type: "text", text: question }];
|
|
142
|
+
|
|
143
|
+
if (fileInput && /\.(jpg|jpeg|png|webp)$/i.test(fileInput)) {
|
|
144
|
+
const b64 = await getCompressedImageBase64(fileInput);
|
|
145
|
+
if (b64) {
|
|
146
|
+
messageParts.push({
|
|
147
|
+
type: "image",
|
|
148
|
+
source: {
|
|
149
|
+
type: "base64",
|
|
150
|
+
media_type: "image/jpeg",
|
|
151
|
+
data: b64.split(',')[1]
|
|
152
|
+
}
|
|
153
|
+
});
|
|
154
|
+
console.log("Image successfully attached to Claude payload.");
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
const res = await anthropic.messages.create({
|
|
159
|
+
model: engine,
|
|
160
|
+
max_tokens: maxTokens,
|
|
161
|
+
messages: [{ role: 'user', content: messageParts }],
|
|
162
|
+
});
|
|
163
|
+
return { "result": res.content[0].text };
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
} catch (e) {
|
|
167
|
+
console.error("API Call failed:", e.message);
|
|
168
|
+
throw new Error(`AI Extension Error: ${e.message}`);
|
|
169
|
+
}
|
|
170
|
+
};
|