smartplant 0.1.7 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -20
- package/package.json +134 -119
- package/src/main.js +430 -875
package/src/main.js
CHANGED
|
@@ -1,881 +1,436 @@
|
|
|
1
|
-
|
|
2
|
-
import
|
|
3
|
-
import
|
|
4
|
-
import
|
|
5
|
-
import {
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
import
|
|
9
|
-
import
|
|
10
|
-
import
|
|
11
|
-
import
|
|
12
|
-
import
|
|
13
|
-
import
|
|
14
|
-
import
|
|
15
|
-
import
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
function loadMessages(
|
|
20
|
-
|
|
21
|
-
const messages = {
|
|
22
|
-
de : MsgDe,
|
|
23
|
-
en : MsgEn,
|
|
24
|
-
es : MsgEs,
|
|
25
|
-
fr : MsgFr,
|
|
26
|
-
ja : MsgJa,
|
|
27
|
-
it : MsgIt,
|
|
28
|
-
pt : MsgPt,
|
|
29
|
-
ru : MsgRu,
|
|
30
|
-
zh : MsgZh,
|
|
31
|
-
}
|
|
32
|
-
|
|
33
|
-
try {
|
|
34
|
-
|
|
35
|
-
return messages[ language ] || MsgEn // Carga el archivo del idioma o el inglés por defecto
|
|
36
|
-
|
|
37
|
-
} catch ( error ) {
|
|
38
|
-
|
|
39
|
-
console.error( `Error loading language file: ${error.message}` )
|
|
40
|
-
return MsgEn // Si hay algún error, carga los mensajes en inglés por defecto
|
|
41
|
-
|
|
42
|
-
}
|
|
43
|
-
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
class AIClient {
|
|
47
|
-
|
|
48
|
-
constructor( type, apiKey, localModel ) {
|
|
49
|
-
|
|
50
|
-
this.type = type
|
|
51
|
-
this.apiKey = apiKey
|
|
52
|
-
this.localModel = localModel
|
|
53
|
-
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
async generateResponse( prompt, language ) {
|
|
57
|
-
|
|
58
|
-
const languagePrompt = `Respond in ${language}. `
|
|
59
|
-
const fullPrompt = languagePrompt + prompt
|
|
60
|
-
|
|
61
|
-
switch ( this.type ) {
|
|
62
|
-
|
|
63
|
-
case 'openai' :
|
|
64
|
-
return this.generateOpenAIResponse( fullPrompt )
|
|
65
|
-
case 'local' :
|
|
66
|
-
return this.generateLocalResponse( fullPrompt )
|
|
67
|
-
default :
|
|
68
|
-
throw new Error( 'Unsupported AI type' )
|
|
69
|
-
|
|
70
|
-
}
|
|
71
|
-
|
|
72
|
-
}
|
|
73
|
-
|
|
74
|
-
async generateOpenAIResponse( prompt ) {
|
|
75
|
-
|
|
76
|
-
try {
|
|
77
|
-
|
|
78
|
-
const response = await fetch( 'https://api.openai.com/v1/engines/davinci-codex/completions', {
|
|
79
|
-
method : 'POST',
|
|
80
|
-
headers : {
|
|
81
|
-
'Authorization' : `Bearer ${this.apiKey}`,
|
|
82
|
-
'Content-Type' : 'application/json',
|
|
83
|
-
},
|
|
84
|
-
body : JSON.stringify( {
|
|
85
|
-
prompt : prompt,
|
|
86
|
-
max_tokens : 500,
|
|
87
|
-
n : 1,
|
|
88
|
-
stop : null,
|
|
89
|
-
temperature : 0.7,
|
|
90
|
-
} ),
|
|
91
|
-
} )
|
|
92
|
-
|
|
93
|
-
if ( !response.ok ) {
|
|
94
|
-
|
|
95
|
-
throw new Error( `HTTP error! status: ${response.status}` )
|
|
96
|
-
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
const data = await response.json()
|
|
100
|
-
return data.choices[ 0 ].text.trim()
|
|
101
|
-
|
|
102
|
-
} catch ( error ) {
|
|
103
|
-
|
|
104
|
-
console.error( 'Error generating OpenAI response:', error )
|
|
105
|
-
return null
|
|
106
|
-
|
|
107
|
-
}
|
|
108
|
-
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
async generateLocalResponse( prompt ) {
|
|
112
|
-
|
|
113
|
-
try {
|
|
114
|
-
|
|
115
|
-
const command = `ollama run ${this.localModel} "${this.sanitizeInput( prompt )}"`
|
|
116
|
-
const output = execSync( command, { encoding: 'utf-8' } )
|
|
117
|
-
return output.trim()
|
|
118
|
-
|
|
119
|
-
} catch ( error ) {
|
|
120
|
-
|
|
121
|
-
console.error( 'Error generating local response:', error )
|
|
122
|
-
return null
|
|
123
|
-
|
|
124
|
-
}
|
|
125
|
-
|
|
126
|
-
}
|
|
127
|
-
|
|
128
|
-
sanitizeInput( input ) {
|
|
129
|
-
|
|
130
|
-
return input.replace( /"/g, '\\"' ).replace( /\n/g, ' ' )
|
|
131
|
-
|
|
132
|
-
}
|
|
133
|
-
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
class AIDetector {
|
|
137
|
-
|
|
138
|
-
async detectAI() {
|
|
139
|
-
|
|
140
|
-
try {
|
|
141
|
-
|
|
142
|
-
const output = execSync( 'ollama list', { encoding: 'utf-8' } )
|
|
143
|
-
const models = output.split( '\n' )
|
|
144
|
-
.filter( line => line.trim() && !line.startsWith( 'NAME' ) )
|
|
145
|
-
.map( line => line.split( ' ' )[ 0 ] )
|
|
146
|
-
if ( models.length > 0 ) {
|
|
147
|
-
|
|
148
|
-
return {
|
|
149
|
-
name : 'ollama',
|
|
150
|
-
models : models,
|
|
151
|
-
}
|
|
152
|
-
|
|
153
|
-
}
|
|
154
|
-
|
|
155
|
-
} catch ( _e ) {
|
|
156
|
-
|
|
157
|
-
console.error( 'Error detecting Ollama:' )
|
|
158
|
-
process.exit( 0 )
|
|
159
|
-
|
|
160
|
-
}
|
|
161
|
-
|
|
162
|
-
}
|
|
163
|
-
|
|
1
|
+
import { ReadlineParser } from '@serialport/parser-readline';
|
|
2
|
+
import chalk from 'chalk';
|
|
3
|
+
import enquirer from 'enquirer';
|
|
4
|
+
import { execSync } from 'node:child_process';
|
|
5
|
+
import { SerialPort } from 'serialport';
|
|
6
|
+
|
|
7
|
+
import MsgDe from './language/messages-de.js';
|
|
8
|
+
import MsgEn from './language/messages-en.js';
|
|
9
|
+
import MsgEs from './language/messages-es.js';
|
|
10
|
+
import MsgFr from './language/messages-fr.js';
|
|
11
|
+
import MsgIt from './language/messages-it.js';
|
|
12
|
+
import MsgJa from './language/messages-ja.js';
|
|
13
|
+
import MsgPt from './language/messages-pt.js';
|
|
14
|
+
import MsgRu from './language/messages-ru.js';
|
|
15
|
+
import MsgZh from './language/messages-zh.js';
|
|
16
|
+
|
|
17
|
+
// Locale code -> message bundle lookup (bundles imported from ./language/*).
const messagesMap = {
  de: MsgDe,
  en: MsgEn,
  es: MsgEs,
  fr: MsgFr,
  ja: MsgJa,
  it: MsgIt,
  pt: MsgPt,
  ru: MsgRu,
  zh: MsgZh,
};

/**
 * Resolve the message bundle for a locale code.
 * Unknown locales fall back to the English bundle.
 *
 * @param {string} lang - two-letter locale code (e.g. 'es')
 * @returns {object} the matching message bundle, or MsgEn
 */
function loadMessages(lang) {
  const bundle = messagesMap[lang];
  return bundle || MsgEn;
}
|
|
165
22
|
|
|
166
|
-
class
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
const response = await this.aiClient.generateResponse( prompt, this.language )
|
|
234
|
-
await this.simulateTyping( response )
|
|
235
|
-
return response.trim()
|
|
236
|
-
|
|
237
|
-
} catch ( error ) {
|
|
238
|
-
|
|
239
|
-
console.error( 'Error getting AI response:', error )
|
|
240
|
-
return null
|
|
241
|
-
|
|
242
|
-
}
|
|
243
|
-
|
|
244
|
-
}
|
|
245
|
-
|
|
246
|
-
async simulateTyping( text ) {
|
|
247
|
-
|
|
248
|
-
for ( let i = 0; i < text.length; i++ ) {
|
|
249
|
-
|
|
250
|
-
process.stdout.write( text[ i ] )
|
|
251
|
-
await new Promise( resolve => setTimeout( resolve, 10 ) )
|
|
252
|
-
|
|
253
|
-
}
|
|
254
|
-
console.log( '\n' )
|
|
255
|
-
|
|
256
|
-
}
|
|
257
|
-
|
|
258
|
-
formatPlantInfo() {
|
|
259
|
-
|
|
260
|
-
if ( !this.plantInfo ) return 'No hay información disponible.'
|
|
261
|
-
|
|
262
|
-
return `
|
|
263
|
-
Rangos ideales:
|
|
264
|
-
🌞 ${this.plantInfo.lighting.min}-${this.plantInfo.lighting.max} lux
|
|
265
|
-
🌡️ ${this.plantInfo.temperature.min}-${this.plantInfo.temperature.max}°C
|
|
266
|
-
💦 ${this.plantInfo.humidity.min}-${this.plantInfo.humidity.max}%
|
|
267
|
-
`
|
|
268
|
-
|
|
269
|
-
}
|
|
270
|
-
|
|
23
|
+
/**
 * Thin client over several chat-completion providers (OpenAI, Grok,
 * Claude, Gemini) plus local Ollama models.
 *
 * Contract: generateResponse() resolves to the reply text, or null when
 * the provider call failed (the error is logged, never rethrown).
 */
class AIService {
  /**
   * @param {string}  provider   'openai' | 'grok' | 'claude' | 'gemini' | 'local'
   * @param {?string} apiKey     API key for hosted providers (null for 'local')
   * @param {?string} localModel Ollama model name (only used when provider === 'local')
   */
  constructor(provider, apiKey = null, localModel = null) {
    this.provider = provider;
    this.apiKey = apiKey;
    this.localModel = localModel;
  }

  /**
   * Dispatch a prompt to the configured provider.
   * @param {string} prompt   user prompt
   * @param {string} language locale the model should answer in
   * @returns {Promise<?string>} reply text, or null on any provider error
   */
  async generateResponse(prompt, language) {
    // Prefix the prompt so the model answers in the UI language.
    const fullPrompt = `Responde en ${language}. ${prompt}`;
    try {
      switch (this.provider) {
        case 'openai': return this._openAICompatible(fullPrompt, 'https://api.openai.com/v1/chat/completions', 'gpt-4o-mini');
        case 'grok': return this._openAICompatible(fullPrompt, 'https://api.x.ai/v1/chat/completions', 'grok-beta');
        case 'claude': return this._claude(fullPrompt);
        case 'gemini': return this._gemini(fullPrompt);
        case 'local': return this._ollama(fullPrompt);
        default: throw new Error('Proveedor no soportado');
      }
    } catch (err) {
      console.error(chalk.red(`Error en ${this.provider}:`), err.message);
      return null;
    }
  }

  /**
   * Call an OpenAI-compatible chat-completions endpoint (OpenAI, xAI).
   * @throws {Error} on any non-2xx HTTP status
   */
  async _openAICompatible(prompt, url, model) {
    const res = await fetch(url, {
      method: 'POST',
      headers: { 'Authorization': `Bearer ${this.apiKey}`, 'Content-Type': 'application/json' },
      body: JSON.stringify({ model, messages: [{ role: 'user', content: prompt }], max_tokens: 600, temperature: 0.7 }),
    });
    if (!res.ok) throw new Error(`HTTP ${res.status}`);
    const data = await res.json();
    return data.choices[0].message.content.trim();
  }

  /** Call Anthropic's Messages API. @throws {Error} on non-2xx status */
  async _claude(prompt) {
    const res = await fetch('https://api.anthropic.com/v1/messages', {
      method: 'POST',
      headers: { 'x-api-key': this.apiKey, 'anthropic-version': '2023-06-01', 'Content-Type': 'application/json' },
      body: JSON.stringify({ model: 'claude-3-5-sonnet-20241022', max_tokens: 600, messages: [{ role: 'user', content: prompt }] }),
    });
    if (!res.ok) throw new Error(`HTTP ${res.status}`);
    const data = await res.json();
    return data.content[0].text.trim();
  }

  /**
   * Call Google's Gemini generateContent API.
   * Translates the two most common failure codes into actionable messages.
   * @throws {Error} on non-2xx status
   */
  async _gemini(prompt) {
    const res = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent?key=${this.apiKey}`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ contents: [{ parts: [{ text: prompt }] }] }),
    });
    if (!res.ok) {
      if (res.status === 429) throw new Error('429 - Límite de Gemini alcanzado. Espera 60 segundos.');
      if (res.status === 404) throw new Error('404 - Modelo Gemini no encontrado. Usa OpenAI o Grok.');
      throw new Error(`HTTP ${res.status}`);
    }
    const data = await res.json();
    return data.candidates[0].content.parts[0].text.trim();
  }

  /**
   * Run the prompt through a local Ollama model.
   *
   * SECURITY: the previous implementation interpolated the prompt into a
   * shell command with only `"`-escaping, leaving backticks and `$(…)`
   * exploitable. The prompt is now delivered via stdin (ollama reads the
   * prompt from stdin when piped), and the model name — the only value
   * still placed on the command line — is validated first.
   * @throws {Error} if the model name is not a plain ollama identifier
   */
  async _ollama(prompt) {
    if (!/^[A-Za-z0-9._:/-]+$/.test(this.localModel ?? '')) {
      throw new Error(`Nombre de modelo inválido: ${this.localModel}`);
    }
    const out = execSync(`ollama run ${this.localModel}`, { encoding: 'utf-8', input: prompt });
    return out.trim();
  }
}
|
|
272
90
|
|
|
273
91
|
export class SmartPlant {
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
}
|
|
616
|
-
|
|
617
|
-
getLightEmoji( percentage ) {
|
|
618
|
-
|
|
619
|
-
if ( percentage <= 0 ) return '🌑'
|
|
620
|
-
if ( percentage <= 30 ) return '🌑'
|
|
621
|
-
if ( percentage <= 60 ) return '🌥'
|
|
622
|
-
return '🌞'
|
|
623
|
-
|
|
624
|
-
}
|
|
625
|
-
|
|
626
|
-
getTemperatureEmoji( percentage ) {
|
|
627
|
-
|
|
628
|
-
if ( percentage <= 0 ) return '🧊'
|
|
629
|
-
if ( percentage <= 30 ) return '🧊'
|
|
630
|
-
if ( percentage <= 80 ) return '🌡️'
|
|
631
|
-
return '🔥'
|
|
632
|
-
|
|
633
|
-
}
|
|
634
|
-
|
|
635
|
-
getHappinessEmoji( averagePercentage ) {
|
|
636
|
-
|
|
637
|
-
if ( averagePercentage <= 0 ) return '😵'
|
|
638
|
-
if ( averagePercentage >= 90 ) return '🤩'
|
|
639
|
-
if ( averagePercentage >= 75 ) return '😊'
|
|
640
|
-
if ( averagePercentage >= 60 ) return '😐'
|
|
641
|
-
if ( averagePercentage >= 45 ) return '😞'
|
|
642
|
-
if ( averagePercentage >= 30 ) return '😖'
|
|
643
|
-
return '🥵'
|
|
644
|
-
|
|
645
|
-
}
|
|
646
|
-
|
|
647
|
-
logPlantStatus() {
|
|
648
|
-
|
|
649
|
-
const allSensorsZero = Object.values( this.sensors ).every( value => value === 0 || value === null || value === undefined )
|
|
650
|
-
|
|
651
|
-
if ( !this.hasSensors || allSensorsZero ) {
|
|
652
|
-
|
|
653
|
-
console.log( chalk.yellow( '🔔 Warning: No data input or sensors are disconnected.' ) )
|
|
654
|
-
console.log( '😵 | Lighting: 🌑 (0%) | Temperature: 🧊 (0.0°C) | Humidity: 🍂 (0%)' )
|
|
655
|
-
return
|
|
656
|
-
|
|
657
|
-
}
|
|
658
|
-
|
|
659
|
-
const humidityPercentage = this.calculatePercentage( this.sensors.humidity, this.alerts.humidity.min, this.alerts.humidity.max )
|
|
660
|
-
const lightPercentage = this.calculatePercentage( this.sensors.light, this.alerts.light.min, this.alerts.light.max )
|
|
661
|
-
const temperaturePercentage = this.calculatePercentage( this.sensors.temperature, this.alerts.temperature.min, this.alerts.temperature.max )
|
|
662
|
-
|
|
663
|
-
const averagePercentage = ( humidityPercentage + lightPercentage + temperaturePercentage ) / 3
|
|
664
|
-
|
|
665
|
-
const happinessEmoji = this.getHappinessEmoji( averagePercentage )
|
|
666
|
-
const humidityEmoji = this.getHumidityEmoji( humidityPercentage )
|
|
667
|
-
const lightEmoji = this.getLightEmoji( lightPercentage )
|
|
668
|
-
const temperatureEmoji = this.getTemperatureEmoji( temperaturePercentage )
|
|
669
|
-
|
|
670
|
-
const status = `${happinessEmoji} | Lighting: ${lightEmoji} (${lightPercentage.toFixed( 0 )}%) | Temperature: ${temperatureEmoji} (${this.sensors.temperature?.toFixed( 1 ) || 0.0}°C) | Humidity: ${humidityEmoji} (${humidityPercentage.toFixed( 0 )}%)`
|
|
671
|
-
console.log( status )
|
|
672
|
-
|
|
673
|
-
this.historicalData.push( {
|
|
674
|
-
timestamp : new Date(),
|
|
675
|
-
humidity : this.sensors.humidity,
|
|
676
|
-
light : this.sensors.light,
|
|
677
|
-
temperature : this.sensors.temperature,
|
|
678
|
-
} )
|
|
679
|
-
|
|
680
|
-
this.predictCriticalState()
|
|
681
|
-
|
|
682
|
-
}
|
|
683
|
-
|
|
684
|
-
predictCriticalState() {
|
|
685
|
-
|
|
686
|
-
if ( this.historicalData.length < 10 ) return // Need more data for prediction
|
|
687
|
-
|
|
688
|
-
const recentData = this.historicalData.slice( -10 )
|
|
689
|
-
const trends = {
|
|
690
|
-
humidity : this.calculateTrend( recentData.map( d => d.humidity ) ),
|
|
691
|
-
light : this.calculateTrend( recentData.map( d => d.light ) ),
|
|
692
|
-
temperature : this.calculateTrend( recentData.map( d => d.temperature ) ),
|
|
693
|
-
}
|
|
694
|
-
|
|
695
|
-
for ( const [ sensor, trend ] of Object.entries( trends ) ) {
|
|
696
|
-
|
|
697
|
-
if ( Math.abs( trend ) > 0.5 ) { // Significant trend detected
|
|
698
|
-
|
|
699
|
-
const direction = trend > 0 ? 'increasing' : 'decreasing'
|
|
700
|
-
console.log( chalk.yellow.bold( `🔔 Warning: ${sensor} is ${direction} rapidly. Consider taking action.` ) )
|
|
701
|
-
|
|
702
|
-
}
|
|
703
|
-
|
|
704
|
-
}
|
|
705
|
-
|
|
706
|
-
}
|
|
707
|
-
|
|
708
|
-
calculateTrend( data ) {
|
|
709
|
-
|
|
710
|
-
const n = data.length
|
|
711
|
-
const sum_x = n * ( n + 1 ) / 2
|
|
712
|
-
const sum_y = data.reduce( ( a, b ) => a + b, 0 )
|
|
713
|
-
const sum_xy = data.reduce( ( sum, y, i ) => sum + y * ( i + 1 ), 0 )
|
|
714
|
-
const sum_xx = n * ( n + 1 ) * ( 2 * n + 1 ) / 6
|
|
715
|
-
|
|
716
|
-
const slope = ( n * sum_xy - sum_x * sum_y ) / ( n * sum_xx - sum_x * sum_x )
|
|
717
|
-
return slope
|
|
718
|
-
|
|
719
|
-
}
|
|
720
|
-
|
|
721
|
-
startMonitoring() {
|
|
722
|
-
|
|
723
|
-
if( this.messages.general.startMonitoring ) console.log( this.messages.general.startMonitoring )
|
|
724
|
-
this.isMonitoring = true
|
|
725
|
-
this.monitoringInterval = setInterval( () => {
|
|
726
|
-
|
|
727
|
-
if ( !this.hibernationMode ) {
|
|
728
|
-
|
|
729
|
-
this.logPlantStatus()
|
|
730
|
-
|
|
731
|
-
}
|
|
732
|
-
|
|
733
|
-
}, 60000 ) // Log every minute
|
|
734
|
-
|
|
735
|
-
// Enable keypress detection
|
|
736
|
-
process.stdin.setRawMode( true )
|
|
737
|
-
process.stdin.resume()
|
|
738
|
-
process.stdin.setEncoding( 'utf8' )
|
|
739
|
-
process.stdin.on( 'data', key => {
|
|
740
|
-
|
|
741
|
-
if ( key === '\u0003' ) { // Ctrl+C
|
|
742
|
-
|
|
743
|
-
process.exit()
|
|
744
|
-
|
|
745
|
-
} else if ( key === '\u000F' ) { // Ctrl+O
|
|
746
|
-
|
|
747
|
-
this.pauseMonitoring()
|
|
748
|
-
|
|
749
|
-
}
|
|
750
|
-
|
|
751
|
-
} )
|
|
752
|
-
|
|
753
|
-
}
|
|
754
|
-
|
|
755
|
-
pauseMonitoring() {
|
|
756
|
-
|
|
757
|
-
clearInterval( this.monitoringInterval )
|
|
758
|
-
this.isMonitoring = false
|
|
759
|
-
this.displaySensorSettingsMenu()
|
|
760
|
-
|
|
761
|
-
}
|
|
762
|
-
|
|
763
|
-
async displaySensorSettingsMenu() {
|
|
764
|
-
|
|
765
|
-
const choices = [
|
|
766
|
-
'Ajustar configuración de sensores',
|
|
767
|
-
'Activar/Desactivar modo de hibernación',
|
|
768
|
-
'Volver a iniciar monitoreo',
|
|
769
|
-
'Salir',
|
|
770
|
-
]
|
|
771
|
-
|
|
772
|
-
const { option } = await enquirer.prompt( {
|
|
773
|
-
type : 'select',
|
|
774
|
-
name : 'option',
|
|
775
|
-
message : 'Monitoreo detenido. ¿Qué deseas hacer?',
|
|
776
|
-
choices : choices,
|
|
777
|
-
} )
|
|
778
|
-
|
|
779
|
-
if ( option === choices[ 0 ] ) {
|
|
780
|
-
|
|
781
|
-
await this.adjustSensorSettings()
|
|
782
|
-
|
|
783
|
-
} else if ( option === choices[ 1 ] ) {
|
|
784
|
-
|
|
785
|
-
await this.toggleHibernation()
|
|
786
|
-
|
|
787
|
-
} else if ( option === choices[ 2 ] ) {
|
|
788
|
-
|
|
789
|
-
this.startMonitoring()
|
|
790
|
-
|
|
791
|
-
} else if ( option === choices[ 3 ] ) {
|
|
792
|
-
|
|
793
|
-
console.log( 'Saliendo del menú de ajustes.' )
|
|
794
|
-
process.exit()
|
|
795
|
-
|
|
796
|
-
}
|
|
797
|
-
|
|
798
|
-
}
|
|
799
|
-
|
|
800
|
-
async adjustSensorSettings() {
|
|
801
|
-
|
|
802
|
-
const sensorSettings = await enquirer.prompt( [
|
|
803
|
-
{
|
|
804
|
-
type : 'input',
|
|
805
|
-
name : 'humidity',
|
|
806
|
-
message : `Humedad ideal (actual: ${this.plantInfo.humidity.min}-${this.plantInfo.humidity.max}%):`,
|
|
807
|
-
default : `${this.plantInfo.humidity.min}-${this.plantInfo.humidity.max}`,
|
|
808
|
-
},
|
|
809
|
-
{
|
|
810
|
-
type : 'input',
|
|
811
|
-
name : 'temperature',
|
|
812
|
-
message : `Temperatura ideal (actual: ${this.plantInfo.temperature.min}-${this.plantInfo.temperature.max}°C):`,
|
|
813
|
-
default : `${this.plantInfo.temperature.min}-${this.plantInfo.temperature.max}`,
|
|
814
|
-
},
|
|
815
|
-
{
|
|
816
|
-
type : 'input',
|
|
817
|
-
name : 'light',
|
|
818
|
-
message : `Luz ideal (actual: ${this.plantInfo.lighting.min}-${this.plantInfo.lighting.max} lux):`,
|
|
819
|
-
default : `${this.plantInfo.lighting.min}-${this.plantInfo.lighting.max}`,
|
|
820
|
-
},
|
|
821
|
-
] )
|
|
822
|
-
|
|
823
|
-
this.plantInfo.humidity = this.parseRange( sensorSettings.humidity )
|
|
824
|
-
this.plantInfo.temperature = this.parseRange( sensorSettings.temperature )
|
|
825
|
-
this.plantInfo.lighting = this.parseRange( sensorSettings.light )
|
|
826
|
-
|
|
827
|
-
this.setupAlerts()
|
|
828
|
-
|
|
829
|
-
console.log( `Nuevos valores ajustados:
|
|
830
|
-
Humedad: ${this.plantInfo.humidity.min}-${this.plantInfo.humidity.max}%
|
|
831
|
-
Temperatura: ${this.plantInfo.temperature.min}-${this.plantInfo.temperature.max}°C
|
|
832
|
-
Luz: ${this.plantInfo.lighting.min}-${this.plantInfo.lighting.max} lux` )
|
|
833
|
-
|
|
834
|
-
}
|
|
835
|
-
|
|
836
|
-
parseRange( rangeString ) {
|
|
837
|
-
|
|
838
|
-
const [ min, max ] = rangeString.split( '-' ).map( Number )
|
|
839
|
-
return {
|
|
840
|
-
min,
|
|
841
|
-
max,
|
|
842
|
-
}
|
|
843
|
-
|
|
844
|
-
}
|
|
845
|
-
|
|
846
|
-
async toggleHibernation() {
|
|
847
|
-
|
|
848
|
-
const { hibernation } = await enquirer.prompt( {
|
|
849
|
-
type : 'confirm',
|
|
850
|
-
name : 'hibernation',
|
|
851
|
-
message : '¿Activar modo de hibernación?',
|
|
852
|
-
default : false,
|
|
853
|
-
} )
|
|
854
|
-
|
|
855
|
-
this.hibernationMode = hibernation
|
|
856
|
-
console.log( `Modo de hibernación ${this.hibernationMode ? 'activado' : 'desactivado'}.` )
|
|
857
|
-
|
|
858
|
-
}
|
|
859
|
-
|
|
860
|
-
checkAlerts() {
|
|
861
|
-
|
|
862
|
-
if ( !this.hasSensors ) return
|
|
863
|
-
|
|
864
|
-
for ( const [ sensor, value ] of Object.entries( this.sensors ) ) {
|
|
865
|
-
|
|
866
|
-
if ( value === null || value === undefined || isNaN( value ) ) continue
|
|
867
|
-
if ( value < this.alerts[ sensor ].min ) {
|
|
868
|
-
|
|
869
|
-
console.log( chalk.red( this.messages.alerts[ sensor ].low.replace( '{value}', value.toFixed( 2 ) ) ) )
|
|
870
|
-
|
|
871
|
-
} else if ( value > this.alerts[ sensor ].max ) {
|
|
872
|
-
|
|
873
|
-
console.log( chalk.red( this.messages.alerts[ sensor ].high.replace( '{value}', value.toFixed( 2 ) ) ) )
|
|
874
|
-
|
|
875
|
-
}
|
|
876
|
-
|
|
877
|
-
}
|
|
878
|
-
|
|
879
|
-
}
|
|
880
|
-
|
|
92
|
+
constructor() {
|
|
93
|
+
this.language = 'en';
|
|
94
|
+
this.messages = null;
|
|
95
|
+
this.platform = null;
|
|
96
|
+
this.aiClient = null;
|
|
97
|
+
this.plantName = '';
|
|
98
|
+
this.plantType = null;
|
|
99
|
+
this.plantInfo = null;
|
|
100
|
+
this.alerts = {};
|
|
101
|
+
this.sensors = { humidity: null, light: null, temperature: null };
|
|
102
|
+
this.historicalData = [];
|
|
103
|
+
this.isMonitoring = false;
|
|
104
|
+
this.hibernationMode = false;
|
|
105
|
+
this.hasSensors = false;
|
|
106
|
+
this.serialPort = null;
|
|
107
|
+
this.monitoringInterval = null;
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
async init() { this.messages = loadMessages(this.language); }
|
|
111
|
+
|
|
112
|
+
async setLanguage(lang) {
|
|
113
|
+
this.language = lang;
|
|
114
|
+
this.messages = loadMessages(lang);
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
welcome() {
|
|
118
|
+
console.log('\n' + chalk.bold(this.messages.general.welcome) + '\n');
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
/**
 * Interactive entry point: walks the user through language, platform,
 * AI-provider and plant setup, then begins periodic monitoring.
 * The steps are order-sensitive: messages must be loaded before any
 * prompt that reads this.messages, and plantInfo must exist before
 * setupAlerts() derives thresholds from it.
 */
async start() {
// Load default messages so the very first prompts have text.
await this.init();
this.welcome();
await this.selectLanguage();
await this.selectPlatform();
await this.selectAIMethod();
console.log(); // blank line separating setup sections
await this.selectPlantType();
await this.setPlantName();
// Asks the AI for care ranges; populates this.plantInfo.
await this.generatePlantInfo();
await this.setupSensors();
// Thresholds come from plantInfo, so this must follow generatePlantInfo().
this.setupAlerts();
this.startMonitoring();
}
|
|
135
|
+
|
|
136
|
+
/**
 * Prompt the user for the UI language and activate it.
 *
 * NOTE(review): 'Nederlands' ('nl') is offered here but messagesMap has
 * no Dutch bundle, so loadMessages() silently falls back to English —
 * confirm whether that is intended or a bundle is missing.
 * NOTE(review): some enquirer versions return the choice `name` (label)
 * rather than `value` from select/autocomplete prompts; if so `language`
 * would be e.g. 'English' instead of 'en' and every locale would fall
 * back to English — verify against the pinned enquirer version.
 */
async selectLanguage() {
const { language } = await enquirer.prompt({
type: 'autocomplete',
name: 'language',
message: 'Select language:',
choices: [
{ name: 'English', value: 'en' }, { name: 'Español', value: 'es' },
{ name: 'Français', value: 'fr' }, { name: 'Deutsch', value: 'de' },
{ name: 'Italiano', value: 'it' }, { name: 'Português', value: 'pt' },
{ name: 'Nederlands', value: 'nl' }, { name: 'Русский', value: 'ru' },
{ name: '中文', value: 'zh' }, { name: '日本語', value: 'ja' },
],
});
await this.setLanguage(language);
}
|
|
151
|
+
|
|
152
|
+
async selectPlatform() {
|
|
153
|
+
const { platform } = await enquirer.prompt({
|
|
154
|
+
type: 'select',
|
|
155
|
+
name: 'platform',
|
|
156
|
+
message: this.messages.general.selectPlatform,
|
|
157
|
+
choices: [
|
|
158
|
+
{ name: 'Raspberry Pi', value: 'raspberry' },
|
|
159
|
+
{ name: 'Arduino', value: 'arduino' },
|
|
160
|
+
],
|
|
161
|
+
});
|
|
162
|
+
this.platform = platform;
|
|
163
|
+
await this.setupPlatform();
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
async setupPlatform() {
|
|
167
|
+
if (this.platform === 'raspberry') {
|
|
168
|
+
console.log(chalk.bold('Setting up Raspberry Pi...'));
|
|
169
|
+
this.hasSensors = true;
|
|
170
|
+
} else if (this.platform === 'arduino') {
|
|
171
|
+
console.log(chalk.bold('Setting up Arduino...'));
|
|
172
|
+
this.serialPort = new SerialPort({ path: '/dev/ttyACM0', baudRate: 9600 });
|
|
173
|
+
this.serialPort.pipe(new ReadlineParser({ delimiter: '\r\n' })).on('data', data => this.handleArduinoData(data));
|
|
174
|
+
console.log('Arduino setup complete. Upload arduino_dht22.ino first.');
|
|
175
|
+
this.hasSensors = true;
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
handleArduinoData(data) {
|
|
180
|
+
const [temperature, humidity, light] = data.split(',').map(Number);
|
|
181
|
+
this.sensors.temperature = temperature;
|
|
182
|
+
this.sensors.humidity = humidity;
|
|
183
|
+
this.sensors.light = light;
|
|
184
|
+
this.checkAlerts();
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
async selectAIMethod() {
|
|
188
|
+
const { provider } = await enquirer.prompt({
|
|
189
|
+
type: 'select',
|
|
190
|
+
name: 'provider',
|
|
191
|
+
message: 'Elige proveedor de IA:',
|
|
192
|
+
choices: [
|
|
193
|
+
{ message: 'OpenAI (GPT-4o-mini)', value: 'openai' },
|
|
194
|
+
{ message: 'Grok (xAI)', value: 'grok' },
|
|
195
|
+
{ message: 'Claude (Anthropic)', value: 'claude' },
|
|
196
|
+
{ message: 'Gemini (Google)', value: 'gemini' },
|
|
197
|
+
{ message: 'Local (Ollama)', value: 'local' },
|
|
198
|
+
],
|
|
199
|
+
});
|
|
200
|
+
|
|
201
|
+
let apiKey = null;
|
|
202
|
+
let localModel = null;
|
|
203
|
+
|
|
204
|
+
if (provider !== 'local') {
|
|
205
|
+
const { key } = await enquirer.prompt({
|
|
206
|
+
type: 'password',
|
|
207
|
+
name: 'key',
|
|
208
|
+
message: `Introduce tu API key de ${provider.toUpperCase()}:`,
|
|
209
|
+
});
|
|
210
|
+
apiKey = key;
|
|
211
|
+
} else {
|
|
212
|
+
localModel = await this.selectLocalModel();
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
this.aiClient = new AIService(provider, apiKey, localModel);
|
|
216
|
+
console.log(chalk.green('✅ IA conectada correctamente! 🤖✨'));
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
async selectLocalModel() {
|
|
220
|
+
try {
|
|
221
|
+
const output = execSync('ollama list', { encoding: 'utf-8' });
|
|
222
|
+
const models = output.split('\n').filter(l => l.trim() && !l.startsWith('NAME')).map(l => l.split(' ')[0]);
|
|
223
|
+
if (models.length === 0) throw new Error();
|
|
224
|
+
const { model } = await enquirer.prompt({
|
|
225
|
+
type: 'select',
|
|
226
|
+
name: 'model',
|
|
227
|
+
message: 'Selecciona modelo local:',
|
|
228
|
+
choices: models,
|
|
229
|
+
});
|
|
230
|
+
return model;
|
|
231
|
+
} catch {
|
|
232
|
+
console.log(chalk.yellow('No se encontraron modelos Ollama.'));
|
|
233
|
+
process.exit(0);
|
|
234
|
+
}
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
async selectPlantType() {
|
|
238
|
+
const { type } = await enquirer.prompt({
|
|
239
|
+
type: 'select',
|
|
240
|
+
name: 'type',
|
|
241
|
+
message: this.messages.general.selectPlantType,
|
|
242
|
+
choices: [
|
|
243
|
+
{ name: this.messages.general.indoor, value: 'indoor' },
|
|
244
|
+
{ name: this.messages.general.outdoor, value: 'outdoor' },
|
|
245
|
+
],
|
|
246
|
+
});
|
|
247
|
+
this.plantType = type;
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
async setPlantName() {
|
|
251
|
+
const { name } = await enquirer.prompt({
|
|
252
|
+
type: 'input',
|
|
253
|
+
name: 'name',
|
|
254
|
+
message: this.messages.general.enterPlantName,
|
|
255
|
+
});
|
|
256
|
+
this.plantName = name;
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
async generatePlantInfo() {
|
|
260
|
+
console.log(chalk.bold('🔍🌿 Generando información de la planta...'));
|
|
261
|
+
const prompt = `Provide a comprehensive summary for ${this.plantName} (${this.plantType}) including: Lighting, Watering, Temperature, Humidity, Soil, Fertilization, Pruning, and Propagation. Also give ranges in format: "Lighting: X-Y lux, Temperature: A-B°C, Humidity: C-D%".`;
|
|
262
|
+
const response = await this.aiClient.generateResponse(prompt, this.language);
|
|
263
|
+
if (!response) {
|
|
264
|
+
console.log(chalk.red('❌ No se pudo generar la información de la planta.'));
|
|
265
|
+
return;
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
const lightingMatch = response.match(/Lighting:\s*(\d+)-(\d+)\s*lux/i);
|
|
269
|
+
const tempMatch = response.match(/Temperature:\s*(\d+)-(\d+)\s*°C/i);
|
|
270
|
+
const humMatch = response.match(/Humidity:\s*(\d+)-(\d+)%/i);
|
|
271
|
+
|
|
272
|
+
this.plantInfo = {
|
|
273
|
+
summary: response,
|
|
274
|
+
lighting: lightingMatch ? { min: +lightingMatch[1], max: +lightingMatch[2] } : { min: 50, max: 700 },
|
|
275
|
+
temperature: tempMatch ? { min: +tempMatch[1], max: +tempMatch[2] } : { min: 18, max: 24 },
|
|
276
|
+
humidity: humMatch ? { min: +humMatch[1], max: +humMatch[2] } : { min: 40, max: 60 },
|
|
277
|
+
};
|
|
278
|
+
console.log(chalk.green('✅ Información de planta generada!'));
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
async setupSensors() {
|
|
282
|
+
console.log(chalk.bold(this.messages.general.settingUpSensors));
|
|
283
|
+
if (!this.hasSensors) console.log(chalk.yellow('Modo simulación activado.'));
|
|
284
|
+
console.log(chalk.bold(this.messages.general.sensorsReady));
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
setupAlerts() {
|
|
288
|
+
if (!this.plantInfo) return;
|
|
289
|
+
this.alerts = {
|
|
290
|
+
humidity: { min: this.plantInfo.humidity.min, max: this.plantInfo.humidity.max },
|
|
291
|
+
light: { min: this.plantInfo.lighting.min, max: this.plantInfo.lighting.max },
|
|
292
|
+
temperature: { min: this.plantInfo.temperature.min, max: this.plantInfo.temperature.max },
|
|
293
|
+
};
|
|
294
|
+
if (this.messages.general.alertsSet) console.log(this.messages.general.alertsSet);
|
|
295
|
+
}
|
|
296
|
+
|
|
297
|
+
calculatePercentage(v, min, max) {
|
|
298
|
+
if (!v || isNaN(v)) return 0;
|
|
299
|
+
return Math.min(Math.max(((v - min) / (max - min)) * 100, 0), 100);
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
getHumidityEmoji(p) { if (p <= 0) return '🍂'; if (p <= 30) return '🍂'; if (p <= 60) return '🌿'; if (p <= 80) return '💧'; return '🌊'; }
|
|
303
|
+
getLightEmoji(p) { if (p <= 0) return '🌑'; if (p <= 30) return '🌑'; if (p <= 60) return '🌥'; return '🌞'; }
|
|
304
|
+
getTemperatureEmoji(p) { if (p <= 0) return '🧊'; if (p <= 30) return '🧊'; if (p <= 80) return '🌡️'; return '🔥'; }
|
|
305
|
+
getHappinessEmoji(avg) {
|
|
306
|
+
if (avg <= 0) return '😵';
|
|
307
|
+
if (avg >= 90) return '🤩';
|
|
308
|
+
if (avg >= 75) return '😊';
|
|
309
|
+
if (avg >= 60) return '😐';
|
|
310
|
+
if (avg >= 45) return '😞';
|
|
311
|
+
if (avg >= 30) return '😖';
|
|
312
|
+
return '🥵';
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
logPlantStatus() {
|
|
316
|
+
const allSensorsZero = Object.values(this.sensors).every(value => value === 0 || value === null || value === undefined);
|
|
317
|
+
if (!this.hasSensors || allSensorsZero) {
|
|
318
|
+
console.log(chalk.yellow('🔔 Warning: No data input or sensors are disconnected.'));
|
|
319
|
+
console.log('😵 | Lighting: 🌑 (0%) | Temperature: 🧊 (0.0°C) | Humidity: 🍂 (0%)');
|
|
320
|
+
return;
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
const humidityPercentage = this.calculatePercentage(this.sensors.humidity, this.alerts.humidity.min, this.alerts.humidity.max);
|
|
324
|
+
const lightPercentage = this.calculatePercentage(this.sensors.light, this.alerts.light.min, this.alerts.light.max);
|
|
325
|
+
const temperaturePercentage = this.calculatePercentage(this.sensors.temperature, this.alerts.temperature.min, this.alerts.temperature.max);
|
|
326
|
+
const averagePercentage = (humidityPercentage + lightPercentage + temperaturePercentage) / 3;
|
|
327
|
+
|
|
328
|
+
const happinessEmoji = this.getHappinessEmoji(averagePercentage);
|
|
329
|
+
const humidityEmoji = this.getHumidityEmoji(humidityPercentage);
|
|
330
|
+
const lightEmoji = this.getLightEmoji(lightPercentage);
|
|
331
|
+
const temperatureEmoji = this.getTemperatureEmoji(temperaturePercentage);
|
|
332
|
+
|
|
333
|
+
const status = `${happinessEmoji} | Lighting: ${lightEmoji} (${lightPercentage.toFixed(0)}%) | Temperature: ${temperatureEmoji} (${this.sensors.temperature?.toFixed(1) || 0.0}°C) | Humidity: ${humidityEmoji} (${humidityPercentage.toFixed(0)}%)`;
|
|
334
|
+
console.log(status);
|
|
335
|
+
|
|
336
|
+
this.historicalData.push({ timestamp: new Date(), humidity: this.sensors.humidity, light: this.sensors.light, temperature: this.sensors.temperature });
|
|
337
|
+
this.predictCriticalState();
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
predictCriticalState() {
|
|
341
|
+
if (this.historicalData.length < 10) return;
|
|
342
|
+
const recentData = this.historicalData.slice(-10);
|
|
343
|
+
const trends = {
|
|
344
|
+
humidity: this.calculateTrend(recentData.map(d => d.humidity)),
|
|
345
|
+
light: this.calculateTrend(recentData.map(d => d.light)),
|
|
346
|
+
temperature: this.calculateTrend(recentData.map(d => d.temperature)),
|
|
347
|
+
};
|
|
348
|
+
for (const [sensor, trend] of Object.entries(trends)) {
|
|
349
|
+
if (Math.abs(trend) > 0.5) {
|
|
350
|
+
const direction = trend > 0 ? 'increasing' : 'decreasing';
|
|
351
|
+
console.log(chalk.yellow.bold(`🔔 Warning: ${sensor} is ${direction} rapidly. Consider taking action.`));
|
|
352
|
+
}
|
|
353
|
+
}
|
|
354
|
+
}
|
|
355
|
+
|
|
356
|
+
calculateTrend(data) {
|
|
357
|
+
const n = data.length;
|
|
358
|
+
const sum_x = n * (n + 1) / 2;
|
|
359
|
+
const sum_y = data.reduce((a, b) => a + b, 0);
|
|
360
|
+
const sum_xy = data.reduce((sum, y, i) => sum + y * (i + 1), 0);
|
|
361
|
+
const sum_xx = n * (n + 1) * (2 * n + 1) / 6;
|
|
362
|
+
return (n * sum_xy - sum_x * sum_y) / (n * sum_xx - sum_x * sum_x);
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
startMonitoring() {
|
|
366
|
+
console.log(this.messages.general.startMonitoring || 'Monitoreo iniciado...');
|
|
367
|
+
this.isMonitoring = true;
|
|
368
|
+
this.monitoringInterval = setInterval(() => {
|
|
369
|
+
if (!this.hibernationMode) this.logPlantStatus();
|
|
370
|
+
}, 60000);
|
|
371
|
+
|
|
372
|
+
process.stdin.setRawMode(true);
|
|
373
|
+
process.stdin.resume();
|
|
374
|
+
process.stdin.setEncoding('utf8');
|
|
375
|
+
process.stdin.on('data', key => {
|
|
376
|
+
if (key === '\u0003') process.exit();
|
|
377
|
+
if (key === '\u000F') this.pauseMonitoring();
|
|
378
|
+
});
|
|
379
|
+
}
|
|
380
|
+
|
|
381
|
+
pauseMonitoring() {
|
|
382
|
+
clearInterval(this.monitoringInterval);
|
|
383
|
+
this.isMonitoring = false;
|
|
384
|
+
this.displaySensorSettingsMenu();
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
async displaySensorSettingsMenu() {
|
|
388
|
+
const choices = ['Ajustar configuración de sensores', 'Activar/Desactivar modo de hibernación', 'Volver a iniciar monitoreo', 'Salir'];
|
|
389
|
+
const { option } = await enquirer.prompt({ type: 'select', name: 'option', message: 'Monitoreo detenido. ¿Qué deseas hacer?', choices });
|
|
390
|
+
if (option === choices[0]) await this.adjustSensorSettings();
|
|
391
|
+
else if (option === choices[1]) await this.toggleHibernation();
|
|
392
|
+
else if (option === choices[2]) this.startMonitoring();
|
|
393
|
+
else if (option === choices[3]) process.exit();
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
async adjustSensorSettings() {
|
|
397
|
+
const sensorSettings = await enquirer.prompt([
|
|
398
|
+
{ type: 'input', name: 'humidity', message: `Humedad ideal (actual: ${this.plantInfo.humidity.min}-${this.plantInfo.humidity.max}%):`, default: `${this.plantInfo.humidity.min}-${this.plantInfo.humidity.max}` },
|
|
399
|
+
{ type: 'input', name: 'temperature', message: `Temperatura ideal (actual: ${this.plantInfo.temperature.min}-${this.plantInfo.temperature.max}°C):`, default: `${this.plantInfo.temperature.min}-${this.plantInfo.temperature.max}` },
|
|
400
|
+
{ type: 'input', name: 'light', message: `Luz ideal (actual: ${this.plantInfo.lighting.min}-${this.plantInfo.lighting.max} lux):`, default: `${this.plantInfo.lighting.min}-${this.plantInfo.lighting.max}` },
|
|
401
|
+
]);
|
|
402
|
+
this.plantInfo.humidity = this.parseRange(sensorSettings.humidity);
|
|
403
|
+
this.plantInfo.temperature = this.parseRange(sensorSettings.temperature);
|
|
404
|
+
this.plantInfo.lighting = this.parseRange(sensorSettings.light);
|
|
405
|
+
this.setupAlerts();
|
|
406
|
+
console.log(`Nuevos valores ajustados:\nHumedad: ${this.plantInfo.humidity.min}-${this.plantInfo.humidity.max}%\nTemperatura: ${this.plantInfo.temperature.min}-${this.plantInfo.temperature.max}°C\nLuz: ${this.plantInfo.lighting.min}-${this.plantInfo.lighting.max} lux`);
|
|
407
|
+
}
|
|
408
|
+
|
|
409
|
+
parseRange(rangeString) {
|
|
410
|
+
const [min, max] = rangeString.split('-').map(Number);
|
|
411
|
+
return { min, max };
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
async toggleHibernation() {
|
|
415
|
+
const { hibernation } = await enquirer.prompt({
|
|
416
|
+
type: 'confirm',
|
|
417
|
+
name: 'hibernation',
|
|
418
|
+
message: '¿Activar modo de hibernación?',
|
|
419
|
+
default: false,
|
|
420
|
+
});
|
|
421
|
+
this.hibernationMode = hibernation;
|
|
422
|
+
console.log(`Modo de hibernación ${this.hibernationMode ? 'activado' : 'desactivado'}.`);
|
|
423
|
+
}
|
|
424
|
+
|
|
425
|
+
checkAlerts() {
|
|
426
|
+
if (!this.hasSensors) return;
|
|
427
|
+
for (const [sensor, value] of Object.entries(this.sensors)) {
|
|
428
|
+
if (value === null || value === undefined || isNaN(value)) continue;
|
|
429
|
+
if (value < this.alerts[sensor].min) console.log(chalk.red(this.messages.alerts[sensor].low.replace('{value}', value.toFixed(2))));
|
|
430
|
+
else if (value > this.alerts[sensor].max) console.log(chalk.red(this.messages.alerts[sensor].high.replace('{value}', value.toFixed(2))));
|
|
431
|
+
}
|
|
432
|
+
}
|
|
881
433
|
}
|
|
434
|
+
|
|
435
|
+
// Entry point: construct the plant assistant and launch the interactive
// flow. start() is async; the .catch prints a friendly message instead of
// letting the rejection go unhandled.
const plant = new SmartPlant();
plant.start().catch(err => console.error(chalk.red('❌ Error al iniciar:'), err.message));
|