agentic-flow 1.1.2 → 1.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/dist/agents/directApiAgent.js +108 -22
  2. package/dist/cli-proxy.js +30 -5
  3. package/dist/router/providers/gemini.js +34 -58
  4. package/dist/router/router.js +55 -9
  5. package/dist/utils/cli.js +4 -0
  6. package/dist/utils/logger.js +4 -0
  7. package/docs/PACKAGE_STRUCTURE.md +199 -0
  8. package/package.json +1 -1
  9. package/.claude/commands/coordination/README.md +0 -9
  10. package/.claude/commands/coordination/agent-spawn.md +0 -25
  11. package/.claude/commands/coordination/init.md +0 -44
  12. package/.claude/commands/coordination/orchestrate.md +0 -43
  13. package/.claude/commands/coordination/spawn.md +0 -45
  14. package/.claude/commands/coordination/swarm-init.md +0 -85
  15. package/.claude/commands/coordination/task-orchestrate.md +0 -25
  16. package/.claude/commands/memory/README.md +0 -9
  17. package/.claude/commands/memory/memory-persist.md +0 -25
  18. package/.claude/commands/memory/memory-search.md +0 -25
  19. package/.claude/commands/memory/memory-usage.md +0 -25
  20. package/.claude/commands/memory/neural.md +0 -47
  21. package/.claude/commands/memory/usage.md +0 -46
  22. package/dist/cli.js +0 -158
  23. package/dist/coordination/parallelSwarm.js +0 -226
  24. package/dist/index-with-proxy.js +0 -101
  25. package/dist/mcp/fastmcp/tools/memory/retrieve.js +0 -38
  26. package/dist/mcp/fastmcp/tools/memory/search.js +0 -41
  27. package/dist/mcp/fastmcp/tools/memory/store.js +0 -56
  28. package/docs/.claude-flow/metrics/agent-metrics.json +0 -1
  29. package/docs/.claude-flow/metrics/performance.json +0 -9
  30. package/docs/.claude-flow/metrics/task-metrics.json +0 -10
@@ -1,10 +1,12 @@
1
- // Direct Anthropic API agent with in-process tool execution (no subprocess)
1
+ // Direct API agent with multi-provider support (Anthropic, OpenRouter, Gemini)
2
2
  import Anthropic from '@anthropic-ai/sdk';
3
3
  import { logger } from '../utils/logger.js';
4
4
  import { withRetry } from '../utils/retry.js';
5
5
  import { execSync } from 'child_process';
6
- // Lazy initialize Anthropic client to allow runtime API key validation
6
+ import { ModelRouter } from '../router/router.js';
7
+ // Lazy initialize clients
7
8
  let anthropic = null;
9
+ let router = null;
8
10
  function getAnthropicClient() {
9
11
  if (!anthropic) {
10
12
  const apiKey = process.env.ANTHROPIC_API_KEY;
@@ -21,6 +23,26 @@ function getAnthropicClient() {
21
23
  }
22
24
  return anthropic;
23
25
  }
26
+ function getRouter() {
27
+ if (!router) {
28
+ // Router will now auto-create config from environment variables if no file exists
29
+ router = new ModelRouter();
30
+ }
31
+ return router;
32
+ }
33
+ function getCurrentProvider() {
34
+ // Determine provider from environment
35
+ if (process.env.PROVIDER === 'gemini' || process.env.USE_GEMINI === 'true') {
36
+ return 'gemini';
37
+ }
38
+ if (process.env.PROVIDER === 'openrouter' || process.env.USE_OPENROUTER === 'true') {
39
+ return 'openrouter';
40
+ }
41
+ if (process.env.PROVIDER === 'onnx' || process.env.USE_ONNX === 'true') {
42
+ return 'onnx';
43
+ }
44
+ return 'anthropic';
45
+ }
24
46
  // Define claude-flow tools as native Anthropic tool definitions
25
47
  const claudeFlowTools = [
26
48
  {
@@ -194,31 +216,95 @@ export async function directApiAgent(agent, input, onStream) {
194
216
  // Agentic loop: keep calling API until no more tool uses
195
217
  while (toolUseCount < maxToolUses) {
196
218
  logger.debug('API call iteration', { toolUseCount, messagesLength: messages.length });
197
- const client = getAnthropicClient();
219
+ const provider = getCurrentProvider();
198
220
  let response;
199
221
  try {
200
- response = await client.messages.create({
201
- model: 'claude-sonnet-4-5-20250929',
202
- max_tokens: 8192,
203
- system: agent.systemPrompt || 'You are a helpful AI assistant.',
204
- messages,
205
- tools: claudeFlowTools
206
- });
222
+ // Use router for non-Anthropic providers
223
+ if (provider === 'gemini' || provider === 'openrouter') {
224
+ const routerInstance = getRouter();
225
+ // Convert Anthropic messages format to router format
226
+ const routerMessages = messages.map(msg => ({
227
+ role: msg.role,
228
+ content: typeof msg.content === 'string' ? msg.content : msg.content.map((block) => {
229
+ if ('text' in block)
230
+ return { type: 'text', text: block.text };
231
+ if ('tool_use_id' in block)
232
+ return {
233
+ type: 'tool_result',
234
+ content: block.content
235
+ };
236
+ if ('name' in block && 'input' in block)
237
+ return {
238
+ type: 'tool_use',
239
+ id: block.id,
240
+ name: block.name,
241
+ input: block.input
242
+ };
243
+ return { type: 'text', text: '' };
244
+ }).filter((b) => b.type === 'text' || b.type === 'tool_use' || b.type === 'tool_result')
245
+ }));
246
+ // Add system prompt as first message if needed
247
+ const messagesWithSystem = agent.systemPrompt
248
+ ? [{ role: 'system', content: agent.systemPrompt }, ...routerMessages]
249
+ : routerMessages;
250
+ const params = {
251
+ model: provider === 'gemini'
252
+ ? (process.env.COMPLETION_MODEL || 'gemini-2.0-flash-exp')
253
+ : (process.env.COMPLETION_MODEL || 'meta-llama/llama-3.1-8b-instruct'),
254
+ messages: messagesWithSystem,
255
+ maxTokens: 8192,
256
+ temperature: 0.7
257
+ };
258
+ const routerResponse = await routerInstance.chat(params);
259
+ // Convert router response to Anthropic format
260
+ response = {
261
+ id: routerResponse.id,
262
+ model: routerResponse.model,
263
+ stop_reason: routerResponse.stopReason,
264
+ content: routerResponse.content.map(block => {
265
+ if (block.type === 'text')
266
+ return { type: 'text', text: block.text || '' };
267
+ if (block.type === 'tool_use')
268
+ return {
269
+ type: 'tool_use',
270
+ id: block.id || '',
271
+ name: block.name || '',
272
+ input: block.input || {}
273
+ };
274
+ return { type: 'text', text: '' };
275
+ })
276
+ };
277
+ }
278
+ else {
279
+ // Use Anthropic client for Anthropic provider
280
+ const client = getAnthropicClient();
281
+ response = await client.messages.create({
282
+ model: process.env.COMPLETION_MODEL || 'claude-sonnet-4-5-20250929',
283
+ max_tokens: 8192,
284
+ system: agent.systemPrompt || 'You are a helpful AI assistant.',
285
+ messages,
286
+ tools: claudeFlowTools
287
+ });
288
+ }
207
289
  }
208
290
  catch (error) {
209
291
  // Enhance authentication errors with helpful guidance
210
- if (error?.status === 401) {
211
- const apiKey = process.env.ANTHROPIC_API_KEY;
212
- throw new Error(`❌ Anthropic API authentication failed (401)\n\n` +
292
+ if (error?.status === 401 || error?.statusCode === 401) {
293
+ const providerName = provider === 'gemini' ? 'Google Gemini' : provider === 'openrouter' ? 'OpenRouter' : 'Anthropic';
294
+ const apiKey = provider === 'gemini'
295
+ ? process.env.GOOGLE_GEMINI_API_KEY
296
+ : provider === 'openrouter'
297
+ ? process.env.OPENROUTER_API_KEY
298
+ : process.env.ANTHROPIC_API_KEY;
299
+ throw new Error(`❌ ${providerName} API authentication failed (401)\n\n` +
213
300
  `Your API key is invalid, expired, or lacks permissions.\n` +
214
301
  `Current key: ${apiKey?.substring(0, 15)}...\n\n` +
215
- `Please:\n` +
216
- ` 1. Check your key at: https://console.anthropic.com/settings/keys\n` +
217
- ` 2. Verify it's not expired\n` +
218
- ` 3. Ensure it has proper permissions\n` +
219
- ` 4. Update your .env file with: ANTHROPIC_API_KEY=sk-ant-...\n\n` +
220
- `Alternative: Use OpenRouter instead (--model "meta-llama/llama-3.1-8b-instruct")\n` +
221
- `Or use local ONNX (--provider onnx)`);
302
+ `Please check your ${providerName} API key and update your .env file.\n\n` +
303
+ `Alternative providers:\n` +
304
+ ` --provider anthropic (Claude models)\n` +
305
+ ` --provider openrouter (100+ models, 99% cost savings)\n` +
306
+ ` --provider gemini (Google models)\n` +
307
+ ` --provider onnx (free local inference)`);
222
308
  }
223
309
  throw error;
224
310
  }
@@ -272,9 +358,9 @@ export async function directApiAgent(agent, input, onStream) {
272
358
  });
273
359
  }
274
360
  // Stop if no tool use or end_turn
275
- if (response.stop_reason === 'end_turn' || response.content.every(b => b.type === 'text')) {
361
+ if (response.stop_reason === 'end_turn' || response.content.every((b) => b.type === 'text')) {
276
362
  // Add final assistant message if it has text
277
- const textContent = response.content.filter(b => b.type === 'text');
363
+ const textContent = response.content.filter((b) => b.type === 'text');
278
364
  if (textContent.length > 0 && messages[messages.length - 1].role !== 'assistant') {
279
365
  messages.push({
280
366
  role: 'assistant',
package/dist/cli-proxy.js CHANGED
@@ -3,7 +3,27 @@
3
3
  * Agentic Flow - Standalone CLI with integrated OpenRouter proxy
4
4
  * Usage: npx agentic-flow-proxy --agent coder --task "Create code" --openrouter
5
5
  */
6
- import "dotenv/config";
6
+ import dotenv from "dotenv";
7
+ import { existsSync, readFileSync } from 'fs';
8
+ import { resolve as pathResolve, dirname } from 'path';
9
+ import { fileURLToPath } from 'url';
10
+ // Load .env from current directory, or search up the directory tree
11
+ function loadEnvRecursive(startPath = process.cwd()) {
12
+ let currentPath = startPath;
13
+ const root = pathResolve('/');
14
+ while (currentPath !== root) {
15
+ const envPath = pathResolve(currentPath, '.env');
16
+ if (existsSync(envPath)) {
17
+ dotenv.config({ path: envPath });
18
+ return true;
19
+ }
20
+ currentPath = pathResolve(currentPath, '..');
21
+ }
22
+ // Fallback to default behavior
23
+ dotenv.config();
24
+ return false;
25
+ }
26
+ loadEnvRecursive();
7
27
  import { AnthropicToOpenRouterProxy } from "./proxy/anthropic-to-openrouter.js";
8
28
  import { logger } from "./utils/logger.js";
9
29
  import { parseArgs } from "./utils/cli.js";
@@ -12,18 +32,19 @@ import { directApiAgent } from "./agents/directApiAgent.js";
12
32
  import { handleConfigCommand } from "./cli/config-wizard.js";
13
33
  import { handleAgentCommand } from "./cli/agent-manager.js";
14
34
  import { ModelOptimizer } from "./utils/modelOptimizer.js";
15
- import { readFileSync } from 'fs';
16
- import { resolve, dirname } from 'path';
17
- import { fileURLToPath } from 'url';
18
35
  const __filename = fileURLToPath(import.meta.url);
19
36
  const __dirname = dirname(__filename);
20
- const packageJson = JSON.parse(readFileSync(resolve(__dirname, '../package.json'), 'utf-8'));
37
+ const packageJson = JSON.parse(readFileSync(pathResolve(__dirname, '../package.json'), 'utf-8'));
21
38
  const VERSION = packageJson.version;
22
39
  class AgenticFlowCLI {
23
40
  proxyServer = null;
24
41
  proxyPort = 3000;
25
42
  async start() {
26
43
  const options = parseArgs();
44
+ if (options.version) {
45
+ console.log(`agentic-flow v${VERSION}`);
46
+ process.exit(0);
47
+ }
27
48
  if (options.help) {
28
49
  this.printHelp();
29
50
  process.exit(0);
@@ -193,6 +214,10 @@ class AgenticFlowCLI {
193
214
  this.printHelp();
194
215
  process.exit(1);
195
216
  }
217
+ // Set PROVIDER environment variable if --provider flag is used
218
+ if (options.provider) {
219
+ process.env.PROVIDER = options.provider;
220
+ }
196
221
  // Check for API key (unless using ONNX)
197
222
  const isOnnx = options.provider === 'onnx' || process.env.USE_ONNX === 'true' || process.env.PROVIDER === 'onnx';
198
223
  if (!isOnnx && !useOpenRouter && !useGemini && !process.env.ANTHROPIC_API_KEY) {
@@ -4,18 +4,16 @@ export class GeminiProvider {
4
4
  name = 'gemini';
5
5
  type = 'gemini';
6
6
  supportsStreaming = true;
7
- supportsTools = false; // Will add function calling support later
7
+ supportsTools = false;
8
8
  supportsMCP = false;
9
- client; // GoogleGenAI instance
9
+ client;
10
10
  config;
11
11
  constructor(config) {
12
12
  this.config = config;
13
13
  if (!config.apiKey) {
14
14
  throw new Error('Google Gemini API key is required');
15
15
  }
16
- this.client = new GoogleGenAI({
17
- apiKey: config.apiKey
18
- });
16
+ this.client = new GoogleGenAI({ apiKey: config.apiKey });
19
17
  }
20
18
  validateCapabilities(features) {
21
19
  const supported = ['chat', 'streaming'];
@@ -23,37 +21,33 @@ export class GeminiProvider {
23
21
  }
24
22
  async chat(params) {
25
23
  try {
26
- const startTime = Date.now();
27
- // Convert messages format
28
- const contents = this.convertMessages(params.messages);
24
+ // Convert messages to Gemini format
25
+ const contents = params.messages.map(msg => ({
26
+ role: msg.role === 'assistant' ? 'model' : 'user',
27
+ parts: [{ text: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content) }]
28
+ }));
29
29
  const response = await this.client.models.generateContent({
30
30
  model: params.model || 'gemini-2.0-flash-exp',
31
31
  contents,
32
- generationConfig: {
32
+ config: {
33
33
  temperature: params.temperature,
34
- maxOutputTokens: params.maxTokens || 4096
34
+ maxOutputTokens: params.maxTokens || 8192
35
35
  }
36
36
  });
37
- const latency = Date.now() - startTime;
38
- // Extract text from response
39
37
  const text = response.text || '';
40
- const usage = {
41
- inputTokens: response.usageMetadata?.promptTokenCount || 0,
42
- outputTokens: response.usageMetadata?.candidatesTokenCount || 0
43
- };
44
38
  return {
45
- id: `gemini-${Date.now()}`,
39
+ id: crypto.randomUUID(),
46
40
  model: params.model || 'gemini-2.0-flash-exp',
47
- content: [{
48
- type: 'text',
49
- text
50
- }],
51
- stopReason: response.candidates?.[0]?.finishReason === 'STOP' ? 'end_turn' : 'max_tokens',
52
- usage,
41
+ content: [{ type: 'text', text }],
42
+ stopReason: 'end_turn',
43
+ usage: {
44
+ inputTokens: response.usageMetadata?.promptTokenCount || 0,
45
+ outputTokens: response.usageMetadata?.candidatesTokenCount || 0
46
+ },
53
47
  metadata: {
54
48
  provider: 'gemini',
55
- cost: this.calculateCost(usage),
56
- latency
49
+ cost: this.calculateCost(response.usageMetadata || {}),
50
+ latency: 0
57
51
  }
58
52
  };
59
53
  }
@@ -63,25 +57,24 @@ export class GeminiProvider {
63
57
  }
64
58
  async *stream(params) {
65
59
  try {
66
- // Convert messages format
67
- const contents = this.convertMessages(params.messages);
68
- const response = await this.client.models.generateContentStream({
60
+ const contents = params.messages.map(msg => ({
61
+ role: msg.role === 'assistant' ? 'model' : 'user',
62
+ parts: [{ text: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content) }]
63
+ }));
64
+ const stream = await this.client.models.generateContentStream({
69
65
  model: params.model || 'gemini-2.0-flash-exp',
70
66
  contents,
71
- generationConfig: {
67
+ config: {
72
68
  temperature: params.temperature,
73
- maxOutputTokens: params.maxTokens || 4096
69
+ maxOutputTokens: params.maxTokens || 8192
74
70
  }
75
71
  });
76
- for await (const chunk of response) {
72
+ for await (const chunk of stream) {
77
73
  const text = chunk.text || '';
78
74
  if (text) {
79
75
  yield {
80
76
  type: 'content_block_delta',
81
- delta: {
82
- type: 'text_delta',
83
- text
84
- }
77
+ delta: { type: 'text_delta', text }
85
78
  };
86
79
  }
87
80
  }
@@ -90,30 +83,13 @@ export class GeminiProvider {
90
83
  throw this.handleError(error);
91
84
  }
92
85
  }
93
- convertMessages(messages) {
94
- // Gemini expects a single prompt string for simple use cases
95
- // For more complex scenarios, we'd use the chat history format
96
- return messages
97
- .map(msg => {
98
- if (typeof msg.content === 'string') {
99
- return `${msg.role === 'user' ? 'User' : 'Assistant'}: ${msg.content}`;
100
- }
101
- else if (Array.isArray(msg.content)) {
102
- const texts = msg.content
103
- .filter((block) => block.type === 'text')
104
- .map((block) => block.text)
105
- .join('\n');
106
- return `${msg.role === 'user' ? 'User' : 'Assistant'}: ${texts}`;
107
- }
108
- return '';
109
- })
110
- .filter(Boolean)
111
- .join('\n\n');
112
- }
113
86
  calculateCost(usage) {
114
- // Gemini 2.0 Flash pricing: Free up to rate limits, then ~$0.075/MTok input, $0.30/MTok output
115
- const inputCost = (usage.inputTokens / 1_000_000) * 0.075;
116
- const outputCost = (usage.outputTokens / 1_000_000) * 0.30;
87
+ // Gemini pricing varies by model
88
+ const inputTokens = usage.promptTokenCount || 0;
89
+ const outputTokens = usage.candidatesTokenCount || 0;
90
+ // Flash pricing: Free tier or low cost
91
+ const inputCost = (inputTokens / 1_000_000) * 0.075;
92
+ const outputCost = (outputTokens / 1_000_000) * 0.3;
117
93
  return inputCost + outputCost;
118
94
  }
119
95
  handleError(error) {
@@ -21,6 +21,7 @@ export class ModelRouter {
21
21
  process.env.AGENTIC_FLOW_ROUTER_CONFIG,
22
22
  join(homedir(), '.agentic-flow', 'router.config.json'),
23
23
  join(process.cwd(), 'router.config.json'),
24
+ join(process.cwd(), 'config', 'router.config.json'),
24
25
  join(process.cwd(), 'router.config.example.json')
25
26
  ].filter(Boolean);
26
27
  for (const path of paths) {
@@ -31,7 +32,43 @@ export class ModelRouter {
31
32
  return this.substituteEnvVars(config);
32
33
  }
33
34
  }
34
- throw new Error('No router configuration file found');
35
+ // If no config file found, create config from environment variables
36
+ return this.createConfigFromEnv();
37
+ }
38
+ createConfigFromEnv() {
39
+ // Create minimal config from environment variables
40
+ const config = {
41
+ version: '1.0',
42
+ defaultProvider: process.env.PROVIDER || 'anthropic',
43
+ routing: { mode: 'manual' },
44
+ providers: {}
45
+ };
46
+ // Add Anthropic if API key exists
47
+ if (process.env.ANTHROPIC_API_KEY) {
48
+ config.providers.anthropic = {
49
+ apiKey: process.env.ANTHROPIC_API_KEY,
50
+ baseUrl: process.env.ANTHROPIC_BASE_URL
51
+ };
52
+ }
53
+ // Add OpenRouter if API key exists
54
+ if (process.env.OPENROUTER_API_KEY) {
55
+ config.providers.openrouter = {
56
+ apiKey: process.env.OPENROUTER_API_KEY,
57
+ baseUrl: process.env.OPENROUTER_BASE_URL
58
+ };
59
+ }
60
+ // Add Gemini if API key exists
61
+ if (process.env.GOOGLE_GEMINI_API_KEY) {
62
+ config.providers.gemini = {
63
+ apiKey: process.env.GOOGLE_GEMINI_API_KEY
64
+ };
65
+ }
66
+ // ONNX is always available (no API key needed)
67
+ config.providers.onnx = {
68
+ modelPath: process.env.ONNX_MODEL_PATH,
69
+ executionProviders: ['cpu']
70
+ };
71
+ return config;
35
72
  }
36
73
  substituteEnvVars(obj) {
37
74
  if (typeof obj === 'string') {
@@ -54,15 +91,18 @@ export class ModelRouter {
54
91
  return obj;
55
92
  }
56
93
  initializeProviders() {
94
+ const verbose = process.env.ROUTER_VERBOSE === 'true';
57
95
  // Initialize Anthropic
58
96
  if (this.config.providers.anthropic) {
59
97
  try {
60
98
  const provider = new AnthropicProvider(this.config.providers.anthropic);
61
99
  this.providers.set('anthropic', provider);
62
- console.log('✅ Anthropic provider initialized');
100
+ if (verbose)
101
+ console.log('✅ Anthropic provider initialized');
63
102
  }
64
103
  catch (error) {
65
- console.error('❌ Failed to initialize Anthropic:', error);
104
+ if (verbose)
105
+ console.error('❌ Failed to initialize Anthropic:', error);
66
106
  }
67
107
  }
68
108
  // Initialize OpenRouter
@@ -70,10 +110,12 @@ export class ModelRouter {
70
110
  try {
71
111
  const provider = new OpenRouterProvider(this.config.providers.openrouter);
72
112
  this.providers.set('openrouter', provider);
73
- console.log('✅ OpenRouter provider initialized');
113
+ if (verbose)
114
+ console.log('✅ OpenRouter provider initialized');
74
115
  }
75
116
  catch (error) {
76
- console.error('❌ Failed to initialize OpenRouter:', error);
117
+ if (verbose)
118
+ console.error('❌ Failed to initialize OpenRouter:', error);
77
119
  }
78
120
  }
79
121
  // Initialize ONNX Local
@@ -86,10 +128,12 @@ export class ModelRouter {
86
128
  temperature: this.config.providers.onnx.temperature || 0.7
87
129
  });
88
130
  this.providers.set('onnx', provider);
89
- console.log('✅ ONNX Local provider initialized');
131
+ if (verbose)
132
+ console.log('✅ ONNX Local provider initialized');
90
133
  }
91
134
  catch (error) {
92
- console.error('❌ Failed to initialize ONNX:', error);
135
+ if (verbose)
136
+ console.error('❌ Failed to initialize ONNX:', error);
93
137
  }
94
138
  }
95
139
  // Initialize Gemini
@@ -97,10 +141,12 @@ export class ModelRouter {
97
141
  try {
98
142
  const provider = new GeminiProvider(this.config.providers.gemini);
99
143
  this.providers.set('gemini', provider);
100
- console.log('✅ Gemini provider initialized');
144
+ if (verbose)
145
+ console.log('✅ Gemini provider initialized');
101
146
  }
102
147
  catch (error) {
103
- console.error('❌ Failed to initialize Gemini:', error);
148
+ if (verbose)
149
+ console.error('❌ Failed to initialize Gemini:', error);
104
150
  }
105
151
  }
106
152
  // TODO: Initialize other providers (OpenAI, Ollama, LiteLLM)
package/dist/utils/cli.js CHANGED
@@ -28,6 +28,10 @@ export function parseArgs() {
28
28
  case '-h':
29
29
  options.help = true;
30
30
  break;
31
+ case '--version':
32
+ case '-v':
33
+ options.version = true;
34
+ break;
31
35
  case '--agent':
32
36
  case '-a':
33
37
  options.mode = 'agent';
@@ -26,6 +26,10 @@ class Logger {
26
26
  }
27
27
  }
28
28
  debug(message, data) {
29
+ // Skip debug logs unless DEBUG or VERBOSE environment variable is set
30
+ if (!process.env.DEBUG && !process.env.VERBOSE) {
31
+ return;
32
+ }
29
33
  this.log('debug', message, data);
30
34
  }
31
35
  info(message, data) {