praisonai 1.0.10 → 1.0.12

This diff shows the contents of publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
@@ -9,6 +9,7 @@ export declare class Agent {
     private taskAgent;
     constructor(config: ProxyAgentConfig);
     execute(input: Task | string): Promise<any>;
+    start(prompt: string, previousResult?: string): Promise<string>;
     chat(prompt: string, previousResult?: string): Promise<string>;
 }
 export declare class PraisonAIAgents {
@@ -43,6 +43,22 @@ class Agent {
         }
         throw new Error('No agent implementation available');
     }
+    async start(prompt, previousResult) {
+        if (this.simpleAgent) {
+            return this.simpleAgent.start(prompt, previousResult);
+        }
+        else if (this.taskAgent) {
+            // For task agents, we'll use execute but wrap the prompt in a simple task
+            const task = new types_2.Task({
+                name: 'Start Task',
+                description: prompt,
+                expected_output: 'A response to the prompt',
+                dependencies: []
+            });
+            return this.taskAgent.execute(task, [previousResult]);
+        }
+        throw new Error('No agent implementation available');
+    }
     async chat(prompt, previousResult) {
         if (this.simpleAgent) {
             return this.simpleAgent.chat(prompt, previousResult);
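
The headline API change here is a `start` method on the proxy `Agent`, mirroring the existing `chat`. A minimal usage sketch, assuming the package's top-level `Agent` export and an `OPENAI_API_KEY` in the environment:

```ts
import { Agent } from 'praisonai';

const agent = new Agent({ instructions: 'You are a concise assistant.' });

// start() resolves with the full response text; with a task-based agent it
// wraps the prompt in a single task internally, as the hunk above shows.
agent.start('Summarise the plot of Hamlet in two sentences.')
    .then(response => console.log(response));
```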
@@ -2,17 +2,20 @@ export interface SimpleAgentConfig {
     instructions: string;
     name?: string;
     verbose?: boolean;
+    pretty?: boolean;
     llm?: string;
     markdown?: boolean;
+    stream?: boolean;
 }
 export declare class Agent {
     private instructions;
     name: string;
     private verbose;
+    private pretty;
     private llm;
     private markdown;
+    private stream;
     private llmService;
-    private result;
     constructor(config: SimpleAgentConfig);
     private createSystemPrompt;
     start(prompt: string, previousResult?: string): Promise<string>;
@@ -24,12 +27,14 @@ export interface PraisonAIAgentsConfig {
     agents: Agent[];
     tasks: string[];
     verbose?: boolean;
+    pretty?: boolean;
     process?: 'sequential' | 'parallel';
 }
 export declare class PraisonAIAgents {
     private agents;
     private tasks;
     private verbose;
+    private pretty;
     private process;
     constructor(config: PraisonAIAgentsConfig);
     start(): Promise<string[]>;
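
`SimpleAgentConfig` and `PraisonAIAgentsConfig` both gain `pretty`, and the single-agent config also gains `stream`. A hedged sketch of the widened surface (flag semantics are taken from the implementation hunks below; the defaults are `stream: true` plus env-driven `verbose`/`pretty`):

```ts
import { Agent, PraisonAIAgents } from 'praisonai';

const writer = new Agent({
    instructions: 'Write a haiku about the sea.',
    name: 'Writer',
    stream: false, // new: return the full text instead of streaming tokens
    pretty: true,  // new: route logs through the pretty logger
});

const crew = new PraisonAIAgents({
    agents: [writer],
    tasks: ['Write the haiku'],
    pretty: true,  // also accepted at the multi-agent level
});

crew.start().then(results => console.log(results));
```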
@@ -2,41 +2,51 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PraisonAIAgents = exports.Agent = void 0;
 const openai_1 = require("../llm/openai");
+const logger_1 = require("../utils/logger");
 class Agent {
     constructor(config) {
-        this.result = null;
         this.instructions = config.instructions;
         this.name = config.name || `Agent_${Math.random().toString(36).substr(2, 9)}`;
-        this.verbose = config.verbose || false;
+        this.verbose = config.verbose ?? process.env.PRAISON_VERBOSE !== 'false';
+        this.pretty = config.pretty ?? process.env.PRAISON_PRETTY === 'true';
         this.llm = config.llm || 'gpt-4o-mini';
-        this.markdown = config.markdown || true;
+        this.markdown = config.markdown ?? true;
+        this.stream = config.stream ?? true;
         this.llmService = new openai_1.OpenAIService(this.llm);
+        // Configure logging
+        logger_1.Logger.setVerbose(this.verbose);
+        logger_1.Logger.setPretty(this.pretty);
     }
     createSystemPrompt() {
-        return `${this.instructions}
-Please provide detailed, accurate, and helpful responses.
-Format your response in markdown if appropriate.`;
+        let prompt = this.instructions;
+        if (this.markdown) {
+            prompt += '\nPlease format your response in markdown.';
+        }
+        return prompt;
     }
     async start(prompt, previousResult) {
-        if (this.verbose) {
-            console.log(`Agent ${this.name} starting with prompt: ${prompt}`);
-        }
+        await logger_1.Logger.debug(`Agent ${this.name} starting with prompt: ${prompt}`);
         try {
             // Replace placeholder with previous result if available
-            const finalPrompt = previousResult
-                ? prompt.replace('{previous_result}', previousResult)
-                : prompt;
-            if (this.verbose) {
-                console.log('Generating response (streaming)...');
-                await this.llmService.streamText(finalPrompt, this.createSystemPrompt(), 0.7, (token) => process.stdout.write(token));
-                console.log('\n');
+            if (previousResult) {
+                prompt = prompt.replace('{{previous}}', previousResult);
+            }
+            let response;
+            if (this.stream) {
+                let fullResponse = '';
+                await this.llmService.streamText(prompt, this.createSystemPrompt(), 0.7, (token) => {
+                    process.stdout.write(token);
+                    fullResponse += token;
+                });
+                response = fullResponse;
             }
-            // Get the final response
-            this.result = await this.llmService.generateText(finalPrompt, this.createSystemPrompt());
-            return this.result;
+            else {
+                response = await this.llmService.generateText(prompt, this.createSystemPrompt());
+            }
+            return response;
         }
         catch (error) {
-            console.error(`Error executing prompt: ${error}`);
+            await logger_1.Logger.error('Error in agent execution', error);
             throw error;
         }
     }
@@ -48,7 +58,7 @@ Format your response in markdown if appropriate.`;
         return this.start(this.instructions, previousResult);
     }
     getResult() {
-        return this.result;
+        return null;
     }
 }
 exports.Agent = Agent;
@@ -57,12 +67,14 @@ class PraisonAIAgents {
         this.agents = config.agents;
         this.tasks = config.tasks;
         this.verbose = config.verbose || false;
+        this.pretty = config.pretty || false;
         this.process = config.process || 'sequential';
+        // Configure logging
+        logger_1.Logger.setVerbose(config.verbose ?? process.env.PRAISON_VERBOSE !== 'false');
+        logger_1.Logger.setPretty(config.pretty ?? process.env.PRAISON_PRETTY === 'true');
     }
     async start() {
-        if (this.verbose) {
-            console.log('Starting PraisonAI Agents execution...');
-        }
+        await logger_1.Logger.debug('Starting PraisonAI Agents execution...');
         let results;
         if (this.process === 'parallel') {
            results = await Promise.all(this.tasks.map((task, index) => this.agents[index].start(task)));
@@ -88,9 +100,7 @@ class PraisonAIAgents {
            const agent = this.agents[i];
            const task = this.tasks[i];
            const previousResult = i > 0 ? results[i - 1] : undefined;
-            if (this.verbose) {
-                console.log(`Agent ${agent.name} starting with prompt: ${task}`);
-            }
+            await logger_1.Logger.info(`Agent ${agent.name} starting with prompt: ${task}`);
            const result = await agent.start(task, previousResult);
            results.push(result);
        }
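
One behavioural note for upgraders buried in this hunk: the previous-result placeholder changed from `{previous_result}` to `{{previous}}`. A hedged sketch of sequential chaining under the new syntax (task strings are illustrative):

```ts
import { Agent, PraisonAIAgents } from 'praisonai';

const researcher = new Agent({ instructions: 'Research the given topic.', name: 'Researcher' });
const summarizer = new Agent({ instructions: 'Summarise research notes.', name: 'Summarizer' });

const crew = new PraisonAIAgents({
    agents: [researcher, summarizer],
    tasks: [
        'Collect three facts about honeybees',
        // {{previous}} is substituted with the prior agent's output
        // (this placeholder was {previous_result} in 1.0.10)
        'Summarise these notes in one paragraph: {{previous}}',
    ],
    process: 'sequential',
});

crew.start();
```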
@@ -91,7 +91,7 @@ class PraisonAIAgents {
         logger_1.Logger.debug('PraisonAIAgents initialized', { config });
     }
     async start() {
-        logger_1.Logger.info('Starting PraisonAI Agents execution...');
+        logger_1.Logger.debug('Starting PraisonAI Agents execution...');
         logger_1.Logger.debug('Starting with process mode:', this.process);
         let results;
         switch (this.process) {
@@ -2,14 +2,19 @@ export interface LLMResponse {
     content: string;
     role: string;
 }
+type ChatRole = 'system' | 'user' | 'assistant';
+interface ChatMessage {
+    role: ChatRole;
+    content: string;
+}
 export declare class OpenAIService {
-    private client;
     private model;
+    private client;
     constructor(model?: string);
+    private getClient;
     generateText(prompt: string, systemPrompt?: string, temperature?: number): Promise<string>;
+    generateChat(messages: ChatMessage[], temperature?: number): Promise<LLMResponse>;
     streamText(prompt: string, systemPrompt: string | undefined, temperature: number | undefined, onToken: (token: string) => void): Promise<void>;
-    chatCompletion(messages: Array<{
-        role: 'system' | 'user' | 'assistant';
-        content: string;
-    }>, temperature?: number): Promise<LLMResponse>;
+    chatCompletion(messages: ChatMessage[], temperature?: number): Promise<LLMResponse>;
 }
+export {};
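
The inline message type is factored into a shared `ChatMessage`, and a new `generateChat` method joins the surface. A hedged usage sketch; `OpenAIService` lives at an internal compiled path (`../llm/openai`), so the import below is illustrative rather than a documented entry point:

```ts
// Illustrative import of an internal module; not a documented public path.
import { OpenAIService } from 'praisonai/dist/llm/openai';

async function main() {
    const svc = new OpenAIService('gpt-4o-mini');
    const reply = await svc.generateChat([
        { role: 'system', content: 'You are terse.' },
        { role: 'user', content: 'Name one prime number.' },
    ], 0.2);
    console.log(reply.role, reply.content); // LLMResponse { content, role }
}

main();
```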
@@ -7,80 +7,140 @@ exports.OpenAIService = void 0;
 const openai_1 = __importDefault(require("openai"));
 const dotenv_1 = __importDefault(require("dotenv"));
 const logger_1 = require("../utils/logger");
+// Load environment variables once at the application level
 dotenv_1.default.config();
-class OpenAIService {
-    constructor(model = 'gpt-4o-mini') {
-        if (!process.env.OPENAI_API_KEY) {
-            throw new Error('OPENAI_API_KEY not found in environment variables');
-        }
-        this.client = new openai_1.default({
+if (!process.env.OPENAI_API_KEY) {
+    throw new Error('OPENAI_API_KEY not found in environment variables');
+}
+// Singleton instance for OpenAI client
+let openAIInstance = null;
+// Get cached OpenAI client instance
+async function getOpenAIClient() {
+    if (!openAIInstance) {
+        openAIInstance = new openai_1.default({
             apiKey: process.env.OPENAI_API_KEY
         });
+        await logger_1.Logger.success('OpenAI client initialized');
+    }
+    return openAIInstance;
+}
+class OpenAIService {
+    constructor(model = 'gpt-4o-mini') {
+        this.client = null;
         this.model = model;
         logger_1.Logger.debug(`OpenAIService initialized with model: ${model}`);
     }
+    // Lazy initialization of client
+    async getClient() {
+        if (!this.client) {
+            this.client = await getOpenAIClient();
+        }
+        return this.client;
+    }
     async generateText(prompt, systemPrompt = '', temperature = 0.7) {
-        logger_1.Logger.debug('Generating text with OpenAI', {
-            model: this.model,
-            temperature,
-            systemPrompt,
-            prompt
-        });
-        const completion = await this.client.chat.completions.create({
-            model: this.model,
-            messages: [
-                { role: 'system', content: systemPrompt },
-                { role: 'user', content: prompt }
-            ],
-            temperature,
-        });
-        const response = completion.choices[0].message.content || '';
-        logger_1.Logger.debug('OpenAI response received', { response });
-        return response;
+        await logger_1.Logger.startSpinner('Generating text with OpenAI...');
+        const messages = [];
+        if (systemPrompt) {
+            messages.push({ role: 'system', content: systemPrompt });
+        }
+        messages.push({ role: 'user', content: prompt });
+        try {
+            const completion = await this.getClient().then(client => client.chat.completions.create({
+                model: this.model,
+                temperature,
+                messages
+            }));
+            const response = completion.choices[0]?.message?.content;
+            if (!response) {
+                throw new Error('No response from OpenAI');
+            }
+            await logger_1.Logger.stopSpinner(true);
+            await logger_1.Logger.section('Generated Response', response);
+            return response;
+        }
+        catch (error) {
+            await logger_1.Logger.stopSpinner(false);
+            await logger_1.Logger.error('Error generating text', error);
+            throw error;
+        }
+    }
+    async generateChat(messages, temperature = 0.7) {
+        await logger_1.Logger.startSpinner('Generating chat response...');
+        try {
+            const completion = await this.getClient().then(client => client.chat.completions.create({
+                model: this.model,
+                temperature,
+                messages
+            }));
+            const response = completion.choices[0]?.message;
+            if (!response) {
+                throw new Error('No response from OpenAI');
+            }
+            await logger_1.Logger.stopSpinner(true);
+            const result = {
+                content: response.content || '',
+                role: response.role
+            };
+            await logger_1.Logger.section('Chat Response', result.content);
+            return result;
+        }
+        catch (error) {
+            await logger_1.Logger.stopSpinner(false);
+            await logger_1.Logger.error('Error generating chat response', error);
+            throw error;
+        }
     }
     async streamText(prompt, systemPrompt = '', temperature = 0.7, onToken) {
-        logger_1.Logger.debug('Streaming text with OpenAI', {
-            model: this.model,
-            temperature,
-            systemPrompt,
-            prompt
-        });
-        const stream = await this.client.chat.completions.create({
+        await logger_1.Logger.debug('Starting text stream...', {
             model: this.model,
-            messages: [
-                { role: 'system', content: systemPrompt },
-                { role: 'user', content: prompt }
-            ],
-            temperature,
-            stream: true,
+            temperature
         });
-        let fullResponse = '';
-        for await (const chunk of stream) {
-            const content = chunk.choices[0]?.delta?.content;
-            if (content) {
-                onToken(content);
-                fullResponse += content;
+        const messages = [];
+        if (systemPrompt) {
+            messages.push({ role: 'system', content: systemPrompt });
+        }
+        messages.push({ role: 'user', content: prompt });
+        try {
+            const stream = await this.getClient().then(client => client.chat.completions.create({
+                model: this.model,
+                temperature,
+                messages,
+                stream: true,
+            }));
+            let fullResponse = '';
+            for await (const chunk of stream) {
+                const token = chunk.choices[0]?.delta?.content || '';
+                fullResponse += token;
+                onToken(token);
             }
+            await logger_1.Logger.success('Stream completed successfully');
+        }
+        catch (error) {
+            await logger_1.Logger.error('Error in text stream', error);
+            throw error;
         }
-        logger_1.Logger.debug('OpenAI streaming completed', { fullResponse });
     }
     async chatCompletion(messages, temperature = 0.7) {
-        logger_1.Logger.debug('Chat completion with OpenAI', {
-            model: this.model,
-            temperature,
-            messages
-        });
-        const completion = await this.client.chat.completions.create({
-            model: this.model,
-            messages,
-            temperature,
-        });
-        const response = {
-            content: completion.choices[0].message.content || '',
-            role: completion.choices[0].message.role
-        };
-        logger_1.Logger.debug('OpenAI chat completion response received', { response });
-        return response;
+        await logger_1.Logger.startSpinner('Chat completion with OpenAI...');
+        try {
+            const completion = await this.getClient().then(client => client.chat.completions.create({
+                model: this.model,
+                temperature,
+                messages
+            }));
+            const response = {
+                content: completion.choices[0].message.content || '',
+                role: completion.choices[0].message.role
+            };
+            await logger_1.Logger.stopSpinner(true);
+            await logger_1.Logger.section('Chat Completion Response', response.content);
+            return response;
+        }
+        catch (error) {
+            await logger_1.Logger.stopSpinner(false);
+            await logger_1.Logger.error('Error in chat completion', error);
+            throw error;
+        }
     }
 }
 exports.OpenAIService = OpenAIService;
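
The refactor replaces per-instance client construction with a module-level singleton plus lazy per-instance lookup. A minimal TypeScript sketch of the same pattern, stripped of the logging (names mirror the compiled output above):

```ts
import OpenAI from 'openai';

// Module-level singleton: every OpenAIService instance shares one client.
let openAIInstance: OpenAI | null = null;

async function getOpenAIClient(): Promise<OpenAI> {
    if (!openAIInstance) {
        openAIInstance = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
    }
    return openAIInstance;
}

class OpenAIService {
    private client: OpenAI | null = null;
    constructor(private model = 'gpt-4o-mini') {}

    // Lazy initialization: the shared client is resolved on first use.
    private async getClient(): Promise<OpenAI> {
        this.client ??= await getOpenAIClient();
        return this.client;
    }

    async generateText(prompt: string): Promise<string> {
        const client = await this.getClient();
        const completion = await client.chat.completions.create({
            model: this.model,
            messages: [{ role: 'user', content: prompt }],
        });
        return completion.choices[0]?.message?.content ?? '';
    }
}
```

One design note: the compiled call sites use `this.getClient().then(client => ...)` inside an `await`, which is equivalent to the plain `const client = await this.getClient()` written here.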
@@ -6,10 +6,20 @@ export declare enum LogLevel {
 }
 export declare class Logger {
     private static level;
+    private static verbose;
+    private static pretty;
     private static getCircularReplacer;
     private static formatContext;
-    static debug(message: string, context?: any): void;
-    static info(message: string, context?: any): void;
-    static warn(message: string, context?: any): void;
-    static error(message: string, context?: any): void;
+    static setVerbose(verbose: boolean): void;
+    static setPretty(pretty: boolean): void;
+    static debug(message: string, context?: any): Promise<void>;
+    static info(message: string, context?: any): Promise<void>;
+    static warn(message: string, context?: any): Promise<void>;
+    static error(message: string, context?: any): Promise<void>;
+    static success(message: string, data?: unknown): Promise<void>;
+    static startSpinner(text: string): Promise<void>;
+    static updateSpinner(text: string): Promise<void>;
+    static stopSpinner(success?: boolean): Promise<void>;
+    static table(headers: string[], data: (string | number)[][]): Promise<void>;
+    static section(title: string, content: string): Promise<void>;
 }
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Logger = exports.LogLevel = void 0;
+const pretty_logger_1 = require("./pretty-logger");
 var LogLevel;
 (function (LogLevel) {
     LogLevel[LogLevel["DEBUG"] = 0] = "DEBUG";
@@ -12,9 +13,9 @@ class Logger {
     static getCircularReplacer() {
         const seen = new WeakSet();
         return (key, value) => {
-            if (typeof value === 'object' && value !== null) {
+            if (typeof value === "object" && value !== null) {
                 if (seen.has(value)) {
-                    return '[Circular]';
+                    return "[Circular]";
                 }
                 seen.add(value);
             }
@@ -26,29 +27,123 @@ class Logger {
             return JSON.stringify(context, this.getCircularReplacer(), 2);
         }
         catch (error) {
-            return '[Unable to stringify context]';
+            return String(context);
         }
     }
-    static debug(message, context) {
-        if (this.level <= LogLevel.DEBUG) {
-            console.log(`[DEBUG] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+    static setVerbose(verbose) {
+        this.verbose = verbose;
+    }
+    static setPretty(pretty) {
+        this.pretty = pretty;
+    }
+    static async debug(message, context) {
+        if (this.level <= LogLevel.DEBUG && this.verbose) {
+            if (this.pretty) {
+                await pretty_logger_1.PrettyLogger.info(message, context);
+            }
+            else {
+                console.log(`[DEBUG] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+            }
         }
     }
-    static info(message, context) {
-        if (this.level <= LogLevel.INFO) {
-            console.log(`[INFO] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+    static async info(message, context) {
+        if (this.level <= LogLevel.INFO && this.verbose) {
+            if (this.pretty) {
+                await pretty_logger_1.PrettyLogger.info(message, context);
+            }
+            else {
+                console.log(`[INFO] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+            }
         }
     }
-    static warn(message, context) {
-        if (this.level <= LogLevel.WARN) {
-            console.warn(`[WARN] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+    static async warn(message, context) {
+        if (this.level <= LogLevel.WARN && this.verbose) {
+            if (this.pretty) {
+                await pretty_logger_1.PrettyLogger.warning(message, context);
+            }
+            else {
+                console.warn(`[WARN] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+            }
         }
     }
-    static error(message, context) {
+    static async error(message, context) {
         if (this.level <= LogLevel.ERROR) {
-            console.error(`[ERROR] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+            if (this.pretty) {
+                await pretty_logger_1.PrettyLogger.error(message, context);
+            }
+            else {
+                console.error(`[ERROR] ${message}${context ? '\nContext: ' + this.formatContext(context) : ''}`);
+            }
+        }
+    }
+    static async success(message, data) {
+        if (!this.verbose)
+            return;
+        if (this.pretty) {
+            await pretty_logger_1.PrettyLogger.success(message, data);
+        }
+        else {
+            console.log(`✓ ${message}`);
+            if (data) {
+                console.log(data);
+            }
+        }
+    }
+    static async startSpinner(text) {
+        if (!this.verbose)
+            return;
+        if (this.pretty) {
+            await pretty_logger_1.PrettyLogger.startSpinner(text);
+        }
+        else {
+            console.log(`⟳ ${text}`);
+        }
+    }
+    static async updateSpinner(text) {
+        if (!this.verbose)
+            return;
+        if (this.pretty) {
+            await pretty_logger_1.PrettyLogger.updateSpinner(text);
+        }
+        else {
+            console.log(`⟳ ${text}`);
+        }
+    }
+    static async stopSpinner(success = true) {
+        if (!this.verbose)
+            return;
+        if (this.pretty) {
+            await pretty_logger_1.PrettyLogger.stopSpinner(success);
+        }
+        else {
+            // Already logged in startSpinner
+        }
+    }
+    static async table(headers, data) {
+        if (!this.verbose)
+            return;
+        if (this.pretty) {
+            await pretty_logger_1.PrettyLogger.table(headers, data);
+        }
+        else {
+            console.log(headers.join('\t'));
+            data.forEach(row => console.log(row.join('\t')));
+        }
+    }
+    static async section(title, content) {
+        if (!this.verbose)
+            return;
+        if (this.pretty) {
+            await pretty_logger_1.PrettyLogger.section(title, content);
+        }
+        else {
+            console.log(`\n=== ${title} ===`);
+            console.log(content);
+            console.log('='.repeat(title.length + 8));
         }
     }
 }
 exports.Logger = Logger;
 Logger.level = process.env.LOGLEVEL === 'debug' ? LogLevel.DEBUG : LogLevel.INFO;
+Logger.verbose = true;
+Logger.pretty = false;
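
Verbosity and pretty output are now controlled globally, with env-var fallbacks read in the hunks above (`PRAISON_VERBOSE`, `PRAISON_PRETTY`, plus the existing `LOGLEVEL`). A hedged sketch of the knobs:

```ts
import { Agent } from 'praisonai';

// Explicit config flags win; otherwise the constructors fall back to env vars:
//   PRAISON_VERBOSE=false -> suppress debug/info/success output
//   PRAISON_PRETTY=true   -> route logs through PrettyLogger
//   LOGLEVEL=debug        -> lower the Logger threshold to DEBUG
const agent = new Agent({
    instructions: 'Explain photosynthesis briefly.',
    verbose: true,  // same effect as leaving PRAISON_VERBOSE unset
    pretty: false,  // set true (or PRAISON_PRETTY=true) for boxed, colourised logs
});
```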
@@ -0,0 +1,17 @@
+export declare class PrettyLogger {
+    private static spinner;
+    private static lastSpinnerText;
+    private static initialized;
+    private static isLoaded;
+    private static init;
+    static showTitle(text: string): Promise<void>;
+    static info(message: string, data?: unknown): Promise<void>;
+    static success(message: string, data?: unknown): Promise<void>;
+    static error(message: string, error?: unknown): Promise<void>;
+    static warning(message: string, data?: unknown): Promise<void>;
+    static startSpinner(text: string): Promise<void>;
+    static updateSpinner(text: string): Promise<void>;
+    static stopSpinner(success?: boolean): Promise<void>;
+    static table(headers: string[], data: (string | number)[][]): Promise<void>;
+    static section(title: string, content: string): Promise<void>;
+}
@@ -0,0 +1,224 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PrettyLogger = void 0;
+// We'll load these dynamically since they're ES modules
+let chalk;
+let boxen;
+let ora;
+let Table;
+let figlet;
+// Load dependencies dynamically
+async function loadDependencies() {
+    try {
+        const imports = await Promise.all([
+            Promise.resolve().then(() => __importStar(require('chalk'))),
+            Promise.resolve().then(() => __importStar(require('boxen'))),
+            Promise.resolve().then(() => __importStar(require('ora'))),
+            Promise.resolve().then(() => __importStar(require('cli-table3'))),
+            Promise.resolve().then(() => __importStar(require('figlet')))
+        ]);
+        [chalk, boxen, ora, Table, figlet] = imports.map(imp => imp.default);
+        return true;
+    }
+    catch (error) {
+        console.warn('Pretty logging dependencies not available, falling back to basic logging');
+        return false;
+    }
+}
+class PrettyLogger {
+    static async init() {
+        if (!this.initialized) {
+            this.isLoaded = await loadDependencies();
+            this.initialized = true;
+        }
+        return this.isLoaded;
+    }
+    static async showTitle(text) {
+        if (!await this.init()) {
+            console.log(text);
+            return;
+        }
+        return new Promise((resolve, reject) => {
+            figlet(text, (err, data) => {
+                if (err) {
+                    reject(err);
+                    return;
+                }
+                if (data) {
+                    console.log(chalk.cyan(data));
+                }
+                resolve();
+            });
+        });
+    }
+    static async info(message, data) {
+        if (!await this.init()) {
+            console.log(`ℹ ${message}`);
+            if (data)
+                console.log(data);
+            return;
+        }
+        console.log(chalk.blue('ℹ'), chalk.blue(message));
+        if (data) {
+            console.log(boxen(JSON.stringify(data, null, 2), {
+                padding: 1,
+                margin: 1,
+                borderStyle: 'round',
+                borderColor: 'blue'
+            }));
+        }
+    }
+    static async success(message, data) {
+        if (!await this.init()) {
+            console.log(`✓ ${message}`);
+            if (data)
+                console.log(data);
+            return;
+        }
+        console.log(chalk.green('✓'), chalk.green(message));
+        if (data) {
+            console.log(boxen(JSON.stringify(data, null, 2), {
+                padding: 1,
+                margin: 1,
+                borderStyle: 'round',
+                borderColor: 'green'
+            }));
+        }
+    }
+    static async error(message, error) {
+        if (!await this.init()) {
+            console.error(`✗ ${message}`);
+            if (error)
+                console.error(error);
+            return;
+        }
+        console.log(chalk.red('✗'), chalk.red(message));
+        if (error) {
+            console.log(boxen(JSON.stringify(error, null, 2), {
+                padding: 1,
+                margin: 1,
+                borderStyle: 'round',
+                borderColor: 'red'
+            }));
+        }
+    }
+    static async warning(message, data) {
+        if (!await this.init()) {
+            console.warn(`⚠ ${message}`);
+            if (data)
+                console.warn(data);
+            return;
+        }
+        console.log(chalk.yellow('⚠'), chalk.yellow(message));
+        if (data) {
+            console.log(boxen(JSON.stringify(data, null, 2), {
+                padding: 1,
+                margin: 1,
+                borderStyle: 'round',
+                borderColor: 'yellow'
+            }));
+        }
+    }
+    static async startSpinner(text) {
+        if (!await this.init()) {
+            console.log(`⟳ ${text}`);
+            return;
+        }
+        this.lastSpinnerText = text;
+        this.spinner = ora({
+            text: chalk.cyan(text),
+            color: 'cyan'
+        }).start();
+    }
+    static async updateSpinner(text) {
+        if (!await this.init()) {
+            console.log(`⟳ ${text}`);
+            return;
+        }
+        if (this.spinner) {
+            this.lastSpinnerText = text;
+            this.spinner.text = chalk.cyan(text);
+        }
+    }
+    static async stopSpinner(success = true) {
+        if (!await this.init())
+            return;
+        if (this.spinner) {
+            if (success) {
+                this.spinner.succeed(chalk.green(this.lastSpinnerText));
+            }
+            else {
+                this.spinner.fail(chalk.red(this.lastSpinnerText));
+            }
+            this.spinner = null;
+        }
+    }
+    static async table(headers, data) {
+        if (!await this.init()) {
+            console.log(headers.join('\t'));
+            data.forEach(row => console.log(row.join('\t')));
+            return;
+        }
+        const table = new Table({
+            head: headers.map(h => chalk.cyan(h)),
+            style: {
+                head: [],
+                border: []
+            }
+        });
+        data.forEach(row => table.push(row));
+        console.log(table.toString());
+    }
+    static async section(title, content) {
+        if (!await this.init()) {
+            console.log(`\n=== ${title} ===`);
+            console.log(content);
+            console.log('='.repeat(title.length + 8));
+            return;
+        }
+        console.log('\n' + boxen(chalk.bold(title) + '\n\n' + content, {
+            padding: 1,
+            margin: 1,
+            borderStyle: 'double',
+            borderColor: 'cyan'
+        }));
+    }
+}
+exports.PrettyLogger = PrettyLogger;
+PrettyLogger.spinner = null;
+PrettyLogger.lastSpinnerText = '';
+PrettyLogger.initialized = false;
+PrettyLogger.isLoaded = false;
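
The `__importStar` boilerplate above is tsc's CommonJS shim for dynamic imports; the underlying pattern is worth isolating. The idea: lazily load optional, possibly ESM-only dependencies (boxen ^7, for example) and degrade to plain console output when they are absent. A minimal hedged sketch:

```ts
// Optional pretty-print dependency, loaded lazily. boxen ^7 ships ESM-only,
// so a CommonJS build must use dynamic import() rather than require().
let boxen: ((text: string, options?: object) => string) | null = null;

async function loadDeps(): Promise<boolean> {
    try {
        boxen = (await import('boxen')).default;
        return true;
    } catch {
        return false; // listed under optionalDependencies, so it may be missing
    }
}

export async function section(title: string, content: string): Promise<void> {
    if (await loadDeps() && boxen) {
        console.log(boxen(`${title}\n\n${content}`, { padding: 1, borderStyle: 'double' }));
    } else {
        // Basic fallback mirroring the compiled code's behaviour
        console.log(`\n=== ${title} ===\n${content}\n${'='.repeat(title.length + 8)}`);
    }
}
```

This pairs with the move of chalk, boxen, ora, cli-table3, and figlet into `optionalDependencies` in the package.json diff below.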
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "praisonai",
-  "version": "1.0.10",
+  "version": "1.0.12",
   "description": "PraisonAI TypeScript AI Agents Framework - Node.js, npm, and Javascript AI Agents Framework",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -43,11 +43,12 @@
   "author": "Mervin Praison",
   "license": "MIT",
   "devDependencies": {
+    "@types/figlet": "^1.7.0",
     "@types/jest": "^29.5.14",
     "@types/node": "^22.12.0",
     "@typescript-eslint/eslint-plugin": "^8.22.0",
     "@typescript-eslint/parser": "^8.22.0",
-    "eslint": "^8.57.0",
+    "eslint": "^9.19.0",
     "jest": "^29.7.0",
     "rimraf": "^5.0.5",
     "ts-jest": "^29.1.2",
@@ -56,11 +57,22 @@
     "typescript": "^5.7.3"
   },
   "dependencies": {
-    "axios": "^1.6.7",
-    "dotenv": "^16.4.1",
-    "fast-xml-parser": "^4.3.4",
-    "openai": "^4.24.7",
-    "praisonai": "^1.0.8"
+    "axios": "^1.7.9",
+    "dotenv": "^16.4.7",
+    "fast-xml-parser": "^4.5.1",
+    "node-fetch": "^3.3.2",
+    "openai": "^4.81.0",
+    "praisonai": "^1.0.10"
   },
+  "optionalDependencies": {
+    "boxen": "^7.1.1",
+    "chalk": "^4.1.2",
+    "cli-table3": "^0.6.3",
+    "figlet": "^1.7.0",
+    "ora": "^5.4.1"
+  },
+  "overrides": {
+    "whatwg-url": "^14.1.0"
+  },
   "engines": {
     "node": ">=14.0.0"