n8n-nodes-rooyai-chat 0.1.0 → 0.3.0

This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/README.md CHANGED
@@ -4,7 +4,7 @@ Custom n8n node for Rooyai Chat API - A supply node that provides Rooyai languag
4
4
 
5
5
  ## Description
6
6
 
7
- This n8n custom node provides a **Rooyai Chat Model** supply node that can be connected to the **Basic LLM Chain** node, **AI Agent**, **Better AI Agent**, and other AI processing nodes in n8n.
7
+ This n8n custom node provides a **Rooyai Chat Model** supply node that can be connected to the **Basic LLM Chain** node, **AI Agent**, and other AI processing nodes in n8n.
8
8
 
9
9
  ## Features
10
10
 
@@ -77,7 +77,7 @@ Then restart your n8n instance. The node will appear in the node palette under *
77
77
  Connect the **Model** output from **Rooyai Chat Model** to:
78
78
  - **Basic LLM Chain**
79
79
  - **AI Agent**
80
- - **Better AI Agent**
80
+
81
81
  - Any other n8n AI node that accepts a Language Model
82
82
 
83
83
  ## Available Models
@@ -26,14 +26,6 @@ class RooyaiApi {
26
26
  required: true,
27
27
  description: 'Rooyai API base URL',
28
28
  },
29
- {
30
- displayName: 'Optional Headers',
31
- name: 'optionalHeaders',
32
- type: 'string',
33
- default: '',
34
- placeholder: '{"X-Custom-Header": "value"}',
35
- description: 'Additional headers as JSON object (optional)',
36
- },
37
29
  ];
38
30
  this.authenticate = {
39
31
  type: 'generic',
@@ -48,10 +40,13 @@ class RooyaiApi {
48
40
  baseURL: '={{$credentials.baseUrl}}',
49
41
  url: '/chat',
50
42
  method: 'POST',
43
+ headers: {
44
+ 'Content-Type': 'application/json',
45
+ },
51
46
  body: {
52
47
  model: 'gemini-2.0-flash',
53
- messages: [{ role: 'user', content: 'Hi' }],
54
- max_tokens: 10,
48
+ messages: [{ role: 'user', content: 'test' }],
49
+ max_tokens: 5,
55
50
  },
56
51
  },
57
52
  };
@@ -10,27 +10,27 @@ class N8nLlmTracing extends base_1.BaseCallbackHandler {
10
10
  }
11
11
  async handleLLMStart(llm, prompts, runId, parentRunId, extraParams, tags, metadata) {
12
12
  if (this.supplyDataFunctions.logger) {
13
- this.supplyDataFunctions.logger.debug('Rooyai LLM started', {
13
+ this.supplyDataFunctions.logger.info('🚀 Rooyai LLM started', {
14
14
  runId,
15
- parentRunId,
16
- prompts,
15
+ promptCount: prompts.length,
16
+ model: extraParams?.invocation_params?.model || 'unknown',
17
17
  });
18
18
  }
19
19
  }
20
20
  async handleLLMEnd(output, runId, parentRunId, tags) {
21
21
  if (this.supplyDataFunctions.logger) {
22
- this.supplyDataFunctions.logger.debug('Rooyai LLM finished', {
22
+ this.supplyDataFunctions.logger.info('Rooyai LLM finished successfully', {
23
23
  runId,
24
- parentRunId,
24
+ generationCount: output?.generations?.length || 0,
25
25
  });
26
26
  }
27
27
  }
28
28
  async handleLLMError(error, runId, parentRunId, tags) {
29
29
  if (this.supplyDataFunctions.logger) {
30
- this.supplyDataFunctions.logger.error('Rooyai LLM error', {
30
+ this.supplyDataFunctions.logger.error('Rooyai LLM error', {
31
31
  runId,
32
- parentRunId,
33
32
  error: error.message,
33
+ stack: error.stack,
34
34
  });
35
35
  }
36
36
  }
@@ -138,7 +138,6 @@ class Rooyai {
138
138
  const credentials = await this.getCredentials('rooyaiApi');
139
139
  const apiKey = credentials.apiKey;
140
140
  const baseUrl = credentials.baseUrl;
141
- const optionalHeaders = credentials.optionalHeaders;
142
141
  if (!apiKey) {
143
142
  throw new Error('Rooyai API key not found in credentials.');
144
143
  }
@@ -147,32 +146,29 @@ class Rooyai {
147
146
  }
148
147
  const modelName = this.getNodeParameter('model', itemIndex);
149
148
  const options = this.getNodeParameter('options', itemIndex, {});
150
- let parsedHeaders;
151
- if (optionalHeaders) {
152
- try {
153
- parsedHeaders = JSON.parse(optionalHeaders);
154
- }
155
- catch (error) {
156
- throw new Error(`Invalid JSON in optional headers: ${error instanceof Error ? error.message : String(error)}`);
157
- }
158
- }
159
149
  if (this.logger) {
160
- this.logger.info('Rooyai Chat Model initialized', {
150
+ this.logger.info('🎯 Initializing Rooyai Chat Model', {
161
151
  model: modelName,
162
152
  temperature: options.temperature ?? 0.7,
163
153
  maxTokens: options.maxTokensToSample,
154
+ baseUrl,
164
155
  });
165
156
  }
166
157
  const model = new RooyaiLangChainWrapper_1.RooyaiLangChainWrapper({
167
158
  apiKey,
168
159
  baseUrl,
169
- optionalHeaders: parsedHeaders,
170
160
  model: modelName,
171
161
  maxTokens: options.maxTokensToSample,
172
162
  temperature: options.temperature ?? 0.7,
173
163
  supplyDataFunctions: this,
174
164
  callbacks: [new N8nLlmTracing_1.N8nLlmTracing(this)],
165
+ verbose: true,
175
166
  });
167
+ if (this.logger) {
168
+ this.logger.info('✅ Rooyai Chat Model ready', {
169
+ model: modelName,
170
+ });
171
+ }
176
172
  return {
177
173
  response: model,
178
174
  };
@@ -6,22 +6,22 @@ import { ISupplyDataFunctions } from 'n8n-workflow';
6
6
  interface RooyaiLangChainWrapperParams {
7
7
  apiKey: string;
8
8
  baseUrl: string;
9
- optionalHeaders?: Record<string, string>;
10
9
  model: string;
11
10
  temperature?: number;
12
11
  maxTokens?: number;
13
12
  supplyDataFunctions?: ISupplyDataFunctions;
14
13
  callbacks?: any[];
14
+ verbose?: boolean;
15
15
  }
16
16
  export declare class RooyaiLangChainWrapper extends BaseChatModel {
17
17
  lc_namespace: string[];
18
18
  private apiKey;
19
19
  private baseUrl;
20
- private optionalHeaders?;
21
20
  private model;
22
21
  private temperature;
23
22
  private maxTokens?;
24
23
  private supplyDataFunctions?;
24
+ verbose: boolean;
25
25
  constructor(params: RooyaiLangChainWrapperParams);
26
26
  _llmType(): string;
27
27
  _modelType(): string;
@@ -13,15 +13,16 @@ class RooyaiLangChainWrapper extends chat_models_1.BaseChatModel {
13
13
  super({
14
14
  ...params,
15
15
  callbacks,
16
+ verbose: params.verbose ?? false,
16
17
  });
17
18
  this.lc_namespace = ['n8n', 'rooyai', 'chat'];
18
19
  this.apiKey = params.apiKey;
19
20
  this.baseUrl = params.baseUrl;
20
- this.optionalHeaders = params.optionalHeaders;
21
21
  this.model = params.model;
22
22
  this.temperature = params.temperature ?? 0.7;
23
23
  this.maxTokens = params.maxTokens;
24
24
  this.supplyDataFunctions = params.supplyDataFunctions;
25
+ this.verbose = params.verbose ?? false;
25
26
  }
26
27
  _llmType() {
27
28
  return 'rooyai';
@@ -34,10 +35,16 @@ class RooyaiLangChainWrapper extends chat_models_1.BaseChatModel {
34
35
  'Authorization': `Bearer ${this.apiKey}`,
35
36
  'Content-Type': 'application/json',
36
37
  };
37
- if (this.optionalHeaders) {
38
- Object.assign(headers, this.optionalHeaders);
39
- }
40
38
  const url = `${this.baseUrl}/chat`;
39
+ if (this.verbose && this.supplyDataFunctions?.logger) {
40
+ this.supplyDataFunctions.logger.debug('📡 Calling Rooyai API', {
41
+ url,
42
+ model: body.model,
43
+ messageCount: body.messages.length,
44
+ temperature: body.temperature,
45
+ maxTokens: body.max_tokens,
46
+ });
47
+ }
41
48
  try {
42
49
  const response = await axios_1.default.post(url, body, {
43
50
  headers,
@@ -49,9 +56,22 @@ class RooyaiLangChainWrapper extends chat_models_1.BaseChatModel {
49
56
  : JSON.stringify(response.data);
50
57
  throw new Error(`Rooyai API error (${response.status}): ${errorText}`);
51
58
  }
59
+ if (this.verbose && this.supplyDataFunctions?.logger) {
60
+ this.supplyDataFunctions.logger.debug('✅ Rooyai API response received', {
61
+ status: response.status,
62
+ hasChoices: !!response.data.choices,
63
+ hasReply: !!response.data.reply,
64
+ cost: response.data.usage?.cost_usd,
65
+ });
66
+ }
52
67
  return response.data;
53
68
  }
54
69
  catch (error) {
70
+ if (this.verbose && this.supplyDataFunctions?.logger) {
71
+ this.supplyDataFunctions.logger.error('❌ Rooyai API call failed', {
72
+ error: error instanceof Error ? error.message : String(error),
73
+ });
74
+ }
55
75
  if (error instanceof Error) {
56
76
  throw new Error(`Failed to call Rooyai API: ${error.message}`);
57
77
  }
@@ -108,6 +128,12 @@ class RooyaiLangChainWrapper extends chat_models_1.BaseChatModel {
108
128
  };
109
129
  if (response.usage?.cost_usd !== undefined) {
110
130
  llmOutput.costUsd = response.usage.cost_usd;
131
+ if (this.verbose && this.supplyDataFunctions?.logger) {
132
+ this.supplyDataFunctions.logger.info('💰 API Cost', {
133
+ cost: response.usage.cost_usd,
134
+ totalTokens: response.usage.total_tokens,
135
+ });
136
+ }
111
137
  }
112
138
  const generation = {
113
139
  message: aiMessage,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "n8n-nodes-rooyai-chat",
3
- "version": "0.1.0",
3
+ "version": "0.3.0",
4
4
  "description": "n8n supply node for Rooyai Chat API - Provides Rooyai language models for use with Basic LLM Chain, AI Agent, and other AI nodes.",
5
5
  "keywords": [
6
6
  "n8n-community-node-package",
@@ -53,4 +53,4 @@
53
53
  "peerDependencies": {
54
54
  "n8n-workflow": "*"
55
55
  }
56
- }
56
+ }