lua-cli 3.0.0-alpha.10 → 3.0.0-alpha.11

This diff shows the changes between publicly released versions of a package as published to a supported registry. The information is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -40,35 +40,40 @@ export default class ChatApi extends HttpClient {
40
40
  if (!response.ok) {
41
41
  throw new Error(`HTTP error! status: ${response.status}`);
42
42
  }
43
- const reader = response.body?.getReader();
44
- if (!reader) {
43
+ if (!response.body) {
45
44
  throw new Error('Response body is not readable');
46
45
  }
46
+ const reader = response.body.getReader();
47
47
  const decoder = new TextDecoder();
48
48
  let buffer = '';
49
- while (true) {
50
- const { done, value } = await reader.read();
51
- if (done)
52
- break;
53
- // Decode the chunk and add to buffer
54
- buffer += decoder.decode(value, { stream: true });
55
- // Process complete lines (chunks are separated by newlines)
56
- const lines = buffer.split('\n');
57
- buffer = lines.pop() || ''; // Keep incomplete line in buffer
58
- for (const line of lines) {
59
- if (!line.trim())
60
- continue;
61
- try {
62
- const chunk = JSON.parse(line);
63
- // Only process text-delta chunks
64
- if (chunk.type === 'text-delta' && chunk.textDelta) {
65
- onChunk(chunk.textDelta);
49
+ try {
50
+ while (true) {
51
+ const { done, value } = await reader.read();
52
+ if (done)
53
+ break;
54
+ // Decode the chunk and add to buffer
55
+ buffer += decoder.decode(value, { stream: true });
56
+ // Process complete lines (chunks are separated by newlines)
57
+ const lines = buffer.split('\n');
58
+ buffer = lines.pop() || ''; // Keep incomplete line in buffer
59
+ for (const line of lines) {
60
+ if (!line.trim())
61
+ continue;
62
+ try {
63
+ const chunk = JSON.parse(line);
64
+ // Only process text-delta chunks and call onChunk immediately
65
+ if (chunk.type === 'text-delta' && chunk.textDelta) {
66
+ onChunk(chunk.textDelta);
67
+ }
68
+ }
69
+ catch (error) {
70
+ // Skip invalid JSON lines
66
71
  }
67
- }
68
- catch (error) {
69
- // Skip invalid JSON lines
70
72
  }
71
73
  }
72
74
  }
75
+ finally {
76
+ reader.releaseLock();
77
+ }
73
78
  }
74
79
  }
@@ -402,5 +402,5 @@ export declare const AI: {
402
402
  generate(context: string, messages: import("./interfaces/chat.js").ChatMessage[], agentId?: string): Promise<string>;
403
403
  };
404
404
  export { LuaSkill, LuaTool, LuaWebhook, LuaWebhookConfig, LuaJob, LuaJobConfig, JobSchedule, PreProcessor, PreProcessorConfig, PostProcessor, PostProcessorConfig, LuaAgent, LuaAgentConfig, BasketStatus, OrderStatus, env };
405
- export { JobInstance };
405
+ export { JobInstance, UserDataInstance, DataEntryInstance, ProductInstance, BasketInstance, OrderInstance };
406
406
  export { ChatHistoryMessage, ChatHistoryContent, ChatMessage, TextMessage, ImageMessage, FileMessage, PreProcessorOverride, PostProcessorOverride };
@@ -31,6 +31,11 @@ import { BasketStatus } from "./interfaces/baskets.js";
31
31
  import { OrderStatus } from "./interfaces/orders.js";
32
32
  import { getUserInstance, getDataInstance, getProductsInstance, getBasketsInstance, getOrderInstance, getJobInstance, getChatInstance, } from "./api/lazy-instances.js";
33
33
  import { JobInstance } from "./common/job.instance.js";
34
+ import ProductInstance from "./common/product.instance.js";
35
+ import DataEntryInstance from "./common/data.entry.instance.js";
36
+ import UserDataInstance from "./common/user.instance.js";
37
+ import BasketInstance from "./common/basket.instance.js";
38
+ import OrderInstance from "./common/order.instance.js";
34
39
  export const User = {
35
40
  /**
36
41
  * Retrieves current user data.
@@ -543,4 +548,4 @@ export const AI = {
543
548
  // Export skill classes and utilities
544
549
  export { LuaSkill, LuaWebhook, LuaJob, PreProcessor, PostProcessor, LuaAgent, BasketStatus, OrderStatus, env };
545
550
  // Export instance classes
546
- export { JobInstance };
551
+ export { JobInstance, UserDataInstance, DataEntryInstance, ProductInstance, BasketInstance, OrderInstance };
@@ -148,22 +148,41 @@ async function startChatLoop(chatEnv) {
148
148
  rl.prompt();
149
149
  return;
150
150
  }
151
+ // Start typing indicator
152
+ const typingInterval = startTypingIndicator();
153
+ let firstChunk = true;
151
154
  try {
152
- // Start showing the assistant response
153
- process.stdout.write('šŸŒ™ Assistant: ');
154
- let fullResponse = '';
155
+ // Create a callback that stops typing on first chunk
156
+ const handleChunk = (chunk) => {
157
+ if (firstChunk) {
158
+ // Stop typing indicator and show assistant label
159
+ stopTypingIndicator(typingInterval);
160
+ process.stdout.write('šŸŒ™ Assistant: ');
161
+ firstChunk = false;
162
+ }
163
+ // Write the chunk immediately
164
+ process.stdout.write(chunk);
165
+ };
155
166
  if (chatEnv.type === 'sandbox') {
156
167
  // Send to sandbox with skill overrides (streaming)
157
- fullResponse = await sendSandboxMessageStream(chatEnv, message);
168
+ await sendSandboxMessageStream(chatEnv, message, handleChunk);
158
169
  }
159
170
  else {
160
171
  // Send to production (streaming)
161
- fullResponse = await sendProductionMessageStream(chatEnv, message);
172
+ await sendProductionMessageStream(chatEnv, message, handleChunk);
173
+ }
174
+ // If no chunks arrived, stop the typing indicator
175
+ if (firstChunk) {
176
+ stopTypingIndicator(typingInterval);
162
177
  }
163
178
  // Add newline after response
164
179
  console.log('\n');
165
180
  }
166
181
  catch (error) {
182
+ // Make sure typing indicator is stopped on error
183
+ if (firstChunk) {
184
+ stopTypingIndicator(typingInterval);
185
+ }
167
186
  console.error(`\nāŒ Error: ${error instanceof Error ? error.message : 'Unknown error'}\n`);
168
187
  }
169
188
  rl.prompt();
@@ -200,14 +219,14 @@ function stopTypingIndicator(interval) {
200
219
  /**
201
220
  * Sends a message to the sandbox environment with skill overrides (streaming)
202
221
  */
203
- async function sendSandboxMessageStream(chatEnv, message) {
222
+ async function sendSandboxMessageStream(chatEnv, message, onChunk) {
204
223
  if (!chatEnv.deployData) {
205
- return "āŒ Sandbox environment not properly initialized.";
224
+ throw new Error("Sandbox environment not properly initialized.");
206
225
  }
207
226
  // Get all sandbox skill IDs for skill override
208
227
  const allSkillOverrides = await getAllSandboxSkillIds(chatEnv.deployData);
209
228
  if (allSkillOverrides.length === 0) {
210
- return "āŒ No sandbox skills found. Please try running the command again.";
229
+ throw new Error("No sandbox skills found. Please try running the command again.");
211
230
  }
212
231
  const chatRequest = {
213
232
  messages: [
@@ -226,24 +245,12 @@ async function sendSandboxMessageStream(chatEnv, message) {
226
245
  chatRequest.personaOverride = chatEnv.persona;
227
246
  }
228
247
  const chatApi = new ChatApi(BASE_URLS.CHAT, chatEnv.apiKey);
229
- let fullResponse = '';
230
- try {
231
- await chatApi.sendMessageStream(chatEnv.agentId, chatRequest, (chunk) => {
232
- // Write each chunk to stdout as it arrives
233
- process.stdout.write(chunk);
234
- fullResponse += chunk;
235
- });
236
- return fullResponse;
237
- }
238
- catch (error) {
239
- console.error(`\nāŒ Chat API error: ${error instanceof Error ? error.message : 'Unknown error'}`);
240
- return '';
241
- }
248
+ await chatApi.sendMessageStream(chatEnv.agentId, chatRequest, onChunk);
242
249
  }
243
250
  /**
244
251
  * Sends a message to the production environment (streaming)
245
252
  */
246
- async function sendProductionMessageStream(chatEnv, message) {
253
+ async function sendProductionMessageStream(chatEnv, message, onChunk) {
247
254
  const chatRequest = {
248
255
  messages: [
249
256
  {
@@ -257,17 +264,5 @@ async function sendProductionMessageStream(chatEnv, message) {
257
264
  postprocessorOverride: []
258
265
  };
259
266
  const chatApi = new ChatApi(BASE_URLS.CHAT, chatEnv.apiKey);
260
- let fullResponse = '';
261
- try {
262
- await chatApi.sendMessageStream(chatEnv.agentId, chatRequest, (chunk) => {
263
- // Write each chunk to stdout as it arrives
264
- process.stdout.write(chunk);
265
- fullResponse += chunk;
266
- });
267
- return fullResponse;
268
- }
269
- catch (error) {
270
- console.error(`\nāŒ Chat API error: ${error instanceof Error ? error.message : 'Unknown error'}`);
271
- return '';
272
- }
267
+ await chatApi.sendMessageStream(chatEnv.agentId, chatRequest, onChunk);
273
268
  }
@@ -231,10 +231,9 @@ export function createSandbox(options) {
231
231
  const executeString = typeof config.execute === 'function'
232
232
  ? config.execute.toString()
233
233
  : config.execute;
234
- console.log('IsDynamicJob', config.dynamic);
235
234
  // Create job with version and activation
236
235
  return await jobService.createJobInstance({
237
- name: config.name,
236
+ name: config.name + '_' + Date.now(),
238
237
  description: config.description,
239
238
  context: config.description || '',
240
239
  schedule: config.schedule,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "lua-cli",
3
- "version": "3.0.0-alpha.10",
3
+ "version": "3.0.0-alpha.11",
4
4
  "description": "Command-line interface for Lua AI platform - develop, test, and deploy LuaSkills with custom tools",
5
5
  "readmeFilename": "README.md",
6
6
  "main": "dist/api-exports.js",