rentabots-sdk 1.7.0 → 1.7.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +78 -93
- package/dist/index.js +186 -289
- package/dist/llm.d.ts +24 -0
- package/dist/llm.js +70 -0
- package/package.json +2 -2
package/dist/index.d.ts
CHANGED
|
@@ -1,12 +1,6 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
2
|
import { EventEmitter } from 'events';
|
|
3
3
|
import { ChildProcess } from 'child_process';
|
|
4
|
-
export interface LLMConfig {
|
|
5
|
-
provider: 'openclaw' | 'openai' | 'anthropic' | 'google' | 'groq' | 'mistral' | 'custom';
|
|
6
|
-
apiKey?: string;
|
|
7
|
-
model?: string;
|
|
8
|
-
baseUrl?: string;
|
|
9
|
-
}
|
|
10
4
|
export declare const JobStatusSchema: z.ZodEnum<{
|
|
11
5
|
open: "open";
|
|
12
6
|
in_progress: "in_progress";
|
|
@@ -59,8 +53,6 @@ export interface AgentOptions {
|
|
|
59
53
|
workspaceRoot?: string;
|
|
60
54
|
workerScriptPath?: string;
|
|
61
55
|
commandWhitelist?: string[];
|
|
62
|
-
llm?: LLMConfig;
|
|
63
|
-
capabilities?: string[];
|
|
64
56
|
}
|
|
65
57
|
export interface ExecuteOptions {
|
|
66
58
|
timeout?: number;
|
|
@@ -86,7 +78,7 @@ export declare class Agent extends EventEmitter {
|
|
|
86
78
|
private apiKey;
|
|
87
79
|
readonly baseUrl: string;
|
|
88
80
|
readonly socketUrl: string;
|
|
89
|
-
agentId
|
|
81
|
+
private agentId;
|
|
90
82
|
private api;
|
|
91
83
|
private socket;
|
|
92
84
|
private debug;
|
|
@@ -98,11 +90,9 @@ export declare class Agent extends EventEmitter {
|
|
|
98
90
|
completedMissions: Map<string, Job>;
|
|
99
91
|
private bidCache;
|
|
100
92
|
private seenMessages;
|
|
101
|
-
workers
|
|
93
|
+
private workers;
|
|
102
94
|
private autopilotTimer;
|
|
103
|
-
autopilotPaused
|
|
104
|
-
private llmConfig;
|
|
105
|
-
private capabilities;
|
|
95
|
+
private autopilotPaused;
|
|
106
96
|
constructor(options?: AgentOptions);
|
|
107
97
|
/**
|
|
108
98
|
* Establish grid connection and initialize sockets.
|
|
@@ -135,94 +125,89 @@ export declare class Agent extends EventEmitter {
|
|
|
135
125
|
error: any;
|
|
136
126
|
jobId?: undefined;
|
|
137
127
|
}>;
|
|
128
|
+
setProgress(jobId: string, percent: number): Promise<{
|
|
129
|
+
success: boolean;
|
|
130
|
+
error?: string;
|
|
131
|
+
}>;
|
|
132
|
+
createRepo(jobId: string, name?: string): Promise<{
|
|
133
|
+
success: boolean;
|
|
134
|
+
repo?: any;
|
|
135
|
+
error?: string;
|
|
136
|
+
}>;
|
|
137
|
+
getRepo(jobId: string): Promise<{
|
|
138
|
+
success: boolean;
|
|
139
|
+
exists: boolean;
|
|
140
|
+
repo?: any;
|
|
141
|
+
error?: string;
|
|
142
|
+
}>;
|
|
143
|
+
uploadRepoFile(jobId: string, filePath: string, content: string | Buffer, isBlob?: boolean): Promise<{
|
|
144
|
+
success: boolean;
|
|
145
|
+
repoId?: string;
|
|
146
|
+
error?: string;
|
|
147
|
+
}>;
|
|
148
|
+
downloadRepoFile(jobId: string, filePath: string): Promise<{
|
|
149
|
+
success: boolean;
|
|
150
|
+
content?: string;
|
|
151
|
+
isBlob?: boolean;
|
|
152
|
+
error?: string;
|
|
153
|
+
}>;
|
|
154
|
+
private isBinaryFile;
|
|
138
155
|
deliver(jobId: string, files: string[]): Promise<void>;
|
|
156
|
+
runTests(jobId: string, command?: string): Promise<{
|
|
157
|
+
success: boolean;
|
|
158
|
+
passed: boolean;
|
|
159
|
+
output?: string;
|
|
160
|
+
error?: string;
|
|
161
|
+
}>;
|
|
162
|
+
verifyDeliverables(jobId: string): Promise<{
|
|
163
|
+
success: boolean;
|
|
164
|
+
verified: boolean;
|
|
165
|
+
files?: any[];
|
|
166
|
+
issues?: string[];
|
|
167
|
+
error?: string;
|
|
168
|
+
}>;
|
|
169
|
+
preDeliveryCheck(jobId: string): Promise<{
|
|
170
|
+
canDeliver: boolean;
|
|
171
|
+
checks: any[];
|
|
172
|
+
recommendations?: string[];
|
|
173
|
+
}>;
|
|
139
174
|
markComplete(jobId: string): Promise<any>;
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
/**
|
|
152
|
-
* One-shot bid search: find open jobs matching skills and bid on them
|
|
153
|
-
*/
|
|
154
|
-
findAndBid(options?: {
|
|
155
|
-
skills?: string[];
|
|
156
|
-
minBudget?: number;
|
|
157
|
-
bidMessage?: string;
|
|
175
|
+
private llmClient?;
|
|
176
|
+
initLLM(config?: {
|
|
177
|
+
provider?: 'groq' | 'openai' | 'anthropic' | 'google' | 'nvidia';
|
|
178
|
+
apiKey?: string;
|
|
179
|
+
model?: string;
|
|
180
|
+
}): Promise<void>;
|
|
181
|
+
generate(prompt: string, options?: {
|
|
182
|
+
model?: string;
|
|
183
|
+
temperature?: number;
|
|
184
|
+
maxTokens?: number;
|
|
185
|
+
system?: string;
|
|
158
186
|
}): Promise<{
|
|
159
187
|
success: boolean;
|
|
160
|
-
|
|
161
|
-
error?:
|
|
162
|
-
}
|
|
188
|
+
text?: string;
|
|
189
|
+
error?: string;
|
|
190
|
+
}>;
|
|
191
|
+
askLLM(systemPrompt: string, userPrompt: string): Promise<string | null>;
|
|
192
|
+
analyzeRequirements(job: Job): Promise<{
|
|
163
193
|
success: boolean;
|
|
164
|
-
|
|
165
|
-
|
|
194
|
+
requirements?: any;
|
|
195
|
+
error?: string;
|
|
166
196
|
}>;
|
|
167
|
-
|
|
168
|
-
* Notify the agent's human owner (via API)
|
|
169
|
-
*/
|
|
170
|
-
notifyOwner(message: string): Promise<import("axios").AxiosResponse<any, any, {}> | undefined>;
|
|
171
|
-
/**
|
|
172
|
-
* Log a message to the agent's server-side log
|
|
173
|
-
*/
|
|
174
|
-
log(message: string, level?: string): Promise<import("axios").AxiosResponse<any, any, {}> | undefined>;
|
|
175
|
-
/**
|
|
176
|
-
* Alias: triggered when human hires this agent
|
|
177
|
-
*/
|
|
178
|
-
onHired(callback: (job: Job) => void): void;
|
|
179
|
-
/**
|
|
180
|
-
* Alias: triggered on incoming chat message
|
|
181
|
-
*/
|
|
182
|
-
onMessage(callback: (msg: Message) => void): void;
|
|
183
|
-
/**
|
|
184
|
-
* Upload a deliverable file to a mission
|
|
185
|
-
*/
|
|
186
|
-
uploadDeliverable(jobId: string, url: string, name: string): Promise<import("axios").AxiosResponse<any, any, {}> | undefined>;
|
|
187
|
-
/**
|
|
188
|
-
* Create a private repository for a mission
|
|
189
|
-
*/
|
|
190
|
-
createRepo(jobId: string, name?: string): Promise<any>;
|
|
191
|
-
/**
|
|
192
|
-
* Upload files to a mission's repository
|
|
193
|
-
*/
|
|
194
|
-
uploadToRepo(jobId: string, files: {
|
|
195
|
-
path: string;
|
|
196
|
-
content: string;
|
|
197
|
-
}[]): Promise<({
|
|
198
|
-
path: string;
|
|
197
|
+
codeGenerator(prompt: string, language?: string): Promise<{
|
|
199
198
|
success: boolean;
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
199
|
+
code?: string;
|
|
200
|
+
error?: string;
|
|
201
|
+
}>;
|
|
202
|
+
reviewCode(code: string, requirements: string): Promise<{
|
|
203
203
|
success: boolean;
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
* Ask the LLM brain a question. Works with OpenClaw or any custom API provider.
|
|
212
|
-
* @param systemPrompt System-level instructions
|
|
213
|
-
* @param userMessage The user/human message to respond to
|
|
214
|
-
* @returns The LLM response text, or null on failure
|
|
215
|
-
*/
|
|
216
|
-
askLLM(systemPrompt: string, userMessage: string): Promise<string | null>;
|
|
217
|
-
/**
|
|
218
|
-
* OpenClaw bridge: sends query to local OpenClaw instance
|
|
219
|
-
*/
|
|
220
|
-
private askOpenClaw;
|
|
221
|
-
/**
|
|
222
|
-
* Custom LLM API: supports OpenAI-compatible endpoints (OpenAI, Groq, Mistral, etc.)
|
|
223
|
-
* Also supports Anthropic and Google with adapter logic.
|
|
224
|
-
*/
|
|
225
|
-
private askCustomLLM;
|
|
204
|
+
review?: string;
|
|
205
|
+
passed: boolean;
|
|
206
|
+
error?: string;
|
|
207
|
+
}>;
|
|
208
|
+
bid(jobId: string, amount: number, message: string): Promise<any>;
|
|
209
|
+
sendMessage(jobId: string, content: string): Promise<import("axios").AxiosResponse<any, any, {}>>;
|
|
210
|
+
setTyping(jobId: string, isTyping?: boolean): Promise<void>;
|
|
226
211
|
private enrichJob;
|
|
227
212
|
private syncFromCloud;
|
|
228
213
|
private loadState;
|
package/dist/index.js
CHANGED
|
@@ -38,6 +38,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
38
38
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
39
|
exports.Agent = exports.MessageSchema = exports.JobSchema = exports.JobStatusSchema = void 0;
|
|
40
40
|
const axios_1 = __importDefault(require("axios"));
|
|
41
|
+
const llm_1 = require("./llm");
|
|
41
42
|
const socket_io_client_1 = require("socket.io-client");
|
|
42
43
|
const zod_1 = require("zod");
|
|
43
44
|
const events_1 = require("events");
|
|
@@ -45,22 +46,6 @@ const fs = __importStar(require("fs"));
|
|
|
45
46
|
const path = __importStar(require("path"));
|
|
46
47
|
const child_process_1 = require("child_process");
|
|
47
48
|
const sanitizer_1 = require("./utils/sanitizer");
|
|
48
|
-
const DEFAULT_MODELS = {
|
|
49
|
-
openclaw: 'default',
|
|
50
|
-
openai: 'gpt-4o-mini',
|
|
51
|
-
anthropic: 'claude-sonnet-4-20250514',
|
|
52
|
-
google: 'gemini-2.0-flash',
|
|
53
|
-
groq: 'llama-3.3-70b-versatile',
|
|
54
|
-
mistral: 'mistral-large-latest',
|
|
55
|
-
custom: 'gpt-4o-mini',
|
|
56
|
-
};
|
|
57
|
-
const PROVIDER_URLS = {
|
|
58
|
-
openai: 'https://api.openai.com/v1',
|
|
59
|
-
anthropic: 'https://api.anthropic.com/v1',
|
|
60
|
-
google: 'https://generativelanguage.googleapis.com/v1beta',
|
|
61
|
-
groq: 'https://api.groq.com/openai/v1',
|
|
62
|
-
mistral: 'https://api.mistral.ai/v1',
|
|
63
|
-
};
|
|
64
49
|
// --- SCHEMAS & TYPES ---
|
|
65
50
|
exports.JobStatusSchema = zod_1.z.enum(['open', 'in_progress', 'completed', 'archived', 'disputed']);
|
|
66
51
|
exports.JobSchema = zod_1.z.object({
|
|
@@ -86,7 +71,7 @@ exports.MessageSchema = zod_1.z.object({
|
|
|
86
71
|
})
|
|
87
72
|
});
|
|
88
73
|
// --- CORE SDK ENGINE ---
|
|
89
|
-
let SDK_VERSION = '1.
|
|
74
|
+
let SDK_VERSION = '1.7.6'; // BUMP to v1.5.7
|
|
90
75
|
try {
|
|
91
76
|
const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'));
|
|
92
77
|
SDK_VERSION = pkg.version;
|
|
@@ -115,32 +100,11 @@ class Agent extends events_1.EventEmitter {
|
|
|
115
100
|
// Autopilot state
|
|
116
101
|
this.autopilotTimer = null;
|
|
117
102
|
this.autopilotPaused = false;
|
|
118
|
-
this.apiKey = options.apiKey || process.env.RENTABOTS_API_KEY ||
|
|
119
|
-
this.baseUrl = (options.baseUrl ||
|
|
120
|
-
|
|
121
|
-
// Don't rely on brittle .replace('/api', ''). Use proper URL parsing.
|
|
122
|
-
if (process.env.RENTABOTS_SOCKET_URL) {
|
|
123
|
-
this.socketUrl = process.env.RENTABOTS_SOCKET_URL.replace(/\/$/, '');
|
|
124
|
-
}
|
|
125
|
-
else {
|
|
126
|
-
try {
|
|
127
|
-
const parsed = new URL(this.baseUrl);
|
|
128
|
-
this.socketUrl = `${parsed.protocol}//${parsed.host}`;
|
|
129
|
-
}
|
|
130
|
-
catch {
|
|
131
|
-
this.socketUrl = this.baseUrl.replace(/\/api.*$/, '');
|
|
132
|
-
}
|
|
133
|
-
}
|
|
103
|
+
this.apiKey = options.apiKey || process.env.RENTABOTS_API_KEY || '';
|
|
104
|
+
this.baseUrl = (options.baseUrl || 'https://rentabots.com/api').replace(/\/$/, '');
|
|
105
|
+
this.socketUrl = (process.env.RENTABOTS_SOCKET_URL || this.baseUrl.replace('/api', '')).replace(/\/$/, '');
|
|
134
106
|
this.debug = options.debug || false;
|
|
135
107
|
this.commandWhitelist = options.commandWhitelist || null;
|
|
136
|
-
this.capabilities = options.capabilities || [];
|
|
137
|
-
// --- 🧠 LLM CONFIGURATION ---
|
|
138
|
-
this.llmConfig = options.llm || {
|
|
139
|
-
provider: process.env.LLM_PROVIDER || 'openclaw',
|
|
140
|
-
apiKey: process.env.LLM_API_KEY || '',
|
|
141
|
-
model: process.env.LLM_MODEL || '',
|
|
142
|
-
baseUrl: process.env.LLM_BASE_URL || '',
|
|
143
|
-
};
|
|
144
108
|
// --- 🛡️ AGENT ISOLATION ---
|
|
145
109
|
// By default, create a unique workspace root if not provided.
|
|
146
110
|
// We will finalize this in connect() once we have the agentId.
|
|
@@ -371,26 +335,11 @@ class Agent extends events_1.EventEmitter {
|
|
|
371
335
|
async spawnWorker(job) {
|
|
372
336
|
if (!fs.existsSync(this.workerScriptPath))
|
|
373
337
|
throw new Error("Worker script not found.");
|
|
374
|
-
|
|
375
|
-
// CLI args have OS length limits (~8KB on Windows). Long job descriptions
|
|
376
|
-
// would cause silent spawn failures or data truncation.
|
|
377
|
-
const jobDataPath = path.join(this.workspaceRoot, `_job_${job.id}.json`);
|
|
378
|
-
fs.writeFileSync(jobDataPath, JSON.stringify(job));
|
|
379
|
-
const worker = (0, child_process_1.fork)(this.workerScriptPath, [jobDataPath], {
|
|
380
|
-
// --- 🛡️ FIX: stdin set to 'ignore' for background/daemon safety ---
|
|
381
|
-
// 'inherit' causes crashes when no TTY is available (nohup, systemd, PM2).
|
|
382
|
-
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
|
|
338
|
+
const worker = (0, child_process_1.fork)(this.workerScriptPath, [JSON.stringify(job)], {
|
|
383
339
|
env: { ...process.env, RENTABOTS_API_KEY: this.apiKey }
|
|
384
340
|
});
|
|
385
341
|
this.workers.set(job.id, worker);
|
|
386
|
-
worker.on('exit', () =>
|
|
387
|
-
this.workers.delete(job.id);
|
|
388
|
-
// Cleanup temp file
|
|
389
|
-
try {
|
|
390
|
-
fs.unlinkSync(jobDataPath);
|
|
391
|
-
}
|
|
392
|
-
catch { }
|
|
393
|
-
});
|
|
342
|
+
worker.on('exit', () => this.workers.delete(job.id));
|
|
394
343
|
return worker;
|
|
395
344
|
}
|
|
396
345
|
// --- ⚡ ACTIONS ---
|
|
@@ -463,294 +412,242 @@ class Agent extends events_1.EventEmitter {
|
|
|
463
412
|
return { success: false, error: e.response?.data?.error || e.message };
|
|
464
413
|
}
|
|
465
414
|
}
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
for (const file of files) {
|
|
469
|
-
const local = path.join(cwd, file);
|
|
470
|
-
if (fs.existsSync(local)) {
|
|
471
|
-
const content = fs.readFileSync(local, 'utf8');
|
|
472
|
-
await this.api.post(`jobs/${jobId}/repo/files`, { path: file, content });
|
|
473
|
-
}
|
|
474
|
-
}
|
|
475
|
-
}
|
|
476
|
-
async markComplete(jobId) {
|
|
477
|
-
const res = await this.api.post(`jobs/${jobId}/complete`, { userId: this.agentId, role: 'agent' });
|
|
478
|
-
if (res.data.success) {
|
|
479
|
-
const job = this.activeMissions.get(jobId);
|
|
480
|
-
if (job)
|
|
481
|
-
this.completedMissions.set(jobId, { ...job, status: 'completed' });
|
|
482
|
-
this.activeMissions.delete(jobId);
|
|
483
|
-
this.saveState();
|
|
484
|
-
}
|
|
485
|
-
return res.data;
|
|
486
|
-
}
|
|
487
|
-
async bid(jobId, amount, message) {
|
|
415
|
+
// --- 📊 PROGRESS TRACKING ---
|
|
416
|
+
async setProgress(jobId, percent) {
|
|
488
417
|
try {
|
|
489
|
-
const
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
this.
|
|
418
|
+
const progress = Math.min(100, Math.max(0, Math.round(percent)));
|
|
419
|
+
await this.api.post(`jobs/${jobId}/progress`, { progress });
|
|
420
|
+
const job = this.activeMissions.get(jobId);
|
|
421
|
+
if (job) {
|
|
422
|
+
job.progress = progress;
|
|
423
|
+
this.activeMissions.set(jobId, job);
|
|
495
424
|
this.saveState();
|
|
496
425
|
}
|
|
497
|
-
return
|
|
426
|
+
return { success: true };
|
|
498
427
|
}
|
|
499
428
|
catch (e) {
|
|
500
|
-
return { success: false };
|
|
429
|
+
return { success: false, error: e.response?.data?.error || e.message };
|
|
501
430
|
}
|
|
502
431
|
}
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
}
|
|
506
|
-
async setTyping(jobId, isTyping = true) {
|
|
507
|
-
this.socket?.emit('typing_state', { jobId, isTyping });
|
|
508
|
-
}
|
|
509
|
-
// --- 📡 MISSING METHODS (Fixed in v1.6.0) ---
|
|
510
|
-
/**
|
|
511
|
-
* Update mission progress (0-100%)
|
|
512
|
-
*/
|
|
513
|
-
async setProgress(jobId, progress) {
|
|
432
|
+
// --- 📦 REPO MANAGEMENT ---
|
|
433
|
+
async createRepo(jobId, name) {
|
|
514
434
|
try {
|
|
515
|
-
|
|
435
|
+
const res = await this.api.post(`jobs/${jobId}/repo`, { name });
|
|
436
|
+
this.logInternal(`Repo created: ${res.data.repo?.name}`);
|
|
437
|
+
return { success: true, repo: res.data.repo };
|
|
516
438
|
}
|
|
517
439
|
catch (e) {
|
|
518
|
-
|
|
440
|
+
return { success: false, error: e.response?.data?.error || e.message };
|
|
519
441
|
}
|
|
520
442
|
}
|
|
521
|
-
|
|
522
|
-
* Initialize isolated workspace for a mission, returns the path
|
|
523
|
-
*/
|
|
524
|
-
async initializeMission(jobId) {
|
|
525
|
-
const cwd = path.join(this.workspaceRoot, jobId);
|
|
526
|
-
if (!fs.existsSync(cwd))
|
|
527
|
-
fs.mkdirSync(cwd, { recursive: true });
|
|
528
|
-
this.logInternal(`Mission workspace initialized: ${cwd}`);
|
|
529
|
-
return cwd;
|
|
530
|
-
}
|
|
531
|
-
/**
|
|
532
|
-
* One-shot bid search: find open jobs matching skills and bid on them
|
|
533
|
-
*/
|
|
534
|
-
async findAndBid(options = {}) {
|
|
443
|
+
async getRepo(jobId) {
|
|
535
444
|
try {
|
|
536
|
-
const res = await this.api.get(
|
|
537
|
-
|
|
538
|
-
const skills = options.skills || this.capabilities || [];
|
|
539
|
-
let bidCount = 0;
|
|
540
|
-
for (const job of jobs) {
|
|
541
|
-
if (this.bidCache.has(job.id))
|
|
542
|
-
continue;
|
|
543
|
-
if (options.minBudget && job.budgetAmount < options.minBudget)
|
|
544
|
-
continue;
|
|
545
|
-
const matches = skills.length === 0 || skills.some(s => job.title.toLowerCase().includes(s.toLowerCase()) ||
|
|
546
|
-
job.description.toLowerCase().includes(s.toLowerCase()));
|
|
547
|
-
if (matches) {
|
|
548
|
-
const msg = options.bidMessage || `I am an autonomous agent with ${this.completedMissions.size} completed missions. Skills: ${skills.join(', ')}. Ready to deliver.`;
|
|
549
|
-
await this.bid(job.id, job.budgetAmount, msg);
|
|
550
|
-
bidCount++;
|
|
551
|
-
}
|
|
552
|
-
}
|
|
553
|
-
this.logInternal(`findAndBid: Placed ${bidCount} bids on ${jobs.length} open jobs.`);
|
|
554
|
-
return { success: true, bidsPlaced: bidCount };
|
|
445
|
+
const res = await this.api.get(`jobs/${jobId}/repo`);
|
|
446
|
+
return { success: true, exists: res.data.exists, repo: res.data.repo };
|
|
555
447
|
}
|
|
556
448
|
catch (e) {
|
|
557
|
-
|
|
558
|
-
|
|
449
|
+
if (e.response?.status === 404)
|
|
450
|
+
return { success: true, exists: false };
|
|
451
|
+
return { success: false, exists: false, error: e.response?.data?.error || e.message };
|
|
559
452
|
}
|
|
560
453
|
}
|
|
561
|
-
|
|
562
|
-
* Notify the agent's human owner (via API)
|
|
563
|
-
*/
|
|
564
|
-
async notifyOwner(message) {
|
|
454
|
+
async uploadRepoFile(jobId, filePath, content, isBlob = false) {
|
|
565
455
|
try {
|
|
566
|
-
|
|
456
|
+
let finalContent;
|
|
457
|
+
if (Buffer.isBuffer(content)) {
|
|
458
|
+
finalContent = content.toString('base64');
|
|
459
|
+
isBlob = true;
|
|
460
|
+
}
|
|
461
|
+
else {
|
|
462
|
+
finalContent = String(content);
|
|
463
|
+
}
|
|
464
|
+
const res = await this.api.post(`jobs/${jobId}/repo/files`, { path: filePath, content: finalContent, isBlob });
|
|
465
|
+
return { success: true, repoId: res.data.repoId };
|
|
567
466
|
}
|
|
568
467
|
catch (e) {
|
|
569
|
-
|
|
468
|
+
return { success: false, error: e.response?.data?.error || e.message };
|
|
570
469
|
}
|
|
571
470
|
}
|
|
572
|
-
|
|
573
|
-
* Log a message to the agent's server-side log
|
|
574
|
-
*/
|
|
575
|
-
async log(message, level = 'INFO') {
|
|
576
|
-
this.logInternal(`[${level}] ${message}`);
|
|
471
|
+
async downloadRepoFile(jobId, filePath) {
|
|
577
472
|
try {
|
|
578
|
-
|
|
473
|
+
const res = await this.api.get(`jobs/${jobId}/repo/files`, { params: { path: filePath } });
|
|
474
|
+
return { success: true, content: res.data.content, isBlob: res.data.isBlob };
|
|
579
475
|
}
|
|
580
476
|
catch (e) {
|
|
581
|
-
|
|
477
|
+
return { success: false, error: e.response?.data?.error || e.message };
|
|
582
478
|
}
|
|
583
479
|
}
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
onHired(callback) {
|
|
588
|
-
this.on('assignment', callback);
|
|
480
|
+
isBinaryFile(filePath) {
|
|
481
|
+
const textExts = ['.js', '.ts', '.json', '.md', '.txt', '.html', '.css', '.yml', '.yaml', '.toml'];
|
|
482
|
+
return !textExts.includes(path.extname(filePath).toLowerCase());
|
|
589
483
|
}
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
484
|
+
async deliver(jobId, files) {
|
|
485
|
+
const cwd = path.join(this.workspaceRoot, jobId);
|
|
486
|
+
for (const file of files) {
|
|
487
|
+
const local = path.join(cwd, file);
|
|
488
|
+
if (fs.existsSync(local)) {
|
|
489
|
+
const content = fs.readFileSync(local, 'utf8');
|
|
490
|
+
await this.api.post(`jobs/${jobId}/repo/files`, { path: file, content });
|
|
491
|
+
}
|
|
492
|
+
}
|
|
595
493
|
}
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
async uploadDeliverable(jobId, url, name) {
|
|
494
|
+
// --- ✅ VERIFICATION FLOW ---
|
|
495
|
+
// --- 🧪 TEST RUNNER ---
|
|
496
|
+
async runTests(jobId, command = 'npm test') {
|
|
600
497
|
try {
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
498
|
+
const cwd = path.join(this.workspaceRoot, jobId);
|
|
499
|
+
if (!fs.existsSync(cwd)) {
|
|
500
|
+
return { success: false, passed: false, error: 'Workspace not found' };
|
|
501
|
+
}
|
|
502
|
+
// Check if test command exists in package.json
|
|
503
|
+
const pkgPath = path.join(cwd, 'package.json');
|
|
504
|
+
let testCmd = command;
|
|
505
|
+
if (fs.existsSync(pkgPath)) {
|
|
506
|
+
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
|
|
507
|
+
if (!pkg.scripts?.test && command === 'npm test') {
|
|
508
|
+
return { success: true, passed: true, output: 'No tests configured' };
|
|
509
|
+
}
|
|
510
|
+
}
|
|
511
|
+
const result = await this.execute(jobId, testCmd.split(' ')[0], testCmd.split(' ').slice(1), { timeout: 120000 });
|
|
512
|
+
return { success: true, passed: result.exitCode === 0, output: result.output };
|
|
607
513
|
}
|
|
608
514
|
catch (e) {
|
|
609
|
-
|
|
515
|
+
return { success: false, passed: false, error: e.message };
|
|
610
516
|
}
|
|
611
517
|
}
|
|
612
|
-
|
|
613
|
-
* Create a private repository for a mission
|
|
614
|
-
*/
|
|
615
|
-
async createRepo(jobId, name) {
|
|
518
|
+
async verifyDeliverables(jobId) {
|
|
616
519
|
try {
|
|
617
|
-
const
|
|
618
|
-
|
|
619
|
-
|
|
520
|
+
const cwd = path.join(this.workspaceRoot, jobId);
|
|
521
|
+
if (!fs.existsSync(cwd))
|
|
522
|
+
return { success: false, verified: false, error: 'No workspace' };
|
|
523
|
+
const issues = [];
|
|
524
|
+
const files = [];
|
|
525
|
+
const scanDir = (dir, rel = '') => {
|
|
526
|
+
fs.readdirSync(dir).forEach((item) => {
|
|
527
|
+
const full = path.join(dir, item);
|
|
528
|
+
const relPath = path.join(rel, item);
|
|
529
|
+
if (fs.statSync(full).isDirectory())
|
|
530
|
+
scanDir(full, relPath);
|
|
531
|
+
else
|
|
532
|
+
files.push({ path: relPath, size: fs.statSync(full).size });
|
|
533
|
+
});
|
|
534
|
+
};
|
|
535
|
+
scanDir(cwd);
|
|
536
|
+
if (files.length === 0)
|
|
537
|
+
issues.push('No deliverables');
|
|
538
|
+
if (!files.some(f => f.path.toLowerCase().includes('readme')))
|
|
539
|
+
issues.push('Missing README');
|
|
540
|
+
const verified = issues.length === 0;
|
|
541
|
+
return { success: true, verified, files, issues };
|
|
620
542
|
}
|
|
621
543
|
catch (e) {
|
|
622
|
-
|
|
623
|
-
|
|
544
|
+
return { success: false, verified: false, error: e.message };
|
|
545
|
+
}
|
|
546
|
+
}
|
|
547
|
+
async preDeliveryCheck(jobId) {
|
|
548
|
+
const verify = await this.verifyDeliverables(jobId);
|
|
549
|
+
const testResult = await this.runTests(jobId);
|
|
550
|
+
const checks = [
|
|
551
|
+
{ name: 'Deliverables', passed: verify.verified, details: (verify.files?.length || 0) + ' files' },
|
|
552
|
+
{ name: 'Tests', passed: testResult.passed, details: testResult.passed ? 'Passed' : 'Failed/No tests' }
|
|
553
|
+
];
|
|
554
|
+
const job = this.activeMissions.get(jobId);
|
|
555
|
+
if (job?.progress && job.progress > 0) {
|
|
556
|
+
checks.push({ name: 'Progress', passed: job.progress >= 80, details: job.progress + '%' });
|
|
557
|
+
}
|
|
558
|
+
const recommendations = [];
|
|
559
|
+
if (!verify.verified && verify.issues)
|
|
560
|
+
recommendations.push(...verify.issues);
|
|
561
|
+
if (!testResult.passed && testResult.output)
|
|
562
|
+
recommendations.push('Tests failing: ' + testResult.output.slice(0, 100));
|
|
563
|
+
return {
|
|
564
|
+
canDeliver: checks.every(c => c.passed),
|
|
565
|
+
checks,
|
|
566
|
+
recommendations: recommendations.length > 0 ? recommendations : undefined
|
|
567
|
+
};
|
|
568
|
+
}
|
|
569
|
+
async markComplete(jobId) {
|
|
570
|
+
const res = await this.api.post(`jobs/${jobId}/complete`, { userId: this.agentId, role: 'agent' });
|
|
571
|
+
if (res.data.success) {
|
|
572
|
+
const job = this.activeMissions.get(jobId);
|
|
573
|
+
if (job)
|
|
574
|
+
this.completedMissions.set(jobId, { ...job, status: 'completed' });
|
|
575
|
+
this.activeMissions.delete(jobId);
|
|
576
|
+
this.saveState();
|
|
624
577
|
}
|
|
578
|
+
return res.data;
|
|
625
579
|
}
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
async
|
|
630
|
-
|
|
631
|
-
for (const file of files) {
|
|
580
|
+
async initLLM(config) {
|
|
581
|
+
this.llmClient = new llm_1.LLMClient(config);
|
|
582
|
+
}
|
|
583
|
+
async generate(prompt, options = {}) {
|
|
584
|
+
if (!this.llmClient) {
|
|
632
585
|
try {
|
|
633
|
-
|
|
634
|
-
results.push({ path: file.path, success: true });
|
|
586
|
+
this.llmClient = new llm_1.LLMClient();
|
|
635
587
|
}
|
|
636
588
|
catch (e) {
|
|
637
|
-
|
|
589
|
+
return { success: false, error: e.message };
|
|
638
590
|
}
|
|
639
591
|
}
|
|
640
|
-
return
|
|
592
|
+
return this.llmClient.generate(prompt, options);
|
|
641
593
|
}
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
594
|
+
async askLLM(systemPrompt, userPrompt) {
|
|
595
|
+
const res = await this.generate(userPrompt, {
|
|
596
|
+
system: systemPrompt,
|
|
597
|
+
temperature: 0.4,
|
|
598
|
+
maxTokens: 1200,
|
|
599
|
+
});
|
|
600
|
+
return res.success ? (res.text || null) : null;
|
|
601
|
+
}
|
|
602
|
+
async analyzeRequirements(job) {
|
|
603
|
+
const prompt = 'Analyze job, return JSON with techStack, features, deliverables, timeline, risks. Title: ' + job.title + ' Desc: ' + job.description.slice(0, 500);
|
|
604
|
+
const res = await this.generate(prompt, { system: 'Extract requirements as JSON', temperature: 0.3 });
|
|
605
|
+
if (!res.success)
|
|
606
|
+
return res;
|
|
646
607
|
try {
|
|
647
|
-
const
|
|
648
|
-
return
|
|
608
|
+
const json = res.text?.match(/\{[^]*\}/)?.[0];
|
|
609
|
+
return { success: true, requirements: JSON.parse(json || '{}') };
|
|
649
610
|
}
|
|
650
|
-
catch
|
|
651
|
-
return {
|
|
611
|
+
catch {
|
|
612
|
+
return { success: false, error: 'Parse failed' };
|
|
652
613
|
}
|
|
653
614
|
}
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
*/
|
|
661
|
-
async askLLM(systemPrompt, userMessage) {
|
|
662
|
-
const provider = this.llmConfig.provider;
|
|
663
|
-
if (provider === 'openclaw') {
|
|
664
|
-
return this.askOpenClaw(systemPrompt, userMessage);
|
|
665
|
-
}
|
|
666
|
-
else {
|
|
667
|
-
return this.askCustomLLM(systemPrompt, userMessage);
|
|
668
|
-
}
|
|
615
|
+
async codeGenerator(prompt, language = 'javascript') {
|
|
616
|
+
const res = await this.generate(prompt, { system: 'Output only ' + language + ' code', temperature: 0.2, maxTokens: 4000 });
|
|
617
|
+
if (!res.success)
|
|
618
|
+
return res;
|
|
619
|
+
const code = res.text?.match(/```(?:\w+)?\n?([\s\S]*?)```/)?.[1] || res.text;
|
|
620
|
+
return { success: true, code: code?.trim() };
|
|
669
621
|
}
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
const child = (0, child_process_1.spawn)('openclaw', ['session:chat', fullQuery], {
|
|
677
|
-
timeout: 60000,
|
|
678
|
-
shell: false
|
|
679
|
-
});
|
|
680
|
-
let output = '';
|
|
681
|
-
child.stdout?.on('data', (d) => output += d.toString());
|
|
682
|
-
child.stderr?.on('data', (d) => output += d.toString());
|
|
683
|
-
child.on('close', (code) => {
|
|
684
|
-
if (code === 0 && output.trim())
|
|
685
|
-
resolve(output.trim());
|
|
686
|
-
else
|
|
687
|
-
resolve(null);
|
|
688
|
-
});
|
|
689
|
-
child.on('error', () => resolve(null));
|
|
690
|
-
});
|
|
622
|
+
async reviewCode(code, requirements) {
|
|
623
|
+
const res = await this.generate('Review code. Reqs: ' + requirements + ' Code: ' + code.slice(0, 2000), { system: 'Code reviewer', temperature: 0.3 });
|
|
624
|
+
if (!res.success)
|
|
625
|
+
return { success: false, passed: false, error: res.error };
|
|
626
|
+
const passed = !!(res.text?.toUpperCase().includes('PASS') && !res.text?.toUpperCase().includes('FAIL'));
|
|
627
|
+
return { success: true, review: res.text, passed };
|
|
691
628
|
}
|
|
692
|
-
|
|
693
|
-
* Custom LLM API: supports OpenAI-compatible endpoints (OpenAI, Groq, Mistral, etc.)
|
|
694
|
-
* Also supports Anthropic and Google with adapter logic.
|
|
695
|
-
*/
|
|
696
|
-
async askCustomLLM(systemPrompt, userMessage) {
|
|
697
|
-
const provider = this.llmConfig.provider;
|
|
698
|
-
const apiKey = this.llmConfig.apiKey;
|
|
699
|
-
const model = this.llmConfig.model || DEFAULT_MODELS[provider] || 'gpt-4o-mini';
|
|
700
|
-
const baseUrl = this.llmConfig.baseUrl || PROVIDER_URLS[provider];
|
|
701
|
-
if (!apiKey) {
|
|
702
|
-
this.logInternal('LLM API key not configured. Falling back to basic responses.');
|
|
703
|
-
return null;
|
|
704
|
-
}
|
|
629
|
+
async bid(jobId, amount, message) {
|
|
705
630
|
try {
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
}, {
|
|
714
|
-
headers: {
|
|
715
|
-
'x-api-key': apiKey,
|
|
716
|
-
'anthropic-version': '2023-06-01',
|
|
717
|
-
'Content-Type': 'application/json'
|
|
718
|
-
},
|
|
719
|
-
timeout: 30000
|
|
720
|
-
});
|
|
721
|
-
return res.data.content?.[0]?.text || null;
|
|
722
|
-
}
|
|
723
|
-
else if (provider === 'google') {
|
|
724
|
-
// Google Gemini API
|
|
725
|
-
const res = await axios_1.default.post(`${baseUrl}/models/${model}:generateContent?key=${apiKey}`, {
|
|
726
|
-
contents: [{ parts: [{ text: `${systemPrompt}\n\n${userMessage}` }] }]
|
|
727
|
-
}, { timeout: 30000 });
|
|
728
|
-
return res.data.candidates?.[0]?.content?.parts?.[0]?.text || null;
|
|
729
|
-
}
|
|
730
|
-
else {
|
|
731
|
-
// OpenAI-compatible (OpenAI, Groq, Mistral, Custom)
|
|
732
|
-
const res = await axios_1.default.post(`${baseUrl}/chat/completions`, {
|
|
733
|
-
model,
|
|
734
|
-
messages: [
|
|
735
|
-
{ role: 'system', content: systemPrompt },
|
|
736
|
-
{ role: 'user', content: userMessage }
|
|
737
|
-
],
|
|
738
|
-
max_tokens: 1024
|
|
739
|
-
}, {
|
|
740
|
-
headers: {
|
|
741
|
-
'Authorization': `Bearer ${apiKey}`,
|
|
742
|
-
'Content-Type': 'application/json'
|
|
743
|
-
},
|
|
744
|
-
timeout: 30000
|
|
745
|
-
});
|
|
746
|
-
return res.data.choices?.[0]?.message?.content || null;
|
|
631
|
+
const res = await this.api.post('bids', { jobId, amount, message });
|
|
632
|
+
if (res.data.success) {
|
|
633
|
+
this.bidCache.add(jobId);
|
|
634
|
+
// 📡 JOIN ROOM IMMEDIATELY: Allows chatting before assignment
|
|
635
|
+
this.socket?.emit('join_mission', jobId);
|
|
636
|
+
this.logInternal(`Joined telemetry room for pending bid: ${jobId}`);
|
|
637
|
+
this.saveState();
|
|
747
638
|
}
|
|
639
|
+
return res.data;
|
|
748
640
|
}
|
|
749
641
|
catch (e) {
|
|
750
|
-
|
|
751
|
-
return null;
|
|
642
|
+
return { success: false };
|
|
752
643
|
}
|
|
753
644
|
}
|
|
645
|
+
async sendMessage(jobId, content) {
|
|
646
|
+
return this.api.post(`jobs/${jobId}/messages`, { content, senderId: this.agentId });
|
|
647
|
+
}
|
|
648
|
+
async setTyping(jobId, isTyping = true) {
|
|
649
|
+
this.socket?.emit('typing_state', { jobId, isTyping });
|
|
650
|
+
}
|
|
754
651
|
// --- UTILS ---
|
|
755
652
|
enrichJob(raw) {
|
|
756
653
|
return {
|
package/dist/llm.d.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/** Configuration for the standalone multi-provider LLM client. */
export interface LLMConfig {
    /** Hosted LLM provider to call. */
    provider: 'groq' | 'openai' | 'anthropic' | 'google' | 'nvidia';
    /** API key for the provider; resolved from environment variables when omitted at construction. */
    apiKey: string;
    /** Optional override for the provider's default REST base URL. */
    baseUrl?: string;
    /** Optional override for the provider's default model id. */
    model?: string;
}
/** Result envelope: `text` is set on success, `error` carries a message on failure. */
export interface LLMResponse {
    success: boolean;
    text?: string;
    error?: string;
}
/**
 * Minimal chat-completion client supporting Groq, OpenAI, Anthropic,
 * Google and NVIDIA endpoints. When constructed without an explicit
 * provider/key, both are auto-detected from well-known environment
 * variables (see detectProvider/getApiKey in dist/llm.js).
 */
export declare class LLMClient {
    private config;
    /** @throws Error when no API key can be resolved for the chosen provider. */
    constructor(config?: Partial<LLMConfig>);
    private detectProvider;
    private getApiKey;
    /**
     * Generate a completion for `prompt`. Does not throw on HTTP/provider
     * failure — errors are returned inside the LLMResponse envelope.
     */
    generate(prompt: string, options?: {
        system?: string;
        temperature?: number;
        maxTokens?: number;
        model?: string;
    }): Promise<LLMResponse>;
}
export default LLMClient;
|
package/dist/llm.js
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
"use strict";
// Interop helper emitted by tsc: wraps a CommonJS module so `import x from ...`
// style default access works whether or not the module is an ES module.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LLMClient = void 0;
const axios_1 = __importDefault(require("axios"));
// Per-provider presets: REST base URL plus the model used when none is configured.
const CONFIGS = {
    groq: { baseUrl: 'https://api.groq.com/openai/v1', defaultModel: 'llama-3.3-70b-versatile' },
    openai: { baseUrl: 'https://api.openai.com/v1', defaultModel: 'gpt-4o-mini' },
    anthropic: { baseUrl: 'https://api.anthropic.com', defaultModel: 'claude-3-sonnet-20240229' },
    google: { baseUrl: 'https://generativelanguage.googleapis.com/v1beta', defaultModel: 'gemini-1.5-flash' },
    nvidia: { baseUrl: 'https://integrate.api.nvidia.com/v1', defaultModel: 'meta/llama-3.1-405b-instruct' },
};
|
|
15
|
+
/**
 * Minimal multi-provider chat-completion client.
 * Anthropic uses its native Messages API; every other provider is driven
 * through the OpenAI-compatible /chat/completions endpoint.
 */
class LLMClient {
    /**
     * Build a client from an explicit (partial) config or from environment
     * variables. Explicit provider/apiKey/baseUrl/model win over detection
     * and presets.
     * @throws {Error} when no API key can be resolved for the provider.
     */
    constructor(config) {
        const provider = config?.provider || this.detectProvider();
        const apiKey = config?.apiKey || this.getApiKey(provider);
        if (!apiKey)
            throw new Error(`No API key for ${provider}`);
        const preset = CONFIGS[provider];
        // Fix: honor caller-supplied baseUrl/model. These fields are declared
        // on LLMConfig but were previously ignored — the preset always won.
        this.config = {
            provider,
            apiKey,
            baseUrl: config?.baseUrl || preset.baseUrl,
            model: config?.model || preset.defaultModel,
        };
    }
    // Pick a provider based on which well-known API-key env vars are set.
    // Falls back to 'groq'; the constructor still validates the key exists.
    detectProvider() {
        if (process.env.ANTHROPIC_API_KEY)
            return 'anthropic';
        if (process.env.OPENAI_API_KEY)
            return 'openai';
        if (process.env.GOOGLE_API_KEY)
            return 'google';
        if (process.env.NVIDIA_API_KEY)
            return 'nvidia';
        return 'groq';
    }
    // Resolve the environment variable holding the API key for a provider.
    getApiKey(provider) {
        const map = {
            groq: 'GROQ_API_KEY', openai: 'OPENAI_API_KEY', anthropic: 'ANTHROPIC_API_KEY',
            google: 'GOOGLE_API_KEY', nvidia: 'NVIDIA_API_KEY',
        };
        return process.env[map[provider]];
    }
    /**
     * Generate a completion for `prompt`.
     * Never throws: HTTP/provider failures are returned as
     * `{ success: false, error }`.
     */
    async generate(prompt, options = {}) {
        const model = options.model || this.config.model;
        // Anthropic authenticates via x-api-key + version header; all other
        // providers use OpenAI-style Bearer auth.
        const headers = this.config.provider === 'anthropic'
            ? { 'x-api-key': this.config.apiKey, 'anthropic-version': '2023-06-01' }
            : { 'Authorization': `Bearer ${this.config.apiKey}` };
        try {
            if (this.config.provider === 'anthropic') {
                const res = await axios_1.default.post(`${this.config.baseUrl}/v1/messages`, {
                    model, messages: [{ role: 'user', content: prompt }],
                    system: options.system, temperature: options.temperature ?? 0.7, max_tokens: options.maxTokens ?? 2048,
                }, { headers, timeout: 30000 }); // 30s cap, matching the SDK's previous inline LLM calls
                return { success: true, text: res.data.content?.[0]?.text };
            }
            const res = await axios_1.default.post(`${this.config.baseUrl}/chat/completions`, {
                model, messages: [
                    ...(options.system ? [{ role: 'system', content: options.system }] : []),
                    { role: 'user', content: prompt }
                ],
                temperature: options.temperature ?? 0.7, max_tokens: options.maxTokens ?? 2048,
            }, { headers, timeout: 30000 }); // prevent a hung request from stalling generate() forever
            return { success: true, text: res.data.choices?.[0]?.message?.content };
        }
        catch (e) {
            // Prefer the provider's structured error message when present.
            return { success: false, error: e.response?.data?.error?.message || e.message };
        }
    }
}
|
|
69
|
+
// Expose both a named and a default export so CommonJS consumers
// (`require(...).LLMClient`) and default-import interop both work.
exports.LLMClient = LLMClient;
exports.default = LLMClient;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "rentabots-sdk",
|
|
3
|
-
"version": "1.7.0",
|
|
3
|
+
"version": "1.7.6",
|
|
4
4
|
"description": "Official SDK for RentaBots AI Agent Marketplace",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -40,4 +40,4 @@
|
|
|
40
40
|
},
|
|
41
41
|
"author": "",
|
|
42
42
|
"license": "ISC"
|
|
43
|
-
}
|
|
43
|
+
}
|