rentabots-sdk 1.6.0 → 1.6.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -125,8 +125,72 @@ export declare class Agent extends EventEmitter {
125
125
  error: any;
126
126
  jobId?: undefined;
127
127
  }>;
128
+ setProgress(jobId: string, percent: number): Promise<{
129
+ success: boolean;
130
+ error?: string;
131
+ }>;
132
+ createRepo(jobId: string, name?: string): Promise<{
133
+ success: boolean;
134
+ repo?: any;
135
+ error?: string;
136
+ }>;
137
+ getRepo(jobId: string): Promise<{
138
+ success: boolean;
139
+ exists: boolean;
140
+ repo?: any;
141
+ error?: string;
142
+ }>;
143
+ uploadRepoFile(jobId: string, filePath: string, content: string | Buffer, isBlob?: boolean): Promise<{
144
+ success: boolean;
145
+ repoId?: string;
146
+ error?: string;
147
+ }>;
148
+ private isBinaryFile;
128
149
  deliver(jobId: string, files: string[]): Promise<void>;
150
+ verifyDeliverables(jobId: string): Promise<{
151
+ success: boolean;
152
+ verified: boolean;
153
+ files?: any[];
154
+ issues?: string[];
155
+ error?: string;
156
+ }>;
157
+ preDeliveryCheck(jobId: string): Promise<{
158
+ canDeliver: boolean;
159
+ checks: any[];
160
+ }>;
129
161
  markComplete(jobId: string): Promise<any>;
162
+ private llmClient?;
163
+ initLLM(config?: {
164
+ provider?: 'groq' | 'openai' | 'anthropic' | 'google' | 'nvidia';
165
+ apiKey?: string;
166
+ model?: string;
167
+ }): Promise<void>;
168
+ generate(prompt: string, options?: {
169
+ model?: string;
170
+ temperature?: number;
171
+ maxTokens?: number;
172
+ system?: string;
173
+ }): Promise<{
174
+ success: boolean;
175
+ text?: string;
176
+ error?: string;
177
+ }>;
178
+ analyzeRequirements(job: Job): Promise<{
179
+ success: boolean;
180
+ requirements?: any;
181
+ error?: string;
182
+ }>;
183
+ codeGenerator(prompt: string, language?: string): Promise<{
184
+ success: boolean;
185
+ code?: string;
186
+ error?: string;
187
+ }>;
188
+ reviewCode(code: string, requirements: string): Promise<{
189
+ success: boolean;
190
+ review?: string;
191
+ passed: boolean;
192
+ error?: string;
193
+ }>;
130
194
  bid(jobId: string, amount: number, message: string): Promise<any>;
131
195
  sendMessage(jobId: string, content: string): Promise<import("axios").AxiosResponse<any, any, {}>>;
132
196
  setTyping(jobId: string, isTyping?: boolean): Promise<void>;
package/dist/index.js CHANGED
@@ -38,6 +38,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
38
38
  Object.defineProperty(exports, "__esModule", { value: true });
39
39
  exports.Agent = exports.MessageSchema = exports.JobSchema = exports.JobStatusSchema = void 0;
40
40
  const axios_1 = __importDefault(require("axios"));
41
+ const llm_1 = require("./llm");
41
42
  const socket_io_client_1 = require("socket.io-client");
42
43
  const zod_1 = require("zod");
43
44
  const events_1 = require("events");
@@ -70,7 +71,7 @@ exports.MessageSchema = zod_1.z.object({
70
71
  })
71
72
  });
72
73
  // --- CORE SDK ENGINE ---
73
- let SDK_VERSION = '1.5.7'; // BUMP to v1.5.7
74
+ let SDK_VERSION = '1.6.2'; // fallback only — overwritten below by the "version" field read from package.json
74
75
  try {
75
76
  const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'));
76
77
  SDK_VERSION = pkg.version;
@@ -411,6 +412,66 @@ class Agent extends events_1.EventEmitter {
411
412
  return { success: false, error: e.response?.data?.error || e.message };
412
413
  }
413
414
  }
415
+ // --- 📊 PROGRESS TRACKING ---
416
+ async setProgress(jobId, percent) {
417
+ try {
418
+ const progress = Math.min(100, Math.max(0, Math.round(percent)));
419
+ await this.api.post(`jobs/${jobId}/progress`, { progress });
420
+ const job = this.activeMissions.get(jobId);
421
+ if (job) {
422
+ job.progress = progress;
423
+ this.activeMissions.set(jobId, job);
424
+ this.saveState();
425
+ }
426
+ return { success: true };
427
+ }
428
+ catch (e) {
429
+ return { success: false, error: e.response?.data?.error || e.message };
430
+ }
431
+ }
432
+ // --- 📦 REPO MANAGEMENT ---
433
+ async createRepo(jobId, name) {
434
+ try {
435
+ const res = await this.api.post(`jobs/${jobId}/repo`, { name });
436
+ this.logInternal(`Repo created: ${res.data.repo?.name}`);
437
+ return { success: true, repo: res.data.repo };
438
+ }
439
+ catch (e) {
440
+ return { success: false, error: e.response?.data?.error || e.message };
441
+ }
442
+ }
443
+ async getRepo(jobId) {
444
+ try {
445
+ const res = await this.api.get(`jobs/${jobId}/repo`);
446
+ return { success: true, exists: res.data.exists, repo: res.data.repo };
447
+ }
448
+ catch (e) {
449
+ if (e.response?.status === 404)
450
+ return { success: true, exists: false };
451
+ return { success: false, exists: false, error: e.response?.data?.error || e.message };
452
+ }
453
+ }
454
+ async uploadRepoFile(jobId, filePath, content, isBlob = false) {
455
+ try {
456
+ let finalContent;
457
+ if (Buffer.isBuffer(content)) {
458
+ finalContent = content.toString('base64');
459
+ isBlob = true;
460
+ }
461
+ else {
462
+ finalContent = String(content);
463
+ }
464
+ const res = await this.api.post(`jobs/${jobId}/repo/files`, { path: filePath, content: finalContent, isBlob });
465
+ return { success: true, repoId: res.data.repoId };
466
+ }
467
+ catch (e) {
468
+ return { success: false, error: e.response?.data?.error || e.message };
469
+ }
470
+ }
471
+ isBinaryFile(filePath) {
472
+ const textExts = ['.js', '.ts', '.json', '.md', '.txt', '.html', '.css', '.yml', '.yaml', '.toml'];
473
+ return !textExts.includes(path.extname(filePath).toLowerCase());
474
+ }
414
475
  async deliver(jobId, files) {
415
476
  const cwd = path.join(this.workspaceRoot, jobId);
416
477
  for (const file of files) {
@@ -421,6 +482,43 @@ class Agent extends events_1.EventEmitter {
421
482
  }
422
483
  }
423
484
  }
485
+ // --- ✅ VERIFICATION FLOW ---
486
+ async verifyDeliverables(jobId) {
487
+ try {
488
+ const cwd = path.join(this.workspaceRoot, jobId);
489
+ if (!fs.existsSync(cwd))
490
+ return { success: false, verified: false, error: 'No workspace' };
491
+ const issues = [];
492
+ const files = [];
493
+ const scanDir = (dir, rel = '') => {
494
+ fs.readdirSync(dir).forEach((item) => {
495
+ const full = path.join(dir, item);
496
+ const relPath = path.join(rel, item);
497
+ if (fs.statSync(full).isDirectory())
498
+ scanDir(full, relPath);
499
+ else
500
+ files.push({ path: relPath, size: fs.statSync(full).size });
501
+ });
502
+ };
503
+ scanDir(cwd);
504
+ if (files.length === 0)
505
+ issues.push('No deliverables');
506
+ if (!files.some(f => f.path.toLowerCase().includes('readme')))
507
+ issues.push('Missing README');
508
+ const verified = issues.length === 0;
509
+ return { success: true, verified, files, issues };
510
+ }
511
+ catch (e) {
512
+ return { success: false, verified: false, error: e.message };
513
+ }
514
+ }
515
+ async preDeliveryCheck(jobId) {
516
+ const verify = await this.verifyDeliverables(jobId);
517
+ const checks = [{ name: 'Deliverables', passed: verify.verified, details: verify.files?.length + ' files' }];
518
+ const job = this.activeMissions.get(jobId);
519
+ checks.push({ name: 'Progress', passed: (job?.progress || 0) >= 80, details: (job?.progress || 0) + '%' });
520
+ return { canDeliver: checks.every(c => c.passed), checks };
521
+ }
424
522
  async markComplete(jobId) {
425
523
  const res = await this.api.post(`jobs/${jobId}/complete`, { userId: this.agentId, role: 'agent' });
426
524
  if (res.data.success) {
@@ -432,6 +530,47 @@ class Agent extends events_1.EventEmitter {
432
530
  }
433
531
  return res.data;
434
532
  }
533
+ async initLLM(config) {
534
+ this.llmClient = new llm_1.LLMClient(config);
535
+ }
536
+ async generate(prompt, options = {}) {
537
+ if (!this.llmClient) {
538
+ try {
539
+ this.llmClient = new llm_1.LLMClient();
540
+ }
541
+ catch (e) {
542
+ return { success: false, error: e.message };
543
+ }
544
+ }
545
+ return this.llmClient.generate(prompt, options);
546
+ }
547
+ async analyzeRequirements(job) {
548
+ const prompt = 'Analyze job, return JSON with techStack, features, deliverables, timeline, risks. Title: ' + job.title + ' Desc: ' + job.description.slice(0, 500);
549
+ const res = await this.generate(prompt, { system: 'Extract requirements as JSON', temperature: 0.3 });
550
+ if (!res.success)
551
+ return res;
552
+ try {
553
+ const json = res.text?.match(/\{[^]*\}/)?.[0];
554
+ return { success: true, requirements: JSON.parse(json || '{}') };
555
+ }
556
+ catch {
557
+ return { success: false, error: 'Parse failed' };
558
+ }
559
+ }
560
+ async codeGenerator(prompt, language = 'javascript') {
561
+ const res = await this.generate(prompt, { system: 'Output only ' + language + ' code', temperature: 0.2, maxTokens: 4000 });
562
+ if (!res.success)
563
+ return res;
564
+ const code = res.text?.match(/```(?:\w+)?\n?([\s\S]*?)```/)?.[1] || res.text;
565
+ return { success: true, code: code?.trim() };
566
+ }
567
+ async reviewCode(code, requirements) {
568
+ const res = await this.generate('Review code. Reqs: ' + requirements + ' Code: ' + code.slice(0, 2000), { system: 'Code reviewer', temperature: 0.3 });
569
+ if (!res.success)
570
+ return { success: false, passed: false, error: res.error };
571
+ const passed = !!(res.text?.toUpperCase().includes('PASS') && !res.text?.toUpperCase().includes('FAIL'));
572
+ return { success: true, review: res.text, passed };
573
+ }
435
574
  async bid(jobId, amount, message) {
436
575
  try {
437
576
  const res = await this.api.post('bids', { jobId, amount, message });
package/dist/llm.d.ts ADDED
@@ -0,0 +1,34 @@
1
+ /**
2
+ * LLM Provider Configuration
3
+ * Agents use their own API keys - no server proxy needed
4
+ */
5
+ export interface LLMConfig {
6
+ provider: 'groq' | 'openai' | 'anthropic' | 'google' | 'nvidia';
7
+ apiKey: string;
8
+ baseUrl?: string;
9
+ model?: string;
10
+ }
11
+ export interface LLMResponse {
12
+ success: boolean;
13
+ text?: string;
14
+ error?: string;
15
+ usage?: {
16
+ promptTokens: number;
17
+ completionTokens: number;
18
+ };
19
+ }
20
+ export declare class LLMClient {
21
+ private config;
22
+ private client;
23
+ constructor(config?: Partial<LLMConfig>);
24
+ private detectProvider;
25
+ private getApiKey;
26
+ generate(prompt: string, options?: {
27
+ system?: string;
28
+ temperature?: number;
29
+ maxTokens?: number;
30
+ model?: string;
31
+ }): Promise<LLMResponse>;
32
+ private callAnthropic;
33
+ }
34
+ export default LLMClient;
package/dist/llm.js ADDED
@@ -0,0 +1,129 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.LLMClient = void 0;
7
+ const axios_1 = __importDefault(require("axios"));
8
// Per-provider endpoint presets. Every provider except Anthropic is driven
// through an OpenAI-compatible /chat/completions endpoint.
const PROVIDER_CONFIGS = {
    groq: {
        baseUrl: 'https://api.groq.com/openai/v1',
        defaultModel: 'llama-3.3-70b-versatile',
        apiFormat: 'openai'
    },
    openai: {
        baseUrl: 'https://api.openai.com/v1',
        defaultModel: 'gpt-4o-mini',
        apiFormat: 'openai'
    },
    anthropic: {
        baseUrl: 'https://api.anthropic.com',
        defaultModel: 'claude-3-sonnet-20240229',
        apiFormat: 'anthropic'
    },
    google: {
        // FIX: Gemini's OpenAI-compatible surface is served under
        // /v1beta/openai — without that suffix, POST {base}/chat/completions
        // hits a non-existent route and 404s.
        baseUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
        defaultModel: 'gemini-1.5-flash',
        apiFormat: 'openai'
    },
    nvidia: {
        baseUrl: 'https://integrate.api.nvidia.com/v1',
        defaultModel: 'meta/llama-3.1-405b-instruct',
        apiFormat: 'openai'
    }
};
35
/**
 * Thin multi-provider LLM client. Keys are read from the environment
 * (bring-your-own-key — requests go straight to the provider, no server proxy).
 */
class LLMClient {
    /**
     * @param {{provider?: string, apiKey?: string, baseUrl?: string, model?: string}} [config]
     * @throws {Error} when no API key can be resolved for the chosen provider.
     */
    constructor(config) {
        // Resolve provider and key: explicit config wins, then the environment.
        const provider = config?.provider || this.detectProvider();
        const apiKey = config?.apiKey || this.getApiKey(provider);
        if (!apiKey) {
            throw new Error(`No API key found for LLM provider '${provider}'. Set ${provider.toUpperCase()}_API_KEY or GROQ_API_KEY`);
        }
        const preset = PROVIDER_CONFIGS[provider];
        this.config = {
            provider,
            apiKey,
            baseUrl: config?.baseUrl || preset?.baseUrl,
            model: config?.model || preset?.defaultModel
        };
        // Anthropic authenticates via x-api-key; every other provider uses Bearer.
        const headers = provider === 'anthropic'
            ? { 'x-api-key': apiKey, 'anthropic-version': '2023-06-01' }
            : { 'Authorization': `Bearer ${apiKey}` };
        this.client = axios_1.default.create({ baseURL: this.config.baseUrl, headers });
    }
    /** Pick a provider by which API-key env var is set; groq is the default. */
    detectProvider() {
        const precedence = [
            ['ANTHROPIC_API_KEY', 'anthropic'],
            ['OPENAI_API_KEY', 'openai'],
            ['GOOGLE_API_KEY', 'google'],
            ['NVIDIA_API_KEY', 'nvidia']
        ];
        for (const [envVar, provider] of precedence) {
            if (process.env[envVar])
                return provider;
        }
        return 'groq';
    }
    /** Resolve the provider's key, falling back to GROQ_API_KEY then RENTABOTS_LLM_KEY. */
    getApiKey(provider) {
        const envMap = {
            groq: 'GROQ_API_KEY',
            openai: 'OPENAI_API_KEY',
            anthropic: 'ANTHROPIC_API_KEY',
            google: 'GOOGLE_API_KEY',
            nvidia: 'NVIDIA_API_KEY'
        };
        return process.env[envMap[provider]] || process.env.GROQ_API_KEY || process.env.RENTABOTS_LLM_KEY;
    }
    /**
     * Generate a completion. Routes to the Anthropic Messages API or an
     * OpenAI-compatible chat/completions endpoint depending on the provider.
     * Never rejects; failures come back as `{ success: false, error }`.
     * @param {string} prompt
     * @param {{system?: string, temperature?: number, maxTokens?: number, model?: string}} [options]
     * @returns {Promise<LLMResponse>}
     */
    async generate(prompt, options = {}) {
        const model = options.model || this.config.model;
        try {
            if (this.config.provider === 'anthropic') {
                return await this.callAnthropic(prompt, options, model);
            }
            // OpenAI-compatible format: optional system message first.
            const messages = [];
            if (options.system) {
                messages.push({ role: 'system', content: options.system });
            }
            messages.push({ role: 'user', content: prompt });
            const res = await this.client.post('/chat/completions', {
                model,
                messages,
                temperature: options.temperature ?? 0.7,
                max_tokens: options.maxTokens ?? 2048
            });
            const usage = res.data.usage
                ? {
                    promptTokens: res.data.usage.prompt_tokens,
                    completionTokens: res.data.usage.completion_tokens
                }
                : undefined;
            return { success: true, text: res.data.choices[0]?.message?.content, usage };
        }
        catch (error) {
            return {
                success: false,
                error: error.response?.data?.error?.message || error.message
            };
        }
    }
    /** Anthropic's Messages API takes a top-level `system` field and different usage keys. */
    async callAnthropic(prompt, options, model) {
        const res = await this.client.post('/v1/messages', {
            model,
            messages: [{ role: 'user', content: prompt }],
            system: options.system,
            temperature: options.temperature ?? 0.7,
            max_tokens: options.maxTokens ?? 2048
        });
        return {
            success: true,
            text: res.data.content[0]?.text,
            usage: {
                promptTokens: res.data.usage?.input_tokens || 0,
                completionTokens: res.data.usage?.output_tokens || 0
            }
        };
    }
}
128
+ exports.LLMClient = LLMClient;
129
+ exports.default = LLMClient;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "rentabots-sdk",
3
- "version": "1.6.0",
3
+ "version": "1.6.4",
4
4
  "description": "Official SDK for RentaBots AI Agent Marketplace",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",