codeep 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +576 -0
- package/dist/api/index.d.ts +8 -0
- package/dist/api/index.js +421 -0
- package/dist/app.d.ts +2 -0
- package/dist/app.js +1406 -0
- package/dist/components/AgentProgress.d.ts +33 -0
- package/dist/components/AgentProgress.js +97 -0
- package/dist/components/Export.d.ts +8 -0
- package/dist/components/Export.js +27 -0
- package/dist/components/Help.d.ts +2 -0
- package/dist/components/Help.js +3 -0
- package/dist/components/Input.d.ts +9 -0
- package/dist/components/Input.js +89 -0
- package/dist/components/Loading.d.ts +9 -0
- package/dist/components/Loading.js +31 -0
- package/dist/components/Login.d.ts +7 -0
- package/dist/components/Login.js +77 -0
- package/dist/components/Logo.d.ts +8 -0
- package/dist/components/Logo.js +89 -0
- package/dist/components/LogoutPicker.d.ts +8 -0
- package/dist/components/LogoutPicker.js +61 -0
- package/dist/components/Message.d.ts +10 -0
- package/dist/components/Message.js +234 -0
- package/dist/components/MessageList.d.ts +10 -0
- package/dist/components/MessageList.js +8 -0
- package/dist/components/ProjectPermission.d.ts +7 -0
- package/dist/components/ProjectPermission.js +52 -0
- package/dist/components/Search.d.ts +10 -0
- package/dist/components/Search.js +30 -0
- package/dist/components/SessionPicker.d.ts +9 -0
- package/dist/components/SessionPicker.js +88 -0
- package/dist/components/Sessions.d.ts +12 -0
- package/dist/components/Sessions.js +102 -0
- package/dist/components/Settings.d.ts +7 -0
- package/dist/components/Settings.js +162 -0
- package/dist/components/Status.d.ts +2 -0
- package/dist/components/Status.js +12 -0
- package/dist/config/config.test.d.ts +1 -0
- package/dist/config/config.test.js +157 -0
- package/dist/config/index.d.ts +121 -0
- package/dist/config/index.js +555 -0
- package/dist/config/providers.d.ts +43 -0
- package/dist/config/providers.js +82 -0
- package/dist/config/providers.test.d.ts +1 -0
- package/dist/config/providers.test.js +132 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +38 -0
- package/dist/utils/agent.d.ts +37 -0
- package/dist/utils/agent.js +627 -0
- package/dist/utils/codeReview.d.ts +36 -0
- package/dist/utils/codeReview.js +390 -0
- package/dist/utils/context.d.ts +49 -0
- package/dist/utils/context.js +216 -0
- package/dist/utils/diffPreview.d.ts +57 -0
- package/dist/utils/diffPreview.js +335 -0
- package/dist/utils/export.d.ts +19 -0
- package/dist/utils/export.js +94 -0
- package/dist/utils/git.d.ts +85 -0
- package/dist/utils/git.js +399 -0
- package/dist/utils/git.test.d.ts +1 -0
- package/dist/utils/git.test.js +193 -0
- package/dist/utils/history.d.ts +93 -0
- package/dist/utils/history.js +348 -0
- package/dist/utils/interactive.d.ts +34 -0
- package/dist/utils/interactive.js +206 -0
- package/dist/utils/keychain.d.ts +17 -0
- package/dist/utils/keychain.js +160 -0
- package/dist/utils/learning.d.ts +89 -0
- package/dist/utils/learning.js +330 -0
- package/dist/utils/logger.d.ts +33 -0
- package/dist/utils/logger.js +130 -0
- package/dist/utils/project.d.ts +86 -0
- package/dist/utils/project.js +415 -0
- package/dist/utils/project.test.d.ts +1 -0
- package/dist/utils/project.test.js +212 -0
- package/dist/utils/ratelimit.d.ts +26 -0
- package/dist/utils/ratelimit.js +132 -0
- package/dist/utils/ratelimit.test.d.ts +1 -0
- package/dist/utils/ratelimit.test.js +131 -0
- package/dist/utils/retry.d.ts +28 -0
- package/dist/utils/retry.js +109 -0
- package/dist/utils/retry.test.d.ts +1 -0
- package/dist/utils/retry.test.js +163 -0
- package/dist/utils/search.d.ts +11 -0
- package/dist/utils/search.js +29 -0
- package/dist/utils/shell.d.ts +45 -0
- package/dist/utils/shell.js +242 -0
- package/dist/utils/skills.d.ts +144 -0
- package/dist/utils/skills.js +1137 -0
- package/dist/utils/smartContext.d.ts +29 -0
- package/dist/utils/smartContext.js +441 -0
- package/dist/utils/tools.d.ts +224 -0
- package/dist/utils/tools.js +731 -0
- package/dist/utils/update.d.ts +22 -0
- package/dist/utils/update.js +128 -0
- package/dist/utils/validation.d.ts +28 -0
- package/dist/utils/validation.js +141 -0
- package/dist/utils/validation.test.d.ts +1 -0
- package/dist/utils/validation.test.js +164 -0
- package/dist/utils/verify.d.ts +78 -0
- package/dist/utils/verify.js +464 -0
- package/package.json +68 -0
package/dist/utils/project.test.js
@@ -0,0 +1,212 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import { mkdirSync, rmSync, writeFileSync } from 'fs';
+import { join } from 'path';
+import { tmpdir } from 'os';
+import { isProjectDirectory, getProjectType, scanDirectory, generateTreeStructure, readProjectFile, deleteProjectFile, writeProjectFile, } from './project';
+const TEST_DIR = join(tmpdir(), 'codeep-project-test-' + Date.now());
+describe('project utilities', () => {
+    beforeEach(() => {
+        mkdirSync(TEST_DIR, { recursive: true });
+    });
+    afterEach(() => {
+        try {
+            rmSync(TEST_DIR, { recursive: true, force: true });
+        }
+        catch { }
+    });
+    describe('isProjectDirectory', () => {
+        it('should return true for directory with package.json', () => {
+            writeFileSync(join(TEST_DIR, 'package.json'), '{}');
+            expect(isProjectDirectory(TEST_DIR)).toBe(true);
+        });
+        it('should return true for directory with Cargo.toml', () => {
+            writeFileSync(join(TEST_DIR, 'Cargo.toml'), '[package]');
+            expect(isProjectDirectory(TEST_DIR)).toBe(true);
+        });
+        it('should return true for directory with go.mod', () => {
+            writeFileSync(join(TEST_DIR, 'go.mod'), 'module test');
+            expect(isProjectDirectory(TEST_DIR)).toBe(true);
+        });
+        it('should return true for directory with .git', () => {
+            mkdirSync(join(TEST_DIR, '.git'), { recursive: true });
+            expect(isProjectDirectory(TEST_DIR)).toBe(true);
+        });
+        it('should return false for empty directory', () => {
+            expect(isProjectDirectory(TEST_DIR)).toBe(false);
+        });
+    });
+    describe('getProjectType', () => {
+        it('should detect TypeScript project', () => {
+            writeFileSync(join(TEST_DIR, 'package.json'), JSON.stringify({
+                devDependencies: { typescript: '^5.0.0' }
+            }));
+            expect(getProjectType(TEST_DIR)).toBe('TypeScript/Node.js');
+        });
+        it('should detect TypeScript project with tsconfig', () => {
+            writeFileSync(join(TEST_DIR, 'package.json'), '{}');
+            writeFileSync(join(TEST_DIR, 'tsconfig.json'), '{}');
+            expect(getProjectType(TEST_DIR)).toBe('TypeScript/Node.js');
+        });
+        it('should detect JavaScript project', () => {
+            writeFileSync(join(TEST_DIR, 'package.json'), '{}');
+            expect(getProjectType(TEST_DIR)).toBe('JavaScript/Node.js');
+        });
+        it('should detect Rust project', () => {
+            writeFileSync(join(TEST_DIR, 'Cargo.toml'), '[package]');
+            expect(getProjectType(TEST_DIR)).toBe('Rust');
+        });
+        it('should detect Go project', () => {
+            writeFileSync(join(TEST_DIR, 'go.mod'), 'module test');
+            expect(getProjectType(TEST_DIR)).toBe('Go');
+        });
+        it('should detect Python project', () => {
+            writeFileSync(join(TEST_DIR, 'requirements.txt'), 'flask');
+            expect(getProjectType(TEST_DIR)).toBe('Python');
+        });
+        it('should return Unknown for unrecognized project', () => {
+            expect(getProjectType(TEST_DIR)).toBe('Unknown');
+        });
+    });
+    describe('scanDirectory', () => {
+        it('should scan files in directory', () => {
+            writeFileSync(join(TEST_DIR, 'index.ts'), 'export {}');
+            writeFileSync(join(TEST_DIR, 'utils.js'), '// utils');
+            writeFileSync(join(TEST_DIR, 'package.json'), '{}');
+            const files = scanDirectory(TEST_DIR);
+            const names = files.map(f => f.name);
+            expect(names).toContain('index.ts');
+            expect(names).toContain('utils.js');
+            expect(names).toContain('package.json');
+        });
+        it('should ignore node_modules', () => {
+            mkdirSync(join(TEST_DIR, 'node_modules'), { recursive: true });
+            writeFileSync(join(TEST_DIR, 'node_modules', 'dep.js'), '// dep');
+            writeFileSync(join(TEST_DIR, 'index.ts'), 'export {}');
+            const files = scanDirectory(TEST_DIR);
+            const paths = files.map(f => f.relativePath);
+            expect(paths).not.toContain('node_modules/dep.js');
+            expect(paths.some(p => p.includes('node_modules'))).toBe(false);
+        });
+        it('should ignore .git directory', () => {
+            mkdirSync(join(TEST_DIR, '.git'), { recursive: true });
+            writeFileSync(join(TEST_DIR, '.git', 'config'), '# git config');
+            const files = scanDirectory(TEST_DIR);
+            const paths = files.map(f => f.relativePath);
+            expect(paths.some(p => p.includes('.git'))).toBe(false);
+        });
+        it('should respect maxDepth', () => {
+            mkdirSync(join(TEST_DIR, 'a', 'b', 'c', 'd'), { recursive: true });
+            writeFileSync(join(TEST_DIR, 'a', 'b', 'c', 'd', 'deep.ts'), '// deep');
+            writeFileSync(join(TEST_DIR, 'a', 'shallow.ts'), '// shallow');
+            const files = scanDirectory(TEST_DIR, 2);
+            const names = files.map(f => f.name);
+            expect(names).toContain('shallow.ts');
+            expect(names).not.toContain('deep.ts');
+        });
+        it('should include directories in results', () => {
+            mkdirSync(join(TEST_DIR, 'src'), { recursive: true });
+            writeFileSync(join(TEST_DIR, 'src', 'index.ts'), 'export {}');
+            const files = scanDirectory(TEST_DIR);
+            const dirs = files.filter(f => f.isDirectory);
+            expect(dirs.some(d => d.name === 'src')).toBe(true);
+        });
+    });
+    describe('generateTreeStructure', () => {
+        it('should generate tree structure', () => {
+            mkdirSync(join(TEST_DIR, 'src'), { recursive: true });
+            writeFileSync(join(TEST_DIR, 'src', 'index.ts'), 'export {}');
+            writeFileSync(join(TEST_DIR, 'package.json'), '{}');
+            const files = scanDirectory(TEST_DIR);
+            const tree = generateTreeStructure(files);
+            expect(tree).toContain('src/');
+            expect(tree).toContain('index.ts');
+            expect(tree).toContain('package.json');
+        });
+        it('should truncate when exceeding maxLines', () => {
+            // Create many files
+            for (let i = 0; i < 50; i++) {
+                writeFileSync(join(TEST_DIR, `file${i}.ts`), '// file');
+            }
+            const files = scanDirectory(TEST_DIR);
+            const tree = generateTreeStructure(files, 10);
+            // The function uses "(+N more)" format for truncation
+            expect(tree).toContain('more');
+        });
+    });
+    describe('readProjectFile', () => {
+        it('should read file content', () => {
+            const content = 'export const hello = "world";';
+            writeFileSync(join(TEST_DIR, 'test.ts'), content);
+            const result = readProjectFile(join(TEST_DIR, 'test.ts'));
+            expect(result).not.toBeNull();
+            expect(result.content).toBe(content);
+            expect(result.truncated).toBe(false);
+        });
+        it('should return null for non-existent file', () => {
+            const result = readProjectFile(join(TEST_DIR, 'nonexistent.ts'));
+            expect(result).toBeNull();
+        });
+        it('should return null for directories', () => {
+            mkdirSync(join(TEST_DIR, 'subdir'), { recursive: true });
+            const result = readProjectFile(join(TEST_DIR, 'subdir'));
+            expect(result).toBeNull();
+        });
+        it('should truncate large files', () => {
+            // File size is 60000 bytes, maxSize is 50000
+            // Function skips files > maxSize * 2 (100000), so 60000 should be read and truncated
+            const largeContent = 'x'.repeat(60000);
+            writeFileSync(join(TEST_DIR, 'large.ts'), largeContent);
+            const result = readProjectFile(join(TEST_DIR, 'large.ts'), 50000);
+            expect(result).not.toBeNull();
+            expect(result.truncated).toBe(true);
+            expect(result.content.length).toBeLessThan(largeContent.length);
+            expect(result.content).toContain('truncated');
+        });
+        it('should skip very large files', () => {
+            const hugeContent = 'x'.repeat(200000);
+            writeFileSync(join(TEST_DIR, 'huge.ts'), hugeContent);
+            const result = readProjectFile(join(TEST_DIR, 'huge.ts'), 50000);
+            expect(result).toBeNull();
+        });
+    });
+    describe('writeProjectFile', () => {
+        it('should write file content', () => {
+            const filePath = join(TEST_DIR, 'output.ts');
+            const content = 'export const test = true;';
+            const result = writeProjectFile(filePath, content);
+            expect(result.success).toBe(true);
+            const written = readProjectFile(filePath);
+            expect(written.content).toBe(content);
+        });
+        it('should create parent directories', () => {
+            const filePath = join(TEST_DIR, 'new', 'nested', 'dir', 'file.ts');
+            const content = '// nested file';
+            const result = writeProjectFile(filePath, content);
+            expect(result.success).toBe(true);
+            const written = readProjectFile(filePath);
+            expect(written.content).toBe(content);
+        });
+        it('should overwrite existing file', () => {
+            const filePath = join(TEST_DIR, 'existing.ts');
+            writeFileSync(filePath, 'old content');
+            const result = writeProjectFile(filePath, 'new content');
+            expect(result.success).toBe(true);
+            const written = readProjectFile(filePath);
+            expect(written.content).toBe('new content');
+        });
+    });
+    describe('deleteProjectFile', () => {
+        it('should delete existing file', () => {
+            const filePath = join(TEST_DIR, 'to-delete.ts');
+            writeFileSync(filePath, 'delete me');
+            const result = deleteProjectFile(filePath);
+            expect(result.success).toBe(true);
+            expect(readProjectFile(filePath)).toBeNull();
+        });
+        it('should return error for non-existent file', () => {
+            const result = deleteProjectFile(join(TEST_DIR, 'nonexistent.ts'));
+            expect(result.success).toBe(false);
+            expect(result.error).toContain('does not exist');
+        });
+    });
+});
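For orientation, a minimal sketch of how the project helpers exercised by these tests might be called. It relies only on calls visible in the test file; the relative import path, the maxDepth argument to scanDirectory, and the maxSize argument to readProjectFile are assumptions inferred from the tests, not documented API.

// Sketch only - import path and numeric arguments are assumptions inferred from the tests above.
import { join } from 'path';
import { isProjectDirectory, getProjectType, scanDirectory, generateTreeStructure, readProjectFile } from './project.js';

const cwd = process.cwd();
if (isProjectDirectory(cwd)) {
    console.log(`Project type: ${getProjectType(cwd)}`);
    // Scan two directory levels deep, then render a tree capped at 40 lines.
    const files = scanDirectory(cwd, 2);
    console.log(generateTreeStructure(files, 40));
    // Read a file with a ~50 kB cap; null means missing, a directory, or far too large.
    const pkg = readProjectFile(join(cwd, 'package.json'), 50000);
    if (pkg && !pkg.truncated) {
        console.log(pkg.content);
    }
}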
package/dist/utils/ratelimit.d.ts
@@ -0,0 +1,26 @@
+/**
+ * Rate limiting utility to prevent API abuse
+ */
+/**
+ * Update rate limiters with new config values
+ */
+export declare function updateRateLimits(): void;
+export declare function checkApiRateLimit(): {
+    allowed: boolean;
+    message?: string;
+};
+export declare function checkCommandRateLimit(): {
+    allowed: boolean;
+    message?: string;
+};
+export declare function resetRateLimits(): void;
+export declare function getRateLimitStatus(): {
+    api: {
+        count: number;
+        limit: number;
+    };
+    commands: {
+        count: number;
+        limit: number;
+    };
+};
package/dist/utils/ratelimit.js
@@ -0,0 +1,132 @@
+/**
+ * Rate limiting utility to prevent API abuse
+ */
+import { config } from '../config';
+class RateLimiter {
+    requests = [];
+    config;
+    constructor(config) {
+        this.config = config;
+    }
+    /**
+     * Get configuration
+     */
+    getConfig() {
+        return this.config;
+    }
+    /**
+     * Check if request is allowed
+     */
+    isAllowed() {
+        const now = Date.now();
+        const windowStart = now - this.config.windowMs;
+        // Remove old requests outside the window
+        this.requests = this.requests.filter(timestamp => timestamp > windowStart);
+        // Check if we're under the limit
+        if (this.requests.length >= this.config.maxRequests) {
+            return false;
+        }
+        // Add current request
+        this.requests.push(now);
+        return true;
+    }
+    /**
+     * Get time until next request is allowed (in ms)
+     */
+    getRetryAfter() {
+        if (this.requests.length === 0)
+            return 0;
+        const now = Date.now();
+        const oldestRequest = this.requests[0];
+        const windowStart = now - this.config.windowMs;
+        if (oldestRequest > windowStart) {
+            // Still in window, calculate wait time
+            return oldestRequest + this.config.windowMs - now;
+        }
+        return 0;
+    }
+    /**
+     * Reset the rate limiter
+     */
+    reset() {
+        this.requests = [];
+    }
+    /**
+     * Get current request count in window
+     */
+    getRequestCount() {
+        const now = Date.now();
+        const windowStart = now - this.config.windowMs;
+        return this.requests.filter(timestamp => timestamp > windowStart).length;
+    }
+    /**
+     * Get formatted retry message
+     */
+    getRetryMessage() {
+        const retryAfter = this.getRetryAfter();
+        if (retryAfter === 0)
+            return '';
+        const seconds = Math.ceil(retryAfter / 1000);
+        if (seconds < 60) {
+            return `Please wait ${seconds} second${seconds > 1 ? 's' : ''}`;
+        }
+        const minutes = Math.ceil(seconds / 60);
+        return `Please wait ${minutes} minute${minutes > 1 ? 's' : ''}`;
+    }
+}
+// Default rate limiters - configs are loaded from user settings
+let apiRateLimiter = new RateLimiter({
+    maxRequests: config.get('rateLimitApi') || 30,
+    windowMs: 60 * 1000, // per minute
+});
+let commandRateLimiter = new RateLimiter({
+    maxRequests: config.get('rateLimitCommands') || 100,
+    windowMs: 60 * 1000, // per minute
+});
+/**
+ * Update rate limiters with new config values
+ */
+export function updateRateLimits() {
+    apiRateLimiter = new RateLimiter({
+        maxRequests: config.get('rateLimitApi') || 30,
+        windowMs: 60 * 1000,
+    });
+    commandRateLimiter = new RateLimiter({
+        maxRequests: config.get('rateLimitCommands') || 100,
+        windowMs: 60 * 1000,
+    });
+}
+export function checkApiRateLimit() {
+    if (apiRateLimiter.isAllowed()) {
+        return { allowed: true };
+    }
+    return {
+        allowed: false,
+        message: `Rate limit exceeded (${apiRateLimiter.getConfig().maxRequests}/min). ${apiRateLimiter.getRetryMessage()}.`,
+    };
+}
+export function checkCommandRateLimit() {
+    if (commandRateLimiter.isAllowed()) {
+        return { allowed: true };
+    }
+    return {
+        allowed: false,
+        message: `Too many commands. ${commandRateLimiter.getRetryMessage()}.`,
+    };
+}
+export function resetRateLimits() {
+    apiRateLimiter.reset();
+    commandRateLimiter.reset();
+}
+export function getRateLimitStatus() {
+    return {
+        api: {
+            count: apiRateLimiter.getRequestCount(),
+            limit: apiRateLimiter.getConfig().maxRequests,
+        },
+        commands: {
+            count: commandRateLimiter.getRequestCount(),
+            limit: commandRateLimiter.getConfig().maxRequests,
+        },
+    };
+}
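As a rough illustration of how the exported rate-limit gate above could be wired into a caller; this is a sketch, not code from the package. The sendRequest wrapper and doFetch callback are hypothetical; only checkApiRateLimit and getRateLimitStatus come from ratelimit.js.

// Sketch only - sendRequest/doFetch are hypothetical; the rate-limit API is the one exported above.
import { checkApiRateLimit, getRateLimitStatus } from './ratelimit.js';

async function sendRequest(doFetch) {
    // Each allowed call occupies one slot in the 60-second sliding window (default 30/min).
    const gate = checkApiRateLimit();
    if (!gate.allowed) {
        // gate.message reads like: "Rate limit exceeded (30/min). Please wait 12 seconds."
        throw new Error(gate.message);
    }
    return doFetch();
}

const { api } = getRateLimitStatus();
console.log(`API calls used this window: ${api.count}/${api.limit}`);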
package/dist/utils/ratelimit.test.d.ts
@@ -0,0 +1 @@
+export {};
package/dist/utils/ratelimit.test.js
@@ -0,0 +1,131 @@
+import { describe, it, expect, beforeEach, vi } from 'vitest';
+import { checkApiRateLimit, checkCommandRateLimit, resetRateLimits, getRateLimitStatus, } from './ratelimit';
+describe('ratelimit utilities', () => {
+    beforeEach(() => {
+        // Reset rate limiters before each test
+        resetRateLimits();
+    });
+    describe('checkApiRateLimit', () => {
+        it('should allow requests under limit', () => {
+            const result = checkApiRateLimit();
+            expect(result.allowed).toBe(true);
+            expect(result.message).toBeUndefined();
+        });
+        it('should track request count', () => {
+            // Make some requests
+            checkApiRateLimit();
+            checkApiRateLimit();
+            checkApiRateLimit();
+            const status = getRateLimitStatus();
+            expect(status.api.count).toBe(3);
+        });
+        it('should block requests over limit', () => {
+            // Make requests up to the limit (default 30)
+            for (let i = 0; i < 30; i++) {
+                const result = checkApiRateLimit();
+                expect(result.allowed).toBe(true);
+            }
+            // Next request should be blocked
+            const result = checkApiRateLimit();
+            expect(result.allowed).toBe(false);
+            expect(result.message).toContain('Rate limit exceeded');
+        });
+        it('should include retry message when blocked', () => {
+            // Fill up the limit
+            for (let i = 0; i < 30; i++) {
+                checkApiRateLimit();
+            }
+            const result = checkApiRateLimit();
+            expect(result.message).toContain('Please wait');
+        });
+    });
+    describe('checkCommandRateLimit', () => {
+        it('should allow commands under limit', () => {
+            const result = checkCommandRateLimit();
+            expect(result.allowed).toBe(true);
+        });
+        it('should track command count', () => {
+            checkCommandRateLimit();
+            checkCommandRateLimit();
+            const status = getRateLimitStatus();
+            expect(status.commands.count).toBe(2);
+        });
+        it('should block commands over limit', () => {
+            // Make commands up to the limit (default 100)
+            for (let i = 0; i < 100; i++) {
+                checkCommandRateLimit();
+            }
+            const result = checkCommandRateLimit();
+            expect(result.allowed).toBe(false);
+            expect(result.message).toContain('Too many commands');
+        });
+    });
+    describe('resetRateLimits', () => {
+        it('should reset all counters', () => {
+            // Make some requests
+            checkApiRateLimit();
+            checkApiRateLimit();
+            checkCommandRateLimit();
+            checkCommandRateLimit();
+            checkCommandRateLimit();
+            let status = getRateLimitStatus();
+            expect(status.api.count).toBe(2);
+            expect(status.commands.count).toBe(3);
+            // Reset
+            resetRateLimits();
+            status = getRateLimitStatus();
+            expect(status.api.count).toBe(0);
+            expect(status.commands.count).toBe(0);
+        });
+        it('should allow requests after reset', () => {
+            // Fill up the limit
+            for (let i = 0; i < 30; i++) {
+                checkApiRateLimit();
+            }
+            // Should be blocked
+            expect(checkApiRateLimit().allowed).toBe(false);
+            // Reset
+            resetRateLimits();
+            // Should be allowed again
+            expect(checkApiRateLimit().allowed).toBe(true);
+        });
+    });
+    describe('getRateLimitStatus', () => {
+        it('should return current counts and limits', () => {
+            const status = getRateLimitStatus();
+            expect(status.api).toHaveProperty('count');
+            expect(status.api).toHaveProperty('limit');
+            expect(status.commands).toHaveProperty('count');
+            expect(status.commands).toHaveProperty('limit');
+        });
+        it('should return correct limits', () => {
+            const status = getRateLimitStatus();
+            expect(status.api.limit).toBe(30);
+            expect(status.commands.limit).toBe(100);
+        });
+        it('should update count after requests', () => {
+            expect(getRateLimitStatus().api.count).toBe(0);
+            checkApiRateLimit();
+            expect(getRateLimitStatus().api.count).toBe(1);
+            checkApiRateLimit();
+            expect(getRateLimitStatus().api.count).toBe(2);
+        });
+    });
+    describe('sliding window behavior', () => {
+        it('should expire old requests after window', async () => {
+            // This test uses fake timers to simulate time passing
+            vi.useFakeTimers();
+            // Make some requests
+            for (let i = 0; i < 30; i++) {
+                checkApiRateLimit();
+            }
+            // Should be blocked
+            expect(checkApiRateLimit().allowed).toBe(false);
+            // Advance time past the window (60 seconds)
+            vi.advanceTimersByTime(61000);
+            // Should be allowed again (old requests expired)
+            expect(checkApiRateLimit().allowed).toBe(true);
+            vi.useRealTimers();
+        });
+    });
+});
package/dist/utils/retry.d.ts
@@ -0,0 +1,28 @@
+/**
+ * Retry utility with exponential backoff
+ */
+export interface RetryOptions {
+    maxAttempts?: number;
+    baseDelay?: number;
+    maxDelay?: number;
+    shouldRetry?: (error: any) => boolean;
+    onRetry?: (attempt: number, error: any, delay: number) => void;
+}
+/**
+ * Check if error is a network error (no internet connection)
+ */
+export declare function isNetworkError(error: any): boolean;
+/**
+ * Check if error is a timeout error
+ */
+export declare function isTimeoutError(error: any): boolean;
+/**
+ * Wrap an async function with retry logic
+ */
+export declare function withRetry<T>(fn: () => Promise<T>, options?: RetryOptions): Promise<T>;
+/**
+ * Create a fetch with timeout
+ */
+export declare function fetchWithTimeout(url: string, options?: RequestInit & {
+    timeout?: number;
+}): Promise<Response>;
package/dist/utils/retry.js
@@ -0,0 +1,109 @@
+/**
+ * Retry utility with exponential backoff
+ */
+const defaultOptions = {
+    maxAttempts: 3,
+    baseDelay: 1000,
+    maxDelay: 10000,
+    shouldRetry: (error) => {
+        // Retry on network errors
+        if (error instanceof TypeError && error.message.includes('fetch')) {
+            return true;
+        }
+        // Retry on 5xx server errors
+        if (error.status && error.status >= 500) {
+            return true;
+        }
+        // Don't retry on 4xx client errors
+        if (error.status && error.status >= 400 && error.status < 500) {
+            return false;
+        }
+        // Retry on generic network errors
+        if (error.code === 'ECONNREFUSED' || error.code === 'ENOTFOUND' || error.code === 'ETIMEDOUT') {
+            return true;
+        }
+        return true; // Default to retry for unknown errors
+    },
+    onRetry: () => { },
+};
+/**
+ * Calculate delay with exponential backoff and jitter
+ */
+function calculateDelay(attempt, baseDelay, maxDelay) {
+    const exponentialDelay = baseDelay * Math.pow(2, attempt - 1);
+    const jitter = Math.random() * 0.3 * exponentialDelay; // 0-30% jitter
+    return Math.min(exponentialDelay + jitter, maxDelay);
+}
+/**
+ * Sleep for specified milliseconds
+ */
+function sleep(ms) {
+    return new Promise(resolve => setTimeout(resolve, ms));
+}
+/**
+ * Check if error is a network error (no internet connection)
+ */
+export function isNetworkError(error) {
+    if (error instanceof TypeError) {
+        const msg = error.message.toLowerCase();
+        return msg.includes('fetch') || msg.includes('network') || msg.includes('failed to fetch');
+    }
+    if (error.code) {
+        return ['ECONNREFUSED', 'ENOTFOUND', 'ETIMEDOUT', 'ENETUNREACH', 'ECONNRESET'].includes(error.code);
+    }
+    return false;
+}
+/**
+ * Check if error is a timeout error
+ */
+export function isTimeoutError(error) {
+    return error.name === 'AbortError' || error.code === 'ETIMEDOUT';
+}
+/**
+ * Wrap an async function with retry logic
+ */
+export async function withRetry(fn, options = {}) {
+    const opts = { ...defaultOptions, ...options };
+    let lastError;
+    for (let attempt = 1; attempt <= opts.maxAttempts; attempt++) {
+        try {
+            return await fn();
+        }
+        catch (error) {
+            lastError = error;
+            const err = error;
+            // Don't retry on abort (user cancelled)
+            if (err.name === 'AbortError') {
+                throw error;
+            }
+            // Check if we should retry
+            if (attempt < opts.maxAttempts && opts.shouldRetry(error)) {
+                const delay = calculateDelay(attempt, opts.baseDelay, opts.maxDelay);
+                opts.onRetry(attempt, error, delay);
+                await sleep(delay);
+            }
+            else {
+                throw error;
+            }
+        }
+    }
+    throw lastError;
+}
+/**
+ * Create a fetch with timeout
+ */
+export function fetchWithTimeout(url, options = {}) {
+    const { timeout = 30000, ...fetchOptions } = options;
+    const controller = new AbortController();
+    const timeoutId = setTimeout(() => controller.abort(), timeout);
+    // Merge abort signals if one was provided
+    if (fetchOptions.signal) {
+        fetchOptions.signal.addEventListener('abort', () => controller.abort());
+    }
+    return fetch(url, {
+        ...fetchOptions,
+        signal: controller.signal,
+    }).finally(() => {
+        clearTimeout(timeoutId);
+    });
+}
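A short sketch of how withRetry and fetchWithTimeout compose under the defaults defined above (3 attempts, 1 s base delay with up to 30% jitter, 10 s cap). The URL, timeout value, and logging callback are illustrative, not part of the package.

// Sketch only - URL, timeout, and callbacks are illustrative values.
import { withRetry, fetchWithTimeout, isNetworkError } from './retry.js';

const response = await withRetry(
    // Abort the underlying request after 5 s; withRetry rethrows AbortError instead of retrying it.
    () => fetchWithTimeout('https://example.com/health', { timeout: 5000 }),
    {
        maxAttempts: 3,
        shouldRetry: (error) => isNetworkError(error) || (error.status !== undefined && error.status >= 500),
        onRetry: (attempt, error, delay) => console.warn(`Attempt ${attempt} failed, retrying in ${Math.round(delay)} ms`),
    }
);
console.log(response.status);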
package/dist/utils/retry.test.d.ts
@@ -0,0 +1 @@
+export {};