@renseiai/agentfactory-linear 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +91 -0
- package/dist/src/agent-client-project-repo.test.d.ts +2 -0
- package/dist/src/agent-client-project-repo.test.d.ts.map +1 -0
- package/dist/src/agent-client-project-repo.test.js +153 -0
- package/dist/src/agent-client.d.ts +261 -0
- package/dist/src/agent-client.d.ts.map +1 -0
- package/dist/src/agent-client.js +902 -0
- package/dist/src/agent-session.d.ts +303 -0
- package/dist/src/agent-session.d.ts.map +1 -0
- package/dist/src/agent-session.js +969 -0
- package/dist/src/checkbox-utils.d.ts +88 -0
- package/dist/src/checkbox-utils.d.ts.map +1 -0
- package/dist/src/checkbox-utils.js +120 -0
- package/dist/src/circuit-breaker.d.ts +76 -0
- package/dist/src/circuit-breaker.d.ts.map +1 -0
- package/dist/src/circuit-breaker.js +229 -0
- package/dist/src/circuit-breaker.test.d.ts +2 -0
- package/dist/src/circuit-breaker.test.d.ts.map +1 -0
- package/dist/src/circuit-breaker.test.js +292 -0
- package/dist/src/constants.d.ts +87 -0
- package/dist/src/constants.d.ts.map +1 -0
- package/dist/src/constants.js +101 -0
- package/dist/src/defaults/auto-trigger.d.ts +35 -0
- package/dist/src/defaults/auto-trigger.d.ts.map +1 -0
- package/dist/src/defaults/auto-trigger.js +36 -0
- package/dist/src/defaults/index.d.ts +12 -0
- package/dist/src/defaults/index.d.ts.map +1 -0
- package/dist/src/defaults/index.js +11 -0
- package/dist/src/defaults/priority.d.ts +20 -0
- package/dist/src/defaults/priority.d.ts.map +1 -0
- package/dist/src/defaults/priority.js +37 -0
- package/dist/src/defaults/prompts.d.ts +42 -0
- package/dist/src/defaults/prompts.d.ts.map +1 -0
- package/dist/src/defaults/prompts.js +310 -0
- package/dist/src/defaults/prompts.test.d.ts +2 -0
- package/dist/src/defaults/prompts.test.d.ts.map +1 -0
- package/dist/src/defaults/prompts.test.js +263 -0
- package/dist/src/defaults/work-type-detection.d.ts +19 -0
- package/dist/src/defaults/work-type-detection.d.ts.map +1 -0
- package/dist/src/defaults/work-type-detection.js +93 -0
- package/dist/src/errors.d.ts +91 -0
- package/dist/src/errors.d.ts.map +1 -0
- package/dist/src/errors.js +173 -0
- package/dist/src/frontend-adapter.d.ts +168 -0
- package/dist/src/frontend-adapter.d.ts.map +1 -0
- package/dist/src/frontend-adapter.js +314 -0
- package/dist/src/frontend-adapter.test.d.ts +2 -0
- package/dist/src/frontend-adapter.test.d.ts.map +1 -0
- package/dist/src/frontend-adapter.test.js +545 -0
- package/dist/src/index.d.ts +28 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +30 -0
- package/dist/src/issue-tracker-proxy.d.ts +140 -0
- package/dist/src/issue-tracker-proxy.d.ts.map +1 -0
- package/dist/src/issue-tracker-proxy.js +10 -0
- package/dist/src/platform-adapter.d.ts +132 -0
- package/dist/src/platform-adapter.d.ts.map +1 -0
- package/dist/src/platform-adapter.js +260 -0
- package/dist/src/platform-adapter.test.d.ts +2 -0
- package/dist/src/platform-adapter.test.d.ts.map +1 -0
- package/dist/src/platform-adapter.test.js +468 -0
- package/dist/src/proxy-client.d.ts +103 -0
- package/dist/src/proxy-client.d.ts.map +1 -0
- package/dist/src/proxy-client.js +191 -0
- package/dist/src/rate-limiter.d.ts +64 -0
- package/dist/src/rate-limiter.d.ts.map +1 -0
- package/dist/src/rate-limiter.js +163 -0
- package/dist/src/rate-limiter.test.d.ts +2 -0
- package/dist/src/rate-limiter.test.d.ts.map +1 -0
- package/dist/src/rate-limiter.test.js +217 -0
- package/dist/src/retry.d.ts +59 -0
- package/dist/src/retry.d.ts.map +1 -0
- package/dist/src/retry.js +82 -0
- package/dist/src/types.d.ts +492 -0
- package/dist/src/types.d.ts.map +1 -0
- package/dist/src/types.js +143 -0
- package/dist/src/utils.d.ts +52 -0
- package/dist/src/utils.d.ts.map +1 -0
- package/dist/src/utils.js +277 -0
- package/dist/src/webhook-types.d.ts +308 -0
- package/dist/src/webhook-types.d.ts.map +1 -0
- package/dist/src/webhook-types.js +46 -0
- package/package.json +70 -0
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Proxy Issue Tracker Client
|
|
3
|
+
*
|
|
4
|
+
* Drop-in replacement for LinearAgentClient that routes all calls
|
|
5
|
+
* through the centralized dashboard proxy endpoint instead of calling
|
|
6
|
+
* the issue tracker API directly.
|
|
7
|
+
*
|
|
8
|
+
* Used when `AGENTFACTORY_API_URL` env var is set.
|
|
9
|
+
*
|
|
10
|
+
* Benefits:
|
|
11
|
+
* - Zero direct API credentials needed on the agent side
|
|
12
|
+
* - Single shared rate limiter and circuit breaker on the proxy
|
|
13
|
+
* - OAuth token resolution stays server-side
|
|
14
|
+
* - Platform-agnostic: agents don't need to know Linear exists
|
|
15
|
+
*/
|
|
16
|
+
/**
 * Issue tracker client that proxies all calls through the dashboard server.
 *
 * Implements the same public interface as LinearAgentClient but serializes
 * calls as JSON and sends them to POST /api/issue-tracker-proxy.
 *
 * All returned objects are plain JSON (no lazy-loaded SDK relations).
 */
export class ProxyIssueTrackerClient {
    apiUrl;
    apiKey;
    organizationId;
    timeoutMs;
    /**
     * @param config - `{ apiUrl, apiKey, organizationId?, timeoutMs? }`.
     *   A single trailing slash on `apiUrl` is stripped so path joining in
     *   `call()` never produces `//api/...`. `timeoutMs` defaults to 30s.
     */
    constructor(config) {
        this.apiUrl = config.apiUrl.replace(/\/$/, '');
        this.apiKey = config.apiKey;
        this.organizationId = config.organizationId;
        this.timeoutMs = config.timeoutMs ?? 30_000;
    }
    // =========================================================================
    // Issue operations
    // =========================================================================
    async getIssue(issueIdOrIdentifier) {
        return this.call('getIssue', [issueIdOrIdentifier]);
    }
    async updateIssue(issueId, data) {
        return this.call('updateIssue', [issueId, data]);
    }
    async createIssue(input) {
        return this.call('createIssue', [input]);
    }
    async unassignIssue(issueId) {
        return this.call('unassignIssue', [issueId]);
    }
    // =========================================================================
    // Status operations
    // =========================================================================
    async getTeamStatuses(teamId) {
        return this.call('getTeamStatuses', [teamId]);
    }
    async updateIssueStatus(issueId, statusName) {
        return this.call('updateIssueStatus', [issueId, statusName]);
    }
    // =========================================================================
    // Comment operations
    // =========================================================================
    async createComment(issueId, body) {
        return this.call('createComment', [issueId, body]);
    }
    async getIssueComments(issueId) {
        return this.call('getIssueComments', [issueId]);
    }
    // =========================================================================
    // Agent session operations
    // =========================================================================
    async createAgentActivity(input) {
        return this.call('createAgentActivity', [input]);
    }
    async updateAgentSession(input) {
        return this.call('updateAgentSession', [input]);
    }
    async createAgentSessionOnIssue(input) {
        return this.call('createAgentSessionOnIssue', [input]);
    }
    // =========================================================================
    // Relation operations
    // =========================================================================
    async createIssueRelation(input) {
        return this.call('createIssueRelation', [input]);
    }
    async getIssueRelations(issueId) {
        return this.call('getIssueRelations', [issueId]);
    }
    async deleteIssueRelation(relationId) {
        return this.call('deleteIssueRelation', [relationId]);
    }
    // =========================================================================
    // Sub-issue operations
    // =========================================================================
    async getSubIssues(issueIdOrIdentifier) {
        return this.call('getSubIssues', [issueIdOrIdentifier]);
    }
    async getSubIssueStatuses(issueIdOrIdentifier) {
        return this.call('getSubIssueStatuses', [issueIdOrIdentifier]);
    }
    async getSubIssueGraph(issueIdOrIdentifier) {
        return this.call('getSubIssueGraph', [issueIdOrIdentifier]);
    }
    async isParentIssue(issueIdOrIdentifier) {
        return this.call('isParentIssue', [issueIdOrIdentifier]);
    }
    async isChildIssue(issueIdOrIdentifier) {
        return this.call('isChildIssue', [issueIdOrIdentifier]);
    }
    // =========================================================================
    // Project operations
    // =========================================================================
    async listProjectIssues(project) {
        return this.call('listProjectIssues', [project]);
    }
    async getProjectRepositoryUrl(projectId) {
        return this.call('getProjectRepositoryUrl', [projectId]);
    }
    // =========================================================================
    // Identity operations
    // =========================================================================
    async getViewer() {
        return this.call('getViewer', []);
    }
    async getTeam(teamIdOrKey) {
        return this.call('getTeam', [teamIdOrKey]);
    }
    // =========================================================================
    // Core RPC method
    // =========================================================================
    /**
     * Serialize one method invocation as `{ method, args, organizationId }`
     * and POST it to the dashboard proxy endpoint.
     *
     * The request is aborted after `timeoutMs` via AbortController. On a
     * `{ success: false }` envelope the thrown Error carries `code`,
     * `retryable`, and the HTTP `status` for callers that dispatch on them.
     *
     * @param method - Remote method name understood by the proxy.
     * @param args - Positional arguments, JSON-serializable.
     * @returns The `data` payload of a successful proxy envelope.
     * @throws Error on timeout, non-JSON responses, or proxy-reported failure.
     */
    async call(method, args) {
        const body = {
            method,
            args,
            organizationId: this.organizationId,
        };
        const controller = new AbortController();
        const timeout = setTimeout(() => controller.abort(), this.timeoutMs);
        try {
            const response = await fetch(`${this.apiUrl}/api/issue-tracker-proxy`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    Authorization: `Bearer ${this.apiKey}`,
                },
                body: JSON.stringify(body),
                signal: controller.signal,
            });
            let result;
            try {
                result = (await response.json());
            }
            catch (parseError) {
                // Gateways/proxies can answer with HTML or an empty body on
                // failure; surface the HTTP status instead of letting an
                // opaque JSON SyntaxError escape to the caller.
                const err = new Error(`[ProxyClient] Non-JSON response (HTTP ${response.status}) for ${method}`, { cause: parseError });
                err.status = response.status;
                err.retryable = response.status === 429 || response.status >= 500;
                throw err;
            }
            if (!result.success) {
                const error = result.error ?? { code: 'UNKNOWN', message: 'Unknown error', retryable: false };
                const err = new Error(`[ProxyClient] ${error.code}: ${error.message}`);
                err.code = error.code;
                err.retryable = error.retryable;
                err.status = response.status;
                throw err;
            }
            return result.data;
        }
        catch (error) {
            if (error instanceof Error && error.name === 'AbortError') {
                throw new Error(`[ProxyClient] Request timeout after ${this.timeoutMs}ms for ${method}`);
            }
            throw error;
        }
        finally {
            // Always cancel the abort timer so it cannot keep the process alive.
            clearTimeout(timeout);
        }
    }
}
|
|
171
|
+
/**
 * Create a proxy client if AGENTFACTORY_API_URL is set, otherwise return null.
 *
 * @param fallbackApiKey - API key to use (default: WORKER_API_KEY env var)
 * @param organizationId - Workspace ID for multi-tenant routing
 * @returns A configured ProxyIssueTrackerClient, or null when the proxy is
 *   not configured (no URL) or cannot authenticate (no API key anywhere).
 */
export function createProxyClientIfConfigured(fallbackApiKey, organizationId) {
    const baseUrl = process.env.AGENTFACTORY_API_URL;
    if (!baseUrl) {
        return null;
    }
    // Prefer the explicitly supplied key; fall back to the environment.
    const resolvedKey = fallbackApiKey ?? process.env.WORKER_API_KEY;
    if (!resolvedKey) {
        console.warn('[ProxyClient] AGENTFACTORY_API_URL set but no WORKER_API_KEY — proxy disabled');
        return null;
    }
    return new ProxyIssueTrackerClient({
        apiUrl: baseUrl,
        apiKey: resolvedKey,
        organizationId,
    });
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
/**
 * Token Bucket Rate Limiter
 *
 * Proactive rate limiting for Linear API calls. Uses a token bucket algorithm
 * to throttle requests below Linear's ~100 req/min limit.
 *
 * Default: 80 burst capacity, 1.5 tokens/sec refill (~90 req/min sustained).
 */
// NOTE(review): compiler-generated declaration file (see rate-limiter.d.ts.map);
// edits here will be overwritten on the next build — change src/rate-limiter.ts.
export interface TokenBucketConfig {
    /** Maximum tokens (burst capacity). Default: 80 */
    maxTokens: number;
    /** Tokens added per second. Default: 1.5 (~90/min) */
    refillRate: number;
}
export declare const DEFAULT_RATE_LIMIT_CONFIG: TokenBucketConfig;
export declare class TokenBucket {
    private tokens;
    private readonly maxTokens;
    private readonly refillRate;
    private lastRefill;
    private waitQueue;
    constructor(config?: Partial<TokenBucketConfig>);
    /** Refill tokens based on elapsed time since last refill. */
    private refill;
    /** Drain waiters that can be satisfied after a refill. */
    private drainWaiters;
    /**
     * Acquire a single token. Resolves immediately if tokens are available,
     * otherwise queues the caller until a token becomes available via refill.
     */
    acquire(): Promise<void>;
    /** Schedule a timer to refill and drain waiters. */
    private refillTimer;
    private scheduleRefillDrain;
    /**
     * Penalize the bucket after receiving a 429 rate limit response.
     *
     * Drains all tokens to 0 and shifts the refill baseline forward by
     * `seconds` so no new tokens appear until the penalty expires.
     * Any already-queued waiters will wait for the penalty period plus
     * normal refill time.
     *
     * @param seconds - How long to pause before tokens start refilling (from Retry-After header)
     */
    penalize(seconds: number): void;
    /** Current number of available tokens (for testing/monitoring). */
    get availableTokens(): number;
    /** Number of callers waiting for tokens (for testing/monitoring). */
    get pendingCount(): number;
}
/**
 * Extract a Retry-After delay (in milliseconds) from an error thrown by
 * the Linear SDK or a raw HTTP 429 response.
 *
 * Checks (in order):
 * 1. `error.response.headers.get('retry-after')` (fetch Response)
 * 2. `error.response.headers['retry-after']` (plain object headers)
 * 3. `error.headers?.['retry-after']` (error-level headers)
 *
 * The Retry-After value is parsed as seconds (integer). If no valid value
 * is found, returns `null`.
 */
export declare function extractRetryAfterMs(error: unknown): number | null;
//# sourceMappingURL=rate-limiter.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"rate-limiter.d.ts","sourceRoot":"","sources":["../../src/rate-limiter.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAEH,MAAM,WAAW,iBAAiB;IAChC,mDAAmD;IACnD,SAAS,EAAE,MAAM,CAAA;IACjB,sDAAsD;IACtD,UAAU,EAAE,MAAM,CAAA;CACnB;AAED,eAAO,MAAM,yBAAyB,EAAE,iBAGvC,CAAA;AAED,qBAAa,WAAW;IACtB,OAAO,CAAC,MAAM,CAAQ;IACtB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAQ;IAClC,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAQ;IACnC,OAAO,CAAC,UAAU,CAAQ;IAC1B,OAAO,CAAC,SAAS,CAAwB;gBAE7B,MAAM,GAAE,OAAO,CAAC,iBAAiB,CAAM;IAQnD,6DAA6D;IAC7D,OAAO,CAAC,MAAM;IAad,0DAA0D;IAC1D,OAAO,CAAC,YAAY;IAQpB;;;OAGG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAc9B,oDAAoD;IACpD,OAAO,CAAC,WAAW,CAA6C;IAEhE,OAAO,CAAC,mBAAmB;IAiB3B;;;;;;;;;OASG;IACH,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI;IAO/B,mEAAmE;IACnE,IAAI,eAAe,IAAI,MAAM,CAG5B;IAED,qEAAqE;IACrE,IAAI,YAAY,IAAI,MAAM,CAEzB;CACF;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,OAAO,GAAG,MAAM,GAAG,IAAI,CAwBjE"}
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
/**
 * Token-bucket rate limiter for outbound Linear API traffic.
 *
 * Each request draws one token; tokens regenerate continuously at
 * `refillRate` per second up to a burst ceiling of `maxTokens`.
 * The defaults (80 burst, 1.5/sec ≈ 90 req/min sustained) keep
 * throughput safely under Linear's ~100 req/min limit.
 */
export const DEFAULT_RATE_LIMIT_CONFIG = {
    maxTokens: 80,
    refillRate: 1.5,
};
export class TokenBucket {
    tokens;
    maxTokens;
    refillRate;
    lastRefill;
    waitQueue = [];
    /** Pending setTimeout handle for the refill/drain cycle, or null when idle. */
    refillTimer = null;
    constructor(config = {}) {
        const { maxTokens, refillRate } = { ...DEFAULT_RATE_LIMIT_CONFIG, ...config };
        this.maxTokens = maxTokens;
        this.refillRate = refillRate;
        this.tokens = maxTokens;
        this.lastRefill = Date.now();
    }
    /** Credit tokens earned since the last refill, capped at maxTokens. */
    refill() {
        const now = Date.now();
        const elapsedSec = (now - this.lastRefill) / 1000;
        // penalize() pushes lastRefill into the future, making elapsedSec
        // negative; skip accrual entirely until that penalty window passes.
        if (elapsedSec <= 0) {
            return;
        }
        this.tokens = Math.min(this.maxTokens, this.tokens + elapsedSec * this.refillRate);
        this.lastRefill = now;
    }
    /** Wake queued callers in FIFO order while whole tokens remain. */
    drainWaiters() {
        while (this.tokens >= 1 && this.waitQueue.length > 0) {
            this.tokens -= 1;
            const wake = this.waitQueue.shift();
            wake();
        }
    }
    /**
     * Take one token. The fast path resolves at once when a token is free
     * and nobody is already queued; otherwise the caller joins the FIFO
     * queue and is woken by the refill timer.
     */
    async acquire() {
        this.refill();
        const nobodyAhead = this.waitQueue.length === 0;
        if (nobodyAhead && this.tokens >= 1) {
            this.tokens -= 1;
            return;
        }
        return new Promise((wake) => {
            this.waitQueue.push(wake);
            this.scheduleRefillDrain();
        });
    }
    /** Arm a one-shot timer that refills, drains, and re-arms while waiters remain. */
    scheduleRefillDrain() {
        if (this.refillTimer !== null) {
            return;
        }
        // Interval needed to earn exactly one token at the configured rate.
        const msPerToken = 1000 / this.refillRate;
        this.refillTimer = setTimeout(() => {
            this.refillTimer = null;
            this.refill();
            this.drainWaiters();
            if (this.waitQueue.length > 0) {
                this.scheduleRefillDrain();
            }
        }, msPerToken);
    }
    /**
     * React to an HTTP 429: empty the bucket and freeze refills for
     * `seconds` by moving the refill baseline into the future. Queued
     * waiters ride out the penalty plus normal refill time.
     *
     * @param seconds - Pause length before tokens resume (from Retry-After)
     */
    penalize(seconds) {
        this.tokens = 0;
        // Future-dated baseline => refill() sees non-positive elapsed time
        // and generates nothing until the penalty expires.
        this.lastRefill = Date.now() + seconds * 1000;
    }
    /** Whole tokens currently available (refreshes the bucket first). */
    get availableTokens() {
        this.refill();
        return Math.floor(this.tokens);
    }
    /** Count of callers currently blocked inside acquire(). */
    get pendingCount() {
        return this.waitQueue.length;
    }
}
|
|
104
|
+
/**
 * Extract a Retry-After delay (in milliseconds) from an error thrown by
 * the Linear SDK or a raw HTTP 429 response.
 *
 * Checks (in order):
 * 1. `error.response.headers.get('retry-after')` (fetch Response)
 * 2. `error.response.headers['retry-after']` (plain object headers)
 * 3. `error.headers?.['retry-after']` (error-level headers)
 *
 * The Retry-After value may be delay-seconds (integer) or an HTTP-date
 * (RFC 9110 §10.2.3). Non-429 errors return `null`; a 429 with a missing
 * or unusable header falls back to a 60s default.
 */
export function extractRetryAfterMs(error) {
    if (typeof error !== 'object' || error === null)
        return null;
    const err = error;
    // Only rate-limit responses (status 429) carry a meaningful Retry-After.
    const status = err.status ??
        err.statusCode ??
        err.response?.status;
    if (status !== 429)
        return null;
    // Try to extract Retry-After from various locations
    const headerValue = getRetryAfterHeader(err);
    if (headerValue === null) {
        // No Retry-After header — use a sensible default of 60s for Linear
        return 60_000;
    }
    // Numeric form: delay-seconds.
    const seconds = Number.parseInt(headerValue, 10);
    if (!Number.isNaN(seconds)) {
        return seconds > 0 ? seconds * 1000 : 60_000;
    }
    // HTTP-date form: convert the absolute date into a remaining delay.
    const dateMs = Date.parse(headerValue);
    if (!Number.isNaN(dateMs) && dateMs > Date.now()) {
        return dateMs - Date.now();
    }
    return 60_000;
}
/**
 * Locate a `retry-after` header on the error: fetch-style Headers
 * (`.get()`), plain-object response headers, then error-level headers.
 *
 * @returns The raw header string, or `null` if absent.
 */
function getRetryAfterHeader(err) {
    // error.response.headers.get('retry-after') — fetch-style Response
    const response = err.response;
    if (response) {
        const headers = response.headers;
        if (headers) {
            // Headers object with .get() method (fetch API)
            if (typeof headers.get === 'function') {
                const val = headers.get('retry-after');
                if (val)
                    return val;
            }
            // Plain object headers
            const val = headers['retry-after'];
            if (val)
                return val;
        }
    }
    // error.headers['retry-after']
    const errorHeaders = err.headers;
    if (errorHeaders) {
        const val = errorHeaders['retry-after'];
        if (val)
            return val;
    }
    return null;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"rate-limiter.test.d.ts","sourceRoot":"","sources":["../../src/rate-limiter.test.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
// Vitest suite for rate-limiter.js: the TokenBucket limiter and the
// extractRetryAfterMs helper. Timing-dependent behavior is driven with
// fake timers so refill math is deterministic.
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { TokenBucket, DEFAULT_RATE_LIMIT_CONFIG, extractRetryAfterMs } from './rate-limiter.js';
describe('TokenBucket', () => {
    // Fake timers let each test advance virtual time to control refills.
    beforeEach(() => {
        vi.useFakeTimers();
    });
    afterEach(() => {
        vi.useRealTimers();
    });
    // ========================================================================
    // Construction & defaults
    // ========================================================================
    it('uses default config when none provided', () => {
        const bucket = new TokenBucket();
        expect(bucket.availableTokens).toBe(DEFAULT_RATE_LIMIT_CONFIG.maxTokens);
    });
    it('accepts custom config', () => {
        const bucket = new TokenBucket({ maxTokens: 10, refillRate: 5 });
        expect(bucket.availableTokens).toBe(10);
    });
    it('allows partial config overrides', () => {
        const bucket = new TokenBucket({ maxTokens: 20 });
        expect(bucket.availableTokens).toBe(20);
    });
    // ========================================================================
    // Token acquisition
    // ========================================================================
    it('resolves immediately when tokens are available', async () => {
        const bucket = new TokenBucket({ maxTokens: 5, refillRate: 1 });
        await bucket.acquire();
        expect(bucket.availableTokens).toBe(4);
    });
    it('depletes tokens with multiple acquires', async () => {
        const bucket = new TokenBucket({ maxTokens: 3, refillRate: 1 });
        await bucket.acquire();
        await bucket.acquire();
        await bucket.acquire();
        expect(bucket.availableTokens).toBe(0);
    });
    // ========================================================================
    // Waiting when depleted
    // ========================================================================
    it('queues callers when tokens are exhausted', async () => {
        const bucket = new TokenBucket({ maxTokens: 1, refillRate: 1 });
        // Use up the only token
        await bucket.acquire();
        expect(bucket.availableTokens).toBe(0);
        // This should not resolve immediately
        let resolved = false;
        const promise = bucket.acquire().then(() => {
            resolved = true;
        });
        // Give microtasks a chance to run
        await vi.advanceTimersByTimeAsync(0);
        expect(resolved).toBe(false);
        expect(bucket.pendingCount).toBe(1);
        // Advance time so a token refills (1 token/sec => 1000ms for 1 token)
        await vi.advanceTimersByTimeAsync(1000);
        await promise;
        expect(resolved).toBe(true);
        expect(bucket.pendingCount).toBe(0);
    });
    it('drains multiple waiters as tokens refill', async () => {
        const bucket = new TokenBucket({ maxTokens: 1, refillRate: 2 }); // 2 tokens/sec
        await bucket.acquire();
        // Waiters record their wake-up order so FIFO draining is observable.
        const results = [];
        const p1 = bucket.acquire().then(() => results.push(1));
        const p2 = bucket.acquire().then(() => results.push(2));
        expect(bucket.pendingCount).toBe(2);
        // At 2 tokens/sec, each token takes 500ms
        // First timer fires at 500ms, drains waiter 1, schedules next
        await vi.advanceTimersByTimeAsync(500);
        await Promise.resolve(); // let microtasks run
        expect(results).toEqual([1]);
        // Second timer fires at 1000ms total
        await vi.advanceTimersByTimeAsync(500);
        await Promise.resolve();
        expect(results).toEqual([1, 2]);
        await Promise.all([p1, p2]);
        expect(bucket.pendingCount).toBe(0);
    });
    // ========================================================================
    // Refill behavior
    // ========================================================================
    it('refills tokens over time', async () => {
        const bucket = new TokenBucket({ maxTokens: 10, refillRate: 5 });
        // Drain 5 tokens
        for (let i = 0; i < 5; i++) {
            await bucket.acquire();
        }
        expect(bucket.availableTokens).toBe(5);
        // Advance 1 second => 5 new tokens refilled
        vi.advanceTimersByTime(1000);
        expect(bucket.availableTokens).toBe(10);
    });
    it('does not exceed maxTokens on refill', async () => {
        const bucket = new TokenBucket({ maxTokens: 10, refillRate: 100 });
        // Even after a long time, tokens should not exceed max
        vi.advanceTimersByTime(10_000);
        expect(bucket.availableTokens).toBe(10);
    });
    // ========================================================================
    // penalize
    // ========================================================================
    it('penalize drains tokens to 0', () => {
        const bucket = new TokenBucket({ maxTokens: 10, refillRate: 5 });
        expect(bucket.availableTokens).toBe(10);
        bucket.penalize(5);
        expect(bucket.availableTokens).toBe(0);
    });
    it('penalize freezes token generation for the penalty period', () => {
        const bucket = new TokenBucket({ maxTokens: 10, refillRate: 10 });
        bucket.penalize(3); // 3 second penalty
        // After 2 seconds (still within penalty), no tokens should be available
        vi.advanceTimersByTime(2000);
        expect(bucket.availableTokens).toBe(0);
        // After 3 seconds total (penalty expired), refill should resume
        vi.advanceTimersByTime(1000);
        // Now tokens start refilling from 0 at 10/sec, but elapsed since penalty end is ~0
        expect(bucket.availableTokens).toBe(0);
        // After 4 seconds total (1 second of refill after penalty), 10 tokens
        vi.advanceTimersByTime(1000);
        expect(bucket.availableTokens).toBe(10);
    });
    // ========================================================================
    // DEFAULT_RATE_LIMIT_CONFIG
    // ========================================================================
    it('exports sensible defaults', () => {
        expect(DEFAULT_RATE_LIMIT_CONFIG.maxTokens).toBe(80);
        expect(DEFAULT_RATE_LIMIT_CONFIG.refillRate).toBe(1.5);
    });
});
// ===========================================================================
// extractRetryAfterMs
// ===========================================================================
describe('extractRetryAfterMs', () => {
    it('returns null for non-object errors', () => {
        expect(extractRetryAfterMs(null)).toBeNull();
        expect(extractRetryAfterMs(undefined)).toBeNull();
        expect(extractRetryAfterMs('string')).toBeNull();
        expect(extractRetryAfterMs(42)).toBeNull();
    });
    it('returns null for non-429 errors', () => {
        expect(extractRetryAfterMs({ status: 500 })).toBeNull();
        expect(extractRetryAfterMs({ statusCode: 400 })).toBeNull();
        expect(extractRetryAfterMs({ response: { status: 200 } })).toBeNull();
    });
    it('returns 60s default when 429 but no Retry-After header', () => {
        expect(extractRetryAfterMs({ status: 429 })).toBe(60_000);
    });
    it('parses Retry-After from response.headers plain object', () => {
        const error = {
            status: 429,
            response: {
                status: 429,
                headers: { 'retry-after': '30' },
            },
        };
        expect(extractRetryAfterMs(error)).toBe(30_000);
    });
    it('parses Retry-After from response.headers.get() (fetch-style)', () => {
        // Map stands in for the fetch Headers object (case handled via .get()).
        const headers = new Map([['retry-after', '45']]);
        const error = {
            status: 429,
            response: {
                status: 429,
                headers: {
                    get: (name) => headers.get(name) ?? null,
                },
            },
        };
        expect(extractRetryAfterMs(error)).toBe(45_000);
    });
    it('parses Retry-After from error.headers', () => {
        const error = {
            status: 429,
            headers: { 'retry-after': '10' },
        };
        expect(extractRetryAfterMs(error)).toBe(10_000);
    });
    it('detects 429 from response.status when top-level status is missing', () => {
        const error = {
            response: {
                status: 429,
                headers: { 'retry-after': '20' },
            },
        };
        expect(extractRetryAfterMs(error)).toBe(20_000);
    });
    it('detects 429 from statusCode property', () => {
        const error = {
            statusCode: 429,
            headers: { 'retry-after': '15' },
        };
        expect(extractRetryAfterMs(error)).toBe(15_000);
    });
    it('falls back to 60s for invalid Retry-After value', () => {
        const error = {
            status: 429,
            response: {
                status: 429,
                headers: { 'retry-after': 'not-a-number' },
            },
        };
        expect(extractRetryAfterMs(error)).toBe(60_000);
    });
    it('falls back to 60s for zero Retry-After', () => {
        const error = {
            status: 429,
            response: {
                status: 429,
                headers: { 'retry-after': '0' },
            },
        };
        expect(extractRetryAfterMs(error)).toBe(60_000);
    });
});
|