anz-legislation 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +23 -0
- package/dist/cli.d.ts +6 -0
- package/dist/cli.js +198 -0
- package/dist/client.d.ts +84 -0
- package/dist/client.js +492 -0
- package/dist/commands/batch.d.ts +5 -0
- package/dist/commands/batch.js +121 -0
- package/dist/commands/cache.d.ts +5 -0
- package/dist/commands/cache.js +43 -0
- package/dist/commands/cite.d.ts +5 -0
- package/dist/commands/cite.js +68 -0
- package/dist/commands/config.d.ts +5 -0
- package/dist/commands/config.js +56 -0
- package/dist/commands/export.d.ts +8 -0
- package/dist/commands/export.js +169 -0
- package/dist/commands/generate.d.ts +10 -0
- package/dist/commands/generate.js +320 -0
- package/dist/commands/get.d.ts +5 -0
- package/dist/commands/get.js +99 -0
- package/dist/commands/help.d.ts +13 -0
- package/dist/commands/help.js +298 -0
- package/dist/commands/search.d.ts +5 -0
- package/dist/commands/search.js +96 -0
- package/dist/commands/stream.d.ts +5 -0
- package/dist/commands/stream.js +100 -0
- package/dist/config.d.ts +81 -0
- package/dist/config.js +209 -0
- package/dist/errors.d.ts +108 -0
- package/dist/errors.js +173 -0
- package/dist/mcp/server.d.ts +13 -0
- package/dist/mcp/server.js +428 -0
- package/dist/mcp-cli.d.ts +6 -0
- package/dist/mcp-cli.js +37 -0
- package/dist/models/canonical.d.ts +423 -0
- package/dist/models/canonical.js +92 -0
- package/dist/models/index.d.ts +892 -0
- package/dist/models/index.js +223 -0
- package/dist/output/index.d.ts +34 -0
- package/dist/output/index.js +195 -0
- package/dist/output/legal-metadata-publication.d.ts +18 -0
- package/dist/output/legal-metadata-publication.js +23 -0
- package/dist/providers/canonical-metadata.d.ts +3 -0
- package/dist/providers/canonical-metadata.js +202 -0
- package/dist/providers/commonwealth-provider.d.ts +27 -0
- package/dist/providers/commonwealth-provider.js +81 -0
- package/dist/providers/index.d.ts +20 -0
- package/dist/providers/index.js +27 -0
- package/dist/providers/legislation-provider.d.ts +227 -0
- package/dist/providers/legislation-provider.js +308 -0
- package/dist/providers/nz-provider.d.ts +36 -0
- package/dist/providers/nz-provider.js +130 -0
- package/dist/providers/output-adapters.d.ts +14 -0
- package/dist/providers/output-adapters.js +116 -0
- package/dist/providers/plugin-discovery.d.ts +39 -0
- package/dist/providers/plugin-discovery.js +91 -0
- package/dist/providers/plugin-loader.d.ts +86 -0
- package/dist/providers/plugin-loader.js +219 -0
- package/dist/providers/queensland-provider.d.ts +42 -0
- package/dist/providers/queensland-provider.js +105 -0
- package/dist/utils/api-optimization.d.ts +92 -0
- package/dist/utils/api-optimization.js +276 -0
- package/dist/utils/batch.d.ts +110 -0
- package/dist/utils/batch.js +269 -0
- package/dist/utils/branded-types.d.ts +0 -0
- package/dist/utils/branded-types.js +1 -0
- package/dist/utils/compatibility-matrix.d.ts +89 -0
- package/dist/utils/compatibility-matrix.js +214 -0
- package/dist/utils/config-validator.d.ts +39 -0
- package/dist/utils/config-validator.js +197 -0
- package/dist/utils/env-loader.d.ts +55 -0
- package/dist/utils/env-loader.js +77 -0
- package/dist/utils/health-monitor.d.ts +93 -0
- package/dist/utils/health-monitor.js +209 -0
- package/dist/utils/invocation.d.ts +4 -0
- package/dist/utils/invocation.js +33 -0
- package/dist/utils/logger.d.ts +94 -0
- package/dist/utils/logger.js +220 -0
- package/dist/utils/plugin-marketplace.d.ts +77 -0
- package/dist/utils/plugin-marketplace.js +191 -0
- package/dist/utils/presentation.d.ts +2 -0
- package/dist/utils/presentation.js +32 -0
- package/dist/utils/rate-limiter.d.ts +100 -0
- package/dist/utils/rate-limiter.js +256 -0
- package/dist/utils/scraper-cache.d.ts +115 -0
- package/dist/utils/scraper-cache.js +229 -0
- package/dist/utils/secure-config.d.ts +40 -0
- package/dist/utils/secure-config.js +195 -0
- package/dist/utils/streaming.d.ts +121 -0
- package/dist/utils/streaming.js +333 -0
- package/dist/utils/validation.d.ts +190 -0
- package/dist/utils/validation.js +209 -0
- package/dist/utils/version.d.ts +13 -0
- package/dist/utils/version.js +46 -0
- package/package.json +56 -0
package/dist/client.js
ADDED
|
@@ -0,0 +1,492 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* API Client for NZ Legislation API
|
|
3
|
+
* Handles HTTP requests, authentication, rate limiting, and caching
|
|
4
|
+
*/
|
|
5
|
+
import got from 'got';
|
|
6
|
+
import { LRUCache } from 'lru-cache';
|
|
7
|
+
import { z } from 'zod';
|
|
8
|
+
import { getConfig } from '@config';
|
|
9
|
+
import { ConfigError, createApiError, ErrorCode, NetworkError } from '@errors';
|
|
10
|
+
import { LegislationVersionSchema, SearchResultsSchema, VersionSchema, WorkSchema, WorkFromVersionSchema, } from '@models';
|
|
11
|
+
import { logger } from '@utils/logger';
|
|
12
|
+
/**
 * Cache configuration
 * All TTLs are in milliseconds. Per-entry TTLs are enforced lazily on read
 * (in getFromCache) in addition to the LRU's own default TTL.
 */
const CACHE_CONFIG = {
    // Max number of entries
    max: 500,
    // Default TTL: 1 hour
    defaultTTL: 60 * 60 * 1000,
    // TTL for search results: 30 minutes
    searchTTL: 30 * 60 * 1000,
    // TTL for work details: 2 hours
    workTTL: 2 * 60 * 60 * 1000,
    // TTL for versions: 1 hour
    versionsTTL: 60 * 60 * 1000,
};
/**
 * LRU Cache for API responses
 * Entries are { data, timestamp, ttl } records written by setInCache.
 */
const cache = new LRUCache({
    max: CACHE_CONFIG.max,
    ttl: CACHE_CONFIG.defaultTTL,
    updateAgeOnGet: false,
    allowStale: false,
});
/**
 * Rate limit state
 * Optimistic local defaults; refreshed from response headers by
 * updateRateLimitState and enforced by checkRateLimit before each request.
 */
const rateLimitState = {
    remaining: 10000,
    resetTime: Date.now() + 86400000, // 24 hours from now
    burstRemaining: 2000,
    burstResetTime: Date.now() + 300000, // 5 minutes from now
};
// Observability counters, exposed via getCacheStats/getCacheMetrics.
const cacheMetrics = {
    hits: 0,
    misses: 0,
    evictions: 0,
    sets: 0,
};
|
|
51
|
+
/**
 * Generate a deterministic cache key from an endpoint and request parameters.
 *
 * Top-level parameter keys are sorted before serialization so that logically
 * identical parameter objects (same key/value pairs, different insertion
 * order) map to the same cache entry instead of causing spurious misses.
 *
 * @param {string} endpoint - Logical endpoint name (e.g. 'search', 'work')
 * @param {object} [params] - Request parameters; omitted yields an empty suffix
 * @returns {string} Cache key of the form `<endpoint>:<serialized-params>`
 */
function generateCacheKey(endpoint, params) {
    const paramString = params
        ? JSON.stringify(Object.fromEntries(Object.entries(params).sort(([a], [b]) => a.localeCompare(b))))
        : '';
    return `${endpoint}:${paramString}`;
}
|
|
58
|
+
/**
 * Look up a cached API response, enforcing the per-entry TTL.
 * Expired entries are deleted on read. Hit/miss/eviction counters are
 * updated as a side effect.
 *
 * @param {string} key - Cache key produced by generateCacheKey
 * @returns cached payload, or null on miss/expiry
 */
function getFromCache(key) {
    const entry = cache.get(key);
    if (entry === undefined) {
        cacheMetrics.misses += 1;
        logger.debug('Cache miss', { key });
        return null;
    }
    const age = Date.now() - entry.timestamp;
    const expired = age > entry.ttl;
    if (expired) {
        // Lazy eviction: drop the stale entry and report it as a miss.
        cache.delete(key);
        cacheMetrics.evictions += 1;
        cacheMetrics.misses += 1;
        logger.debug('Cache expired', { key, age: `${age}ms` });
        return null;
    }
    cacheMetrics.hits += 1;
    logger.debug('Cache hit', { key, age: `${age}ms`, metrics: cacheMetrics });
    return entry.data;
}
|
|
80
|
+
/**
 * Store an API response in the cache with an explicit TTL and record the
 * write in the metrics counters.
 *
 * @param {string} key - Cache key produced by generateCacheKey
 * @param {*} data - Payload to cache
 * @param {number} ttl - Lifetime in milliseconds (enforced by getFromCache)
 */
function setInCache(key, data, ttl) {
    const entry = { data, timestamp: Date.now(), ttl };
    cache.set(key, entry);
    cacheMetrics.sets += 1;
    logger.debug('Cache set', { key, ttl: `${ttl}ms`, metrics: cacheMetrics });
}
|
|
92
|
+
/**
 * Clear cached entries. With no pattern the whole cache is emptied;
 * otherwise only keys containing the given substring are removed.
 *
 * @param {string} [pattern] - Substring to match against cache keys
 */
export function clearCache(pattern) {
    if (!pattern) {
        cache.clear();
        logger.info('Cache cleared');
        return;
    }
    for (const key of cache.keys()) {
        if (key.includes(pattern)) {
            cache.delete(key);
        }
    }
    logger.info('Cache cleared', { pattern });
}
|
|
109
|
+
/**
 * Snapshot of cache health: current size, counters, hit rate as a
 * percentage string, and a sample of keys for inspection.
 */
export function getCacheStats() {
    const lookups = cacheMetrics.hits + cacheMetrics.misses;
    // Avoid division by zero before any lookups have happened.
    const hitRate = lookups > 0 ? ((cacheMetrics.hits / lookups) * 100).toFixed(2) : '0.00';
    const sampleKeys = Array.from(cache.keys()).slice(0, 10); // First 10 keys
    return {
        size: cache.size,
        maxSize: CACHE_CONFIG.max,
        metrics: cacheMetrics,
        hitRate: `${hitRate}%`,
        keys: sampleKeys,
    };
}
|
|
123
|
+
/**
 * Reset cache metrics (for testing)
 * Zeroes all counters in place so existing references stay valid.
 */
export function resetCacheMetrics() {
    Object.assign(cacheMetrics, { hits: 0, misses: 0, evictions: 0, sets: 0 });
}
|
|
132
|
+
/**
 * Roll over any elapsed rate-limit windows and throw if either the daily
 * or the short burst allowance is exhausted.
 *
 * @throws {Error} when the daily or burst rate limit has been used up
 */
function checkRateLimit() {
    const now = Date.now();
    // Daily window elapsed — restore the full daily allowance.
    if (now >= rateLimitState.resetTime) {
        rateLimitState.remaining = 10000;
        rateLimitState.resetTime = now + 86400000;
    }
    // Burst window elapsed — restore the burst allowance.
    if (now >= rateLimitState.burstResetTime) {
        rateLimitState.burstRemaining = 2000;
        rateLimitState.burstResetTime = now + 300000;
    }
    if (rateLimitState.remaining <= 0) {
        const secondsToWait = Math.ceil((rateLimitState.resetTime - now) / 1000);
        throw new Error(`Daily rate limit exceeded. Please wait ${secondsToWait} seconds or until midnight.`);
    }
    if (rateLimitState.burstRemaining <= 0) {
        const secondsToWait = Math.ceil((rateLimitState.burstResetTime - now) / 1000);
        throw new Error(`Burst rate limit exceeded. Please wait ${secondsToWait} seconds.`);
    }
}
|
|
156
|
+
/**
 * Type guard to check if value is a string array
 * @param {*} value - Candidate value of any type
 * @returns {boolean} true only for arrays whose every element is a string
 */
function isStringArray(value) {
    if (!Array.isArray(value)) {
        return false;
    }
    return value.every((item) => typeof item === 'string');
}
|
|
162
|
+
/**
 * Helper to get a single header value (handles string[] and undefined)
 * @param headers - Response headers object
 * @param name - Header name to retrieve
 * @returns Single header value or undefined
 */
function getHeaderValue(headers, name) {
    const raw = headers[name];
    // Multi-valued headers arrive as string arrays; take the first entry.
    if (Array.isArray(raw) && raw.every((item) => typeof item === 'string')) {
        return raw[0];
    }
    // A plain string or undefined passes through untouched.
    return raw;
}
|
|
177
|
+
/**
 * Update rate limit state from response headers.
 *
 * Reads the x-ratelimit-* and x-burst-* headers and mirrors them into the
 * module-level rateLimitState. Missing or malformed (non-numeric) header
 * values are ignored so the tracked state is never corrupted with NaN,
 * which the original parseInt-without-check could produce.
 *
 * @param headers - Response headers object from got
 */
function updateRateLimitState(headers) {
    // Parse a header into an integer, returning undefined for absent or
    // non-numeric values.
    const parseHeaderInt = (name) => {
        const value = getHeaderValue(headers, name);
        if (!value) {
            return undefined;
        }
        const parsed = Number.parseInt(value, 10);
        return Number.isNaN(parsed) ? undefined : parsed;
    };
    const remaining = parseHeaderInt('x-ratelimit-remaining');
    if (remaining !== undefined) {
        rateLimitState.remaining = remaining;
    }
    const reset = parseHeaderInt('x-ratelimit-reset');
    if (reset !== undefined) {
        // Reset headers are epoch seconds; state is kept in milliseconds.
        rateLimitState.resetTime = reset * 1000;
    }
    const burstRemaining = parseHeaderInt('x-burst-remaining');
    if (burstRemaining !== undefined) {
        rateLimitState.burstRemaining = burstRemaining;
    }
    const burstReset = parseHeaderInt('x-burst-reset');
    if (burstReset !== undefined) {
        rateLimitState.burstResetTime = burstReset * 1000;
    }
}
|
|
198
|
+
/**
 * Create HTTP client with proper configuration
 *
 * Builds a `got` instance pre-configured with the base URL, request timeout,
 * api_key query parameter, a retry policy that honours Retry-After on 429,
 * a hook that mirrors rate-limit headers into local state, and friendlier
 * messages for common error status codes.
 *
 * @throws {ConfigError} when no API key is configured
 */
function createClient() {
    const config = getConfig();
    if (!config.apiKey) {
        throw new ConfigError(ErrorCode.CONFIG_API_KEY_MISSING, 'API key is required. Set NZ_LEGISLATION_API_KEY or configure it with the CLI.');
    }
    return got.extend({
        prefixUrl: config.baseUrl,
        timeout: { request: config.timeout },
        headers: {
            Accept: 'application/json',
            'User-Agent': 'nz-legislation-tool/1.0.0',
        },
        // The API authenticates via an api_key query parameter on every request.
        searchParams: {
            api_key: config.apiKey,
        },
        retry: {
            limit: 3,
            methods: ['get'],
            statusCodes: [408, 413, 429, 500, 502, 503, 504],
            calculateDelay: ({ attemptCount, error }) => {
                // Prefer the server-provided Retry-After delay when throttled.
                if (error.response?.statusCode === 429) {
                    const retryAfter = getHeaderValue(error.response.headers, 'retry-after');
                    if (retryAfter) {
                        return parseInt(retryAfter, 10) * 1000;
                    }
                }
                // Exponential backoff: 1s, 2s, 4s
                return Math.pow(2, attemptCount - 1) * 1000;
            },
        },
        hooks: {
            afterResponse: [
                // Keep the local rate-limit snapshot in sync with the server.
                (response) => {
                    updateRateLimitState(response.headers);
                    return response;
                },
            ],
            beforeError: [
                (error) => {
                    // Add helpful context to errors
                    if (error.response?.statusCode === 401) {
                        error.message = 'Authentication failed. Please check your API key.';
                    }
                    else if (error.response?.statusCode === 404) {
                        error.message = 'Resource not found. Please check the ID.';
                    }
                    else if (error.response?.statusCode === 429) {
                        error.message = 'Rate limit exceeded. Please wait before making more requests.';
                    }
                    return error;
                },
            ],
        },
    });
}
|
|
256
|
+
// Mutable factory so tests can substitute a stub HTTP client; all request
// functions below obtain their client through this binding.
let httpClientFactory = createClient;
/**
 * Override the HTTP client factory (test hook).
 * Passing null/undefined restores the default createClient factory.
 */
export function setHttpClientFactoryForTesting(factory) {
    httpClientFactory = factory ?? createClient;
}
|
|
260
|
+
/**
 * Reconstruct a work record from its versions collection.
 *
 * Fetches `v0/works/{workId}/versions`, validates the entries against
 * WorkFromVersionSchema, picks the most recent version (by ISO date string)
 * as the representative work, annotates it with the version count, and
 * caches it under `cacheKey` with the work TTL.
 *
 * @throws API error (404) when no versions exist for the ID
 */
async function getWorkFromVersions(client, workId, cacheKey) {
    const versionsData = (await client.get(`v0/works/${workId}/versions`).json());
    // The endpoint may return a bare array or a { results: [...] } wrapper.
    const rawResults = Array.isArray(versionsData) ? versionsData : versionsData.results || [];
    const candidates = z.array(WorkFromVersionSchema).parse(rawResults);
    if (candidates.length === 0) {
        throw createApiError(404, `v0/works/${workId}/versions`, `Failed to get work: Work not found for ID "${workId}"`);
    }
    // Newest first — ISO-8601 date strings compare correctly lexicographically.
    // slice() keeps the validated array itself unmutated.
    const result = candidates.slice().sort((a, b) => b.date.localeCompare(a.date))[0];
    result.versionCount = candidates.length;
    setInCache(cacheKey, result, CACHE_CONFIG.workTTL);
    return result;
}
|
|
272
|
+
/**
 * Search for legislation works
 *
 * Serves results from the LRU cache when possible; otherwise queries the
 * v0/works endpoint, validates the response with SearchResultsSchema, and
 * caches it for the search TTL.
 *
 * @param params - query/type/status/date-range/pagination options
 * @returns validated search results
 * @throws NetworkError (re-thrown), API errors for HTTP failures, or a
 *         generic Error for anything else
 */
export async function searchWorks(params) {
    const cacheKey = generateCacheKey('search', params);
    // Try cache first
    const cached = getFromCache(cacheKey);
    if (cached) {
        return cached;
    }
    logger.startTimer('searchWorks');
    checkRateLimit();
    const client = httpClientFactory();
    try {
        // Map CLI-style parameter names onto the API's query-string schema.
        const data = await client
            .get('v0/works', {
            searchParams: {
                ...(params.query && { search_term: params.query }),
                // CLI exposes 'regulation'; the API calls it 'secondary_legislation'.
                ...(params.type && {
                    legislation_type: params.type === 'regulation' ? 'secondary_legislation' : params.type,
                }),
                // Status values are kebab-case on the CLI, snake_case on the API.
                ...(params.status && { legislation_status: params.status.replace(/-/g, '_') }),
                ...(params.from && { from: params.from }),
                ...(params.to && { to: params.to }),
                ...(params.limit && { per_page: params.limit.toString() }),
                // Convert offset/limit into the API's 1-based page number.
                ...(params.offset && {
                    page: (Math.floor((params.offset || 0) / (params.limit || 20)) + 1).toString(),
                }),
            },
        })
            .json();
        const result = SearchResultsSchema.parse(data);
        // Cache the result
        setInCache(cacheKey, result, CACHE_CONFIG.searchTTL);
        const duration = logger.endTimer('searchWorks');
        logger.debug('Search completed', {
            results: result.results.length,
            total: result.total,
            duration: `${duration}ms`,
        });
        return result;
    }
    catch (error) {
        logger.error('Search failed', error instanceof Error ? error : undefined, { params });
        // Preserve typed NetworkErrors for callers that catch them specifically.
        if (error instanceof NetworkError) {
            throw error;
        }
        // HTTP errors carry a response — convert to the project's API error type.
        if (error instanceof Error && 'response' in error) {
            const apiError = error;
            if (apiError.response) {
                throw createApiError(apiError.response.statusCode || 500, apiError.response.url || 'unknown', `Search failed: ${error.message}`);
            }
        }
        throw new Error(`Search failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
}
|
|
328
|
+
/**
 * Get a specific work by ID
 *
 * Checks the cache first. IDs containing '_' are resolved directly via the
 * versions collection; other IDs try the single-work endpoint and fall back
 * to the versions collection on a 404 (see the inline note on the live API).
 *
 * @param workId - Work identifier
 * @returns validated work record (possibly reconstructed from versions)
 * @throws NetworkError (re-thrown), API errors for HTTP failures, or a
 *         generic Error for anything else
 */
export async function getWork(workId) {
    const cacheKey = generateCacheKey('work', { id: workId });
    // Try cache first
    const cached = getFromCache(cacheKey);
    if (cached) {
        return cached;
    }
    logger.startTimer('getWork');
    checkRateLimit();
    const client = httpClientFactory();
    // Underscore IDs are version-style identifiers; resolve them straight
    // from the versions collection instead of the single-work endpoint.
    const preferVersionsEndpoint = workId.includes('_');
    try {
        if (preferVersionsEndpoint) {
            const result = await getWorkFromVersions(client, workId, cacheKey);
            const duration = logger.endTimer('getWork');
            logger.debug('Work reconstructed from versions', {
                workId,
                versionCount: result.versionCount,
                duration: `${duration}ms`,
            });
            return result;
        }
        const data = await client.get(`v0/works/${workId}`).json();
        const result = WorkSchema.parse(data);
        // Cache the result
        setInCache(cacheKey, result, CACHE_CONFIG.workTTL);
        const duration = logger.endTimer('getWork');
        logger.debug('Work retrieved', { workId, duration: `${duration}ms` });
        return result;
    }
    catch (error) {
        const apiError = error instanceof Error && 'response' in error
            ? error
            : undefined;
        // The live v0 API currently exposes work details reliably via the versions
        // collection, while the single-work endpoint returns 404 for valid work IDs.
        if (apiError?.response?.statusCode === 404) {
            try {
                const result = await getWorkFromVersions(client, workId, cacheKey);
                const duration = logger.endTimer('getWork');
                logger.debug('Work reconstructed from versions', {
                    workId,
                    versionCount: result.versionCount,
                    duration: `${duration}ms`,
                });
                return result;
            }
            catch (fallbackError) {
                // Fallback failed too — surface the most specific error available.
                if (fallbackError instanceof Error && 'response' in fallbackError) {
                    const fallbackApiError = fallbackError;
                    if (fallbackApiError.response) {
                        throw createApiError(fallbackApiError.response.statusCode || 500, fallbackApiError.response.url || 'unknown', `Failed to get work: ${fallbackError.message}`);
                    }
                }
                // Errors with a 'code' are already typed project errors — re-throw as-is.
                if (fallbackError instanceof Error && 'code' in fallbackError) {
                    throw fallbackError;
                }
                throw new Error(`Failed to get work: ${fallbackError instanceof Error ? fallbackError.message : 'Unknown error'}`);
            }
        }
        logger.error('Failed to get work', error instanceof Error ? error : undefined, { workId });
        if (error instanceof NetworkError) {
            throw error;
        }
        if (apiError?.response) {
            throw createApiError(apiError.response.statusCode || 500, apiError.response.url || 'unknown', `Failed to get work: ${error instanceof Error ? error.message : 'Unknown error'}`);
        }
        throw new Error(`Failed to get work: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
}
|
|
401
|
+
/**
 * Get all versions of a work
 *
 * Serves from the cache when possible; otherwise fetches the versions
 * collection, validates each entry with VersionSchema, and caches the array
 * for the versions TTL.
 *
 * @param workId - Work identifier
 * @returns array of validated version records (may be empty)
 * @throws NetworkError (re-thrown), API errors for HTTP failures, or a
 *         generic Error for anything else
 */
export async function getWorkVersions(workId) {
    const cacheKey = generateCacheKey('versions', { workId });
    // Try cache first
    const cached = getFromCache(cacheKey);
    if (cached) {
        return cached;
    }
    logger.startTimer('getWorkVersions');
    checkRateLimit();
    const client = httpClientFactory();
    try {
        const data = (await client.get(`v0/works/${workId}/versions`).json());
        // The endpoint may return a bare array or a { results: [...] } wrapper.
        const rawResults = Array.isArray(data) ? data : data.results || [];
        const result = z.array(VersionSchema).parse(rawResults);
        // Cache the result
        setInCache(cacheKey, result, CACHE_CONFIG.versionsTTL);
        const duration = logger.endTimer('getWorkVersions');
        logger.debug('Versions retrieved', { workId, count: result.length, duration: `${duration}ms` });
        return result;
    }
    catch (error) {
        logger.error('Failed to get versions', error instanceof Error ? error : undefined, { workId });
        if (error instanceof NetworkError) {
            throw error;
        }
        if (error instanceof Error && 'response' in error) {
            const apiError = error;
            if (apiError.response) {
                throw createApiError(apiError.response.statusCode || 500, apiError.response.url || 'unknown', `Failed to get versions: ${error.message}`);
            }
        }
        throw new Error(`Failed to get versions: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
}
|
|
438
|
+
/**
 * Get a specific version of a work
 *
 * Serves from the cache when possible; otherwise fetches the version,
 * validates it with LegislationVersionSchema, and caches it for the
 * versions TTL.
 *
 * @param versionId - Version identifier
 * @returns validated legislation version record
 * @throws NetworkError (re-thrown), API errors for HTTP failures, or a
 *         generic Error for anything else
 */
export async function getVersion(versionId) {
    const cacheKey = generateCacheKey('version', { versionId });
    // Try cache first
    const cached = getFromCache(cacheKey);
    if (cached) {
        return cached;
    }
    logger.startTimer('getVersion');
    checkRateLimit();
    const client = httpClientFactory();
    try {
        const data = await client.get(`v0/versions/${versionId}`).json();
        const result = LegislationVersionSchema.parse(data);
        // Cache the result
        setInCache(cacheKey, result, CACHE_CONFIG.versionsTTL);
        const duration = logger.endTimer('getVersion');
        logger.debug('Version retrieved', { versionId, duration: `${duration}ms` });
        return result;
    }
    catch (error) {
        logger.error('Failed to get version', error instanceof Error ? error : undefined, {
            versionId,
        });
        if (error instanceof NetworkError) {
            throw error;
        }
        if (error instanceof Error && 'response' in error) {
            const apiError = error;
            if (apiError.response) {
                throw createApiError(apiError.response.statusCode || 500, apiError.response.url || 'unknown', `Failed to get version: ${error.message}`);
            }
        }
        throw new Error(`Failed to get version: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
}
|
|
476
|
+
/**
 * Get rate limit status
 * @returns a snapshot with remaining quotas and reset times as Date objects
 */
export function getRateLimitStatus() {
    const { remaining, resetTime, burstRemaining, burstResetTime } = rateLimitState;
    return {
        remaining,
        resetTime: new Date(resetTime),
        burstRemaining,
        burstResetTime: new Date(burstResetTime),
    };
}
|
|
487
|
+
/**
 * Get cache metrics
 * @returns a defensive copy of the counters so callers cannot mutate state
 */
export function getCacheMetrics() {
    return Object.assign({}, cacheMetrics);
}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Batch Command - Execute bulk operations
|
|
3
|
+
*/
|
|
4
|
+
import { readFileSync } from 'fs';
|
|
5
|
+
import { Command } from 'commander';
|
|
6
|
+
import ora from 'ora';
|
|
7
|
+
import { BatchExecutor, createBatchFromFile, createBatchFromIds, formatBatchResults, saveBatchResults, } from '@utils/batch';
|
|
8
|
+
/**
 * `batch` command: load work/version IDs from a CSV/JSON file or a
 * comma-separated list, run the chosen operation over them via BatchExecutor
 * with bounded concurrency, print a summary, and optionally save results.
 */
export const batchCommand = new Command()
    .name('batch')
    .description('Execute bulk operations with batching')
    .option('-f, --file <path>', 'Input file with IDs (CSV or JSON)')
    .option('-i, --ids <ids>', 'Comma-separated list of IDs')
    .requiredOption('-t, --type <type>', 'Operation type (search, getWork, getVersions, getVersion)')
    .option('-o, --output <path>', 'Output file path')
    .option('-F, --format <format>', 'Output format (json, csv)', 'json')
    .option('-c, --concurrency <number>', 'Concurrent requests', '5')
    .option('-r, --retry', 'Retry failed requests', false)
    .option('--id-column <column>', 'Column name for ID in CSV (default: "id")')
    .action(async (options) => {
    // Exactly one input source is required: a file or an inline ID list.
    if (!options.file && !options.ids) {
        console.error('Error: Either --file or --ids is required');
        process.exit(1);
    }
    const spinner = ora('Preparing batch...').start();
    try {
        let requests = [];
        // Load requests from file or IDs
        if (options.file) {
            spinner.text = 'Loading input file...';
            const content = readFileSync(options.file, 'utf-8');
            // File format is chosen by extension, not by sniffing content.
            if (options.file.endsWith('.json')) {
                const data = JSON.parse(content);
                if (Array.isArray(data)) {
                    requests = createBatchFromFile(data, options.type, options.idColumn || 'id');
                }
                else {
                    throw new Error('JSON file must contain an array of objects');
                }
            }
            else if (options.file.endsWith('.csv')) {
                // Simple CSV parsing (for production, use a proper CSV parser)
                // NOTE(review): split(',') cannot handle quoted fields or embedded
                // commas — adequate only for plain ID columns.
                const lines = content.trim().split('\n');
                const headers = lines[0].split(',');
                const idColumnIndex = options.idColumn
                    ? headers.indexOf(options.idColumn)
                    : headers.indexOf('id');
                if (idColumnIndex === -1) {
                    throw new Error(`ID column "${options.idColumn || 'id'}" not found in CSV`);
                }
                // Keep only the ID column from each data row.
                const rows = lines.slice(1).map(line => {
                    const values = line.split(',');
                    return { [headers[idColumnIndex]]: values[idColumnIndex] };
                });
                requests = createBatchFromFile(rows, options.type, options.idColumn || 'id');
            }
            else {
                throw new Error('Unsupported file format. Use CSV or JSON.');
            }
        }
        else if (options.ids) {
            // Inline list: trim entries and drop empties (e.g. trailing commas).
            const ids = options.ids
                .split(',')
                .map(id => id.trim())
                .filter(id => id);
            requests = createBatchFromIds(ids, options.type);
        }
        if (requests.length === 0) {
            spinner.fail('No requests to process');
            process.exit(1);
        }
        spinner.succeed(`Prepared ${requests.length} requests`);
        // Create batch executor
        const executor = new BatchExecutor({
            concurrency: parseInt(options.concurrency, 10),
            retryFailed: options.retry,
            maxRetries: 3,
        });
        // Track progress — log at most once per 10 percentage points.
        let lastProgress = 0;
        executor.on('progress', (progress) => {
            if (progress.percent - lastProgress >= 10) {
                console.log(` Progress: ${progress.percent}% (${progress.completed}/${progress.total})`);
                lastProgress = progress.percent;
            }
        });
        // Execute batch
        console.log('\nExecuting batch operations...');
        const results = await executor.execute(requests);
        // Format results
        const { successful, failed, cached, summary } = formatBatchResults(results);
        // Print summary
        console.log('\nBatch Execution Summary:');
        console.log('─'.repeat(50));
        console.log(` Total: ${summary.total}`);
        console.log(` Successful: ${successful.length} (${summary.successRate}%)`);
        console.log(` Failed: ${failed.length}`);
        console.log(` From Cache: ${cached.length} (${summary.cacheHitRate}%)`);
        console.log(` Avg Duration: ${summary.averageDuration}ms`);
        console.log(` Total Time: ${summary.totalDuration}ms`);
        // Save results if output specified
        if (options.output) {
            saveBatchResults(results, options.output, options.format);
            console.log(`\n✓ Results saved to: ${options.output}`);
        }
        // Exit with error if any failed
        if (failed.length > 0) {
            console.log(`\n⚠ ${failed.length} request(s) failed`);
            // Only list individual failures when the list is short enough to read.
            if (failed.length <= 10) {
                console.log('\nFailed requests:');
                failed.forEach(r => {
                    console.log(` - ${r.id}: ${r.error?.message}`);
                });
            }
        }
    }
    catch (error) {
        spinner.fail('Batch operation failed');
        console.error('Error:', error instanceof Error ? error.message : error);
        process.exit(1);
    }
});
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cache command - Manage API response cache
|
|
3
|
+
*/
|
|
4
|
+
import { Command } from 'commander';
|
|
5
|
+
import { clearCache, getCacheStats } from '@client';
|
|
6
|
+
import { logger } from '@utils/logger';
|
|
7
|
+
/**
 * `cache` command: inspect or clear the API response cache.
 * With no flags (or --stats) it prints statistics; --clear empties the
 * cache, optionally scoped to keys matching --pattern.
 */
export const cacheCommand = new Command()
    .name('cache')
    .description('Manage API response cache')
    .option('--clear', 'Clear all cached data')
    .option('--pattern <pattern>', 'Clear cache entries matching pattern')
    .option('--stats', 'Show cache statistics')
    .action((options) => {
    try {
        // Statistics are shown when asked for, or when no action flag is given.
        const showStats = options.stats || (!options.clear && !options.pattern);
        if (showStats) {
            const stats = getCacheStats();
            console.log('Cache Statistics:');
            console.log(` Size: ${stats.size} / ${stats.maxSize} entries`);
            if (stats.keys.length > 0) {
                console.log(' Recent keys:');
                for (const [index, key] of stats.keys.entries()) {
                    console.log(` ${index + 1}. ${key}`);
                }
            }
        }
        if (options.clear) {
            if (options.pattern) {
                clearCache(options.pattern);
                console.log(`Cache entries matching "${options.pattern}" cleared.`);
            }
            else {
                clearCache();
                console.log('Cache cleared.');
            }
        }
    }
    catch (error) {
        logger.error('Cache operation failed', error instanceof Error ? error : undefined);
        console.error('Error:', error instanceof Error ? error.message : 'Unknown error');
        process.exit(1);
    }
});
|