@lastbrain/ai-ui-react 1.0.22 → 1.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hooks/useModelManagement.d.ts.map +1 -1
- package/dist/hooks/useModelManagement.js +18 -0
- package/dist/hooks/usePrompts.d.ts.map +1 -1
- package/dist/hooks/usePrompts.js +30 -4
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/utils/cache.d.ts +32 -0
- package/dist/utils/cache.d.ts.map +1 -0
- package/dist/utils/cache.js +86 -0
- package/dist/utils/modelManagement.js +2 -2
- package/package.json +1 -1
- package/src/hooks/useModelManagement.ts +27 -0
- package/src/hooks/usePrompts.ts +41 -4
- package/src/index.ts +1 -0
- package/src/utils/cache.ts +111 -0
- package/src/utils/modelManagement.ts +2 -2
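
The main addition in this release is a small in-memory cache and rate-limiting module (package/dist/utils/cache.js, re-exported from the package root via export * from "./utils/cache"). As orientation before the per-file diffs, here is a minimal sketch of how those helpers might be used directly from application code, based on the declarations added in package/dist/utils/cache.d.ts; the fetchJson wrapper, its URL-based key, and the TTL/limit values are illustrative, not part of the package.

```ts
import {
  getCached,
  setCache,
  isRateLimited,
  getRateLimitResetTime,
} from "@lastbrain/ai-ui-react";

// Illustrative wrapper: serve a cached response while it is fresh, otherwise
// refetch unless the per-key limit (here 5 calls per minute) is exhausted.
async function fetchJson<T>(url: string, ttlMs = 60_000): Promise<T> {
  const cached = getCached<T>(url, ttlMs);
  if (cached) return cached;

  if (isRateLimited(url, 5, 60_000)) {
    const waitMs = getRateLimitResetTime(url);
    throw new Error(`Rate limited, retry in ${Math.ceil(waitMs / 1000)}s`);
  }

  const response = await fetch(url);
  const data = (await response.json()) as T;
  setCache(url, data);
  return data;
}
```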
package/dist/hooks/useModelManagement.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"useModelManagement.d.ts","sourceRoot":"","sources":["../../src/hooks/useModelManagement.ts"],"names":[],"mappings":"AAEA,OAAO,EAIL,KAAK,kBAAkB,EACxB,MAAM,0BAA0B,CAAC;
+
{"version":3,"file":"useModelManagement.d.ts","sourceRoot":"","sources":["../../src/hooks/useModelManagement.ts"],"names":[],"mappings":"AAEA,OAAO,EAIL,KAAK,kBAAkB,EACxB,MAAM,0BAA0B,CAAC;AAQlC,MAAM,WAAW,OAAO;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,CAAC;IAC/C,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,yBAA0B,SAAQ,kBAAkB;IACnE,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,CAAC;CACjD;AAED,MAAM,WAAW,wBAAwB;IAEvC,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IAGrB,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACpE,aAAa,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IACnC,iBAAiB,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAGvC,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5C,eAAe,EAAE,MAAM,OAAO,EAAE,CAAC;IACjC,iBAAiB,EAAE,MAAM,OAAO,EAAE,CAAC;CACpC;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAChC,OAAO,GAAE,yBAA8B,GACtC,wBAAwB,CA6M1B"}
package/dist/hooks/useModelManagement.js
CHANGED
@@ -1,6 +1,7 @@
 import { useState, useCallback, useEffect } from "react";
 import { useAiContext } from "../context/AiProvider";
 import { toggleUserModel, getAvailableModels, getUserModels, } from "../utils/modelManagement";
+import { getCached, setCache, isRateLimited, getRateLimitResetTime, } from "../utils/cache";
 /**
  * Hook pour gérer les modèles IA d'un utilisateur
  */
@@ -21,6 +22,21 @@ export function useModelManagement(options = {}) {
         try {
             setLoading(true);
             setError(null);
+            // Check cache first (5 minutes TTL for models)
+            const cacheKey = `models_${category || "all"}`;
+            const cached = getCached(cacheKey, 300000);
+            if (cached) {
+                console.log("[useModelManagement] Using cached models");
+                setAvailableModels(cached);
+                setLoading(false);
+                return;
+            }
+            // Check rate limit (max 5 calls per minute)
+            if (isRateLimited("models_fetch", 5, 60000)) {
+                const resetTime = getRateLimitResetTime("models_fetch");
+                const resetSeconds = Math.ceil(resetTime / 1000);
+                throw new Error(`Rate limit exceeded. Please wait ${resetSeconds}s before trying again.`);
+            }
             console.log("[useModelManagement] Fetching available models with options:", {
                 apiKey: effectiveOptions.apiKey
                     ? effectiveOptions.apiKey.substring(0, 10) + "..."
@@ -33,6 +49,8 @@ export function useModelManagement(options = {}) {
                 ? models.filter((m) => m.category === category)
                 : models;
             setAvailableModels(filteredModels);
+            // Cache the results
+            setCache(cacheKey, filteredModels);
             console.log("[useModelManagement] Set filtered models:", filteredModels.length);
         }
         catch (err) {
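
With this change the compiled hook serves model lists from a five-minute in-memory cache keyed as models_<category or "all"> and caps fetches at five per minute under the "models_fetch" rate-limit key. A consumer that needs fresher data sooner could invalidate that entry itself; a minimal sketch using the exported helpers (the wrapper function is illustrative):

```ts
import { clearCache, resetRateLimit } from "@lastbrain/ai-ui-react";

// Drop the cached model list for one category (or "all") so the next
// fetch in useModelManagement goes back to the API; optionally also lift
// the 5-calls-per-minute window used by the hook.
function invalidateModelCache(category?: string): void {
  clearCache(`models_${category || "all"}`);
  resetRateLimit("models_fetch");
}
```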
package/dist/hooks/usePrompts.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"usePrompts.d.ts","sourceRoot":"","sources":["../../src/hooks/usePrompts.ts"],"names":[],"mappings":"
+
{"version":3,"file":"usePrompts.d.ts","sourceRoot":"","sources":["../../src/hooks/usePrompts.ts"],"names":[],"mappings":"AAYA,MAAM,WAAW,MAAM;IACrB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC;IACvB,SAAS,EAAE,OAAO,CAAC;IACnB,QAAQ,EAAE,OAAO,CAAC;IAClB,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACtB,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,YAAa,SAAQ,MAAM;IAC1C,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,iBAAiB;IAChC,IAAI,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;IACxB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,EAAE,GAAG,YAAY,EAAE,CAAC;IACnC,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,iBAAiB,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC7D,YAAY,EAAE,CACZ,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,GAAG,YAAY,GAAG,YAAY,CAAC,KACnD,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC;IAC5B,YAAY,EAAE,CAAC,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC;IAC5E,YAAY,EAAE,CAAC,EAAE,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC;IAC/C,aAAa,EAAE,CACb,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,OAAO,GAAG,MAAM,GAAG,QAAQ,KAClC,OAAO,CAAC,IAAI,CAAC,CAAC;CACpB;AAED,wBAAgB,UAAU,IAAI,gBAAgB,CA8Q7C"}
package/dist/hooks/usePrompts.js
CHANGED
@@ -1,6 +1,7 @@
 "use client";
 import { useState, useCallback } from "react";
 import { useAiContext } from "../context/AiProvider";
+import { getCached, setCache, clearCache, isRateLimited, getRateLimitResetTime, } from "../utils/cache";
 export function usePrompts() {
     const { apiKeyId, baseUrl } = useAiContext();
     const [prompts, setPrompts] = useState([]);
@@ -10,6 +11,22 @@ export function usePrompts() {
         try {
             setLoading(true);
             setError(null);
+            // Generate cache key based on options
+            const cacheKey = `prompts_${JSON.stringify(options || {})}`;
+            // Check cache first (60 seconds TTL)
+            const cached = getCached(cacheKey, 60000);
+            if (cached) {
+                console.log("[usePrompts] Using cached data");
+                setPrompts(cached);
+                setLoading(false);
+                return;
+            }
+            // Check rate limit (max 10 calls per minute)
+            if (isRateLimited("prompts_fetch", 10, 60000)) {
+                const resetTime = getRateLimitResetTime("prompts_fetch");
+                const resetSeconds = Math.ceil(resetTime / 1000);
+                throw new Error(`Rate limit exceeded. Please wait ${resetSeconds}s before trying again.`);
+            }
             const params = new URLSearchParams();
             if (options?.type)
                 params.append("type", options.type);
@@ -53,7 +70,10 @@ export function usePrompts() {
             });
             const data = await response.json();
             if (response.ok) {
-
+                const promptsData = data.prompts || [];
+                setPrompts(promptsData);
+                // Cache the results
+                setCache(cacheKey, promptsData);
             }
             else {
                 setError(data.error || "Failed to fetch prompts");
@@ -71,7 +91,7 @@ export function usePrompts() {
             setError(null);
             const isExternalProxy = baseUrl && baseUrl.includes("/api/lastbrain");
             const endpoint = isExternalProxy
-                ? `${baseUrl}/
+                ? `${baseUrl}/prompts` // Proxy handles auth routes
                 : "/api/ai/auth/prompts";
             const headers = { "Content-Type": "application/json" };
             if (!isExternalProxy && apiKeyId) {
@@ -85,6 +105,8 @@ export function usePrompts() {
             });
             const result = await response.json();
             if (response.ok) {
+                // Invalidate cache after creating a prompt
+                clearCache();
                 return result.prompt;
             }
             else {
@@ -121,6 +143,8 @@ export function usePrompts() {
             });
             const result = await response.json();
             if (response.ok) {
+                // Invalidate cache after updating a prompt
+                clearCache();
                 return result.prompt;
             }
             else {
@@ -139,7 +163,7 @@ export function usePrompts() {
             const isExternalProxy = baseUrl && baseUrl.includes("/api/lastbrain");
             const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
             const endpoint = isExternalProxy
-                ? `${baseUrl}/
+                ? `${baseUrl}/prompts?id=${id}` // Proxy handles auth routes
                 : isPublicApi
                     ? `${baseUrl}/prompts?id=${id}`
                     : baseUrl
@@ -156,6 +180,8 @@ export function usePrompts() {
             });
             const result = await response.json();
             if (response.ok) {
+                // Invalidate cache after deleting a prompt
+                clearCache();
                 return true;
             }
             else {
@@ -173,7 +199,7 @@ export function usePrompts() {
             const isExternalProxy = baseUrl && baseUrl.includes("/api/lastbrain");
             const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
             const endpoint = isExternalProxy
-                ? `${baseUrl}/
+                ? `${baseUrl}/prompts/stats` // Proxy handles auth routes internally
                 : isPublicApi
                     ? `${baseUrl}/prompts/stats`
                     : baseUrl
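
The prompts hook follows the same pattern with a shorter, 60-second TTL and a coarser invalidation strategy: after a successful create, update, or delete it calls clearCache() with no key, which empties the entire shared cache (model lists included) rather than just the prompts entries. A sketch of what that means for application code (the options object is illustrative):

```ts
import { getCached, clearCache } from "@lastbrain/ai-ui-react";

// The hook caches each prompt list under a key derived from its options.
const key = `prompts_${JSON.stringify({ type: "text" })}`;

// Within 60 seconds of a successful fetch this returns the cached array;
// after any mutation the hook has already called clearCache(), so this
// returns null and the next fetch hits the API again (max 10 calls/minute).
const cachedPrompts = getCached<unknown[]>(key, 60_000);

// Application code can trigger the same full invalidation manually:
clearCache();
```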
package/dist/index.d.ts
CHANGED
@@ -17,6 +17,7 @@ export * from "./components/AiImageButton";
 export * from "./components/AiSettingsButton";
 export * from "./components/AiStatusButton";
 export * from "./utils/modelManagement";
+export * from "./utils/cache";
 export * from "./examples/AiImageGenerator";
 export * from "./examples/AiPromptPanelAdvanced";
 //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,cAAc,SAAS,CAAC;AAGxB,cAAc,sBAAsB,CAAC;AAGrC,cAAc,qBAAqB,CAAC;AACpC,cAAc,qBAAqB,CAAC;AACpC,cAAc,qBAAqB,CAAC;AACpC,cAAc,uBAAuB,CAAC;AACtC,cAAc,wBAAwB,CAAC;AACvC,cAAc,oBAAoB,CAAC;AACnC,cAAc,4BAA4B,CAAC;AAG3C,cAAc,4BAA4B,CAAC;AAC3C,cAAc,4BAA4B,CAAC;AAC3C,cAAc,sBAAsB,CAAC;AACrC,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,cAAc,0BAA0B,CAAC;AACzC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,6BAA6B,CAAC;AAG5C,cAAc,yBAAyB,CAAC;
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,cAAc,SAAS,CAAC;AAGxB,cAAc,sBAAsB,CAAC;AAGrC,cAAc,qBAAqB,CAAC;AACpC,cAAc,qBAAqB,CAAC;AACpC,cAAc,qBAAqB,CAAC;AACpC,cAAc,uBAAuB,CAAC;AACtC,cAAc,wBAAwB,CAAC;AACvC,cAAc,oBAAoB,CAAC;AACnC,cAAc,4BAA4B,CAAC;AAG3C,cAAc,4BAA4B,CAAC;AAC3C,cAAc,4BAA4B,CAAC;AAC3C,cAAc,sBAAsB,CAAC;AACrC,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,cAAc,0BAA0B,CAAC;AACzC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,6BAA6B,CAAC;AAG5C,cAAc,yBAAyB,CAAC;AACxC,cAAc,eAAe,CAAC;AAG9B,cAAc,6BAA6B,CAAC;AAC5C,cAAc,kCAAkC,CAAC"}
package/dist/index.js
CHANGED
@@ -22,6 +22,7 @@ export * from "./components/AiSettingsButton";
 export * from "./components/AiStatusButton";
 // Utils
 export * from "./utils/modelManagement";
+export * from "./utils/cache";
 // Examples
 export * from "./examples/AiImageGenerator";
 export * from "./examples/AiPromptPanelAdvanced";
package/dist/utils/cache.d.ts
ADDED
@@ -0,0 +1,32 @@
+/**
+ * Simple cache and rate limiting utilities for API calls
+ */
+/**
+ * Get cached data if valid
+ */
+export declare function getCached<T>(key: string, maxAgeMs?: number): T | null;
+/**
+ * Set cached data
+ */
+export declare function setCache<T>(key: string, data: T): void;
+/**
+ * Clear cache for a specific key or all cache
+ */
+export declare function clearCache(key?: string): void;
+/**
+ * Check if rate limit is exceeded
+ * @param key - Unique identifier for the rate limit
+ * @param maxCalls - Maximum number of calls allowed
+ * @param windowMs - Time window in milliseconds
+ * @returns true if rate limit is exceeded
+ */
+export declare function isRateLimited(key: string, maxCalls?: number, windowMs?: number): boolean;
+/**
+ * Get remaining time before rate limit reset
+ */
+export declare function getRateLimitResetTime(key: string): number;
+/**
+ * Reset rate limit for a key
+ */
+export declare function resetRateLimit(key?: string): void;
+//# sourceMappingURL=cache.d.ts.map
package/dist/utils/cache.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"cache.d.ts","sourceRoot":"","sources":["../../src/utils/cache.ts"],"names":[],"mappings":"AAAA;;GAEG;AAeH;;GAEG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,QAAQ,GAAE,MAAc,GAAG,CAAC,GAAG,IAAI,CAW5E;AAED;;GAEG;AACH,wBAAgB,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,GAAG,IAAI,CAKtD;AAED;;GAEG;AACH,wBAAgB,UAAU,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI,CAM7C;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAC3B,GAAG,EAAE,MAAM,EACX,QAAQ,GAAE,MAAW,EACrB,QAAQ,GAAE,MAAc,GACvB,OAAO,CAuBT;AAED;;GAEG;AACH,wBAAgB,qBAAqB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAMzD;AAED;;GAEG;AACH,wBAAgB,cAAc,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI,CAMjD"}
package/dist/utils/cache.js
ADDED
@@ -0,0 +1,86 @@
+/**
+ * Simple cache and rate limiting utilities for API calls
+ */
+const cache = new Map();
+const rateLimits = new Map();
+/**
+ * Get cached data if valid
+ */
+export function getCached(key, maxAgeMs = 60000) {
+    const entry = cache.get(key);
+    if (!entry)
+        return null;
+    const now = Date.now();
+    if (now - entry.timestamp > maxAgeMs) {
+        cache.delete(key);
+        return null;
+    }
+    return entry.data;
+}
+/**
+ * Set cached data
+ */
+export function setCache(key, data) {
+    cache.set(key, {
+        data,
+        timestamp: Date.now(),
+    });
+}
+/**
+ * Clear cache for a specific key or all cache
+ */
+export function clearCache(key) {
+    if (key) {
+        cache.delete(key);
+    }
+    else {
+        cache.clear();
+    }
+}
+/**
+ * Check if rate limit is exceeded
+ * @param key - Unique identifier for the rate limit
+ * @param maxCalls - Maximum number of calls allowed
+ * @param windowMs - Time window in milliseconds
+ * @returns true if rate limit is exceeded
+ */
+export function isRateLimited(key, maxCalls = 10, windowMs = 60000) {
+    const now = Date.now();
+    const entry = rateLimits.get(key);
+    if (!entry || now > entry.resetAt) {
+        // No entry or expired, create new
+        rateLimits.set(key, {
+            count: 1,
+            resetAt: now + windowMs,
+        });
+        return false;
+    }
+    if (entry.count >= maxCalls) {
+        console.warn(`[Rate Limit] Exceeded ${maxCalls} calls for "${key}". Reset at ${new Date(entry.resetAt).toLocaleTimeString()}`);
+        return true;
+    }
+    // Increment counter
+    entry.count++;
+    return false;
+}
+/**
+ * Get remaining time before rate limit reset
+ */
+export function getRateLimitResetTime(key) {
+    const entry = rateLimits.get(key);
+    if (!entry)
+        return 0;
+    const now = Date.now();
+    return Math.max(0, entry.resetAt - now);
+}
+/**
+ * Reset rate limit for a key
+ */
+export function resetRateLimit(key) {
+    if (key) {
+        rateLimits.delete(key);
+    }
+    else {
+        rateLimits.clear();
+    }
+}
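
The limiter added above is a fixed-window counter: the first call in a window records count 1 and a resetAt timestamp, subsequent calls increment the count, and calls beyond maxCalls inside the same window return true without incrementing. A small sketch of that behaviour (the "demo" key and window values are illustrative):

```ts
import {
  isRateLimited,
  getRateLimitResetTime,
  resetRateLimit,
} from "@lastbrain/ai-ui-react";

// Allow 3 calls per 10-second window under the key "demo".
for (let i = 1; i <= 5; i++) {
  const blocked = isRateLimited("demo", 3, 10_000);
  console.log(`call ${i}: ${blocked ? "blocked" : "allowed"}`); // calls 1-3 allowed, 4-5 blocked
}

console.log(`window resets in ${getRateLimitResetTime("demo")} ms`);
resetRateLimit("demo"); // clear the window immediately (e.g. between tests)
```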
package/dist/utils/modelManagement.js
CHANGED
@@ -41,7 +41,7 @@ export async function getAvailableModels(options = {}) {
     const isExternalProxy = baseUrl && baseUrl.includes("/api/lastbrain");
     const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
     const endpoint = isExternalProxy
-        ? `${baseUrl}/ai/
+        ? `${baseUrl}/ai/models/available` // Proxy routes to public API
         : isPublicApi
             ? `${baseUrl}/ai/models/available` // → /api/public/v1/ai/models/available
             : baseUrl
@@ -73,7 +73,7 @@ export async function getUserModels(options = {}) {
     const isExternalProxy = baseUrl && baseUrl.includes("/api/lastbrain");
     const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
     const endpoint = isExternalProxy
-        ? `${baseUrl}/ai/
+        ? `${baseUrl}/ai/user/models` // Proxy routes to public API
         : isPublicApi
             ? `${baseUrl}/ai/user/models` // → /api/public/v1/ai/user/models
             : baseUrl
package/package.json
CHANGED
package/src/hooks/useModelManagement.ts
CHANGED
@@ -6,6 +6,12 @@ import {
   getUserModels,
   type ModelToggleOptions,
 } from "../utils/modelManagement";
+import {
+  getCached,
+  setCache,
+  isRateLimited,
+  getRateLimitResetTime,
+} from "../utils/cache";
 
 export interface AIModel {
   id: string;
@@ -67,6 +73,25 @@ export function useModelManagement(
       setLoading(true);
       setError(null);
 
+      // Check cache first (5 minutes TTL for models)
+      const cacheKey = `models_${category || "all"}`;
+      const cached = getCached<AIModel[]>(cacheKey, 300000);
+      if (cached) {
+        console.log("[useModelManagement] Using cached models");
+        setAvailableModels(cached);
+        setLoading(false);
+        return;
+      }
+
+      // Check rate limit (max 5 calls per minute)
+      if (isRateLimited("models_fetch", 5, 60000)) {
+        const resetTime = getRateLimitResetTime("models_fetch");
+        const resetSeconds = Math.ceil(resetTime / 1000);
+        throw new Error(
+          `Rate limit exceeded. Please wait ${resetSeconds}s before trying again.`
+        );
+      }
+
       console.log(
         "[useModelManagement] Fetching available models with options:",
         {
@@ -85,6 +110,8 @@ export function useModelManagement(
         : models;
 
       setAvailableModels(filteredModels);
+      // Cache the results
+      setCache(cacheKey, filteredModels);
       console.log(
         "[useModelManagement] Set filtered models:",
         filteredModels.length
package/src/hooks/usePrompts.ts
CHANGED
@@ -2,6 +2,13 @@
 
 import { useState, useCallback } from "react";
 import { useAiContext } from "../context/AiProvider";
+import {
+  getCached,
+  setCache,
+  clearCache,
+  isRateLimited,
+  getRateLimitResetTime,
+} from "../utils/cache";
 
 export interface Prompt {
   id: string;
@@ -58,6 +65,27 @@ export function usePrompts(): UsePromptsReturn {
       setLoading(true);
       setError(null);
 
+      // Generate cache key based on options
+      const cacheKey = `prompts_${JSON.stringify(options || {})}`;
+
+      // Check cache first (60 seconds TTL)
+      const cached = getCached<Prompt[] | PublicPrompt[]>(cacheKey, 60000);
+      if (cached) {
+        console.log("[usePrompts] Using cached data");
+        setPrompts(cached);
+        setLoading(false);
+        return;
+      }
+
+      // Check rate limit (max 10 calls per minute)
+      if (isRateLimited("prompts_fetch", 10, 60000)) {
+        const resetTime = getRateLimitResetTime("prompts_fetch");
+        const resetSeconds = Math.ceil(resetTime / 1000);
+        throw new Error(
+          `Rate limit exceeded. Please wait ${resetSeconds}s before trying again.`
+        );
+      }
+
       const params = new URLSearchParams();
       if (options?.type) params.append("type", options.type);
       if (options?.favorite !== undefined)
@@ -106,7 +134,10 @@ export function usePrompts(): UsePromptsReturn {
       const data = await response.json();
 
       if (response.ok) {
-
+        const promptsData = data.prompts || [];
+        setPrompts(promptsData);
+        // Cache the results
+        setCache(cacheKey, promptsData);
       } else {
         setError(data.error || "Failed to fetch prompts");
       }
@@ -126,7 +157,7 @@ export function usePrompts(): UsePromptsReturn {
 
       const isExternalProxy = baseUrl && baseUrl.includes("/api/lastbrain");
       const endpoint = isExternalProxy
-        ? `${baseUrl}/
+        ? `${baseUrl}/prompts` // Proxy handles auth routes
        : "/api/ai/auth/prompts";
 
       const headers: HeadersInit = { "Content-Type": "application/json" };
@@ -144,6 +175,8 @@ export function usePrompts(): UsePromptsReturn {
       const result = await response.json();
 
       if (response.ok) {
+        // Invalidate cache after creating a prompt
+        clearCache();
         return result.prompt;
       } else {
         setError(result.error || "Failed to create prompt");
@@ -188,6 +221,8 @@ export function usePrompts(): UsePromptsReturn {
       const result = await response.json();
 
       if (response.ok) {
+        // Invalidate cache after updating a prompt
+        clearCache();
         return result.prompt;
       } else {
         setError(result.error || "Failed to update prompt");
@@ -210,7 +245,7 @@ export function usePrompts(): UsePromptsReturn {
       const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
 
       const endpoint = isExternalProxy
-        ? `${baseUrl}/
+        ? `${baseUrl}/prompts?id=${id}` // Proxy handles auth routes
         : isPublicApi
           ? `${baseUrl}/prompts?id=${id}`
           : baseUrl
@@ -231,6 +266,8 @@ export function usePrompts(): UsePromptsReturn {
       const result = await response.json();
 
       if (response.ok) {
+        // Invalidate cache after deleting a prompt
+        clearCache();
         return true;
       } else {
         setError(result.error || "Failed to delete prompt");
@@ -251,7 +288,7 @@ export function usePrompts(): UsePromptsReturn {
       const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
 
       const endpoint = isExternalProxy
-        ? `${baseUrl}/
+        ? `${baseUrl}/prompts/stats` // Proxy handles auth routes internally
         : isPublicApi
           ? `${baseUrl}/prompts/stats`
           : baseUrl
package/src/index.ts
CHANGED
package/src/utils/cache.ts
ADDED
@@ -0,0 +1,111 @@
+/**
+ * Simple cache and rate limiting utilities for API calls
+ */
+
+interface CacheEntry<T> {
+  data: T;
+  timestamp: number;
+}
+
+interface RateLimitEntry {
+  count: number;
+  resetAt: number;
+}
+
+const cache = new Map<string, CacheEntry<any>>();
+const rateLimits = new Map<string, RateLimitEntry>();
+
+/**
+ * Get cached data if valid
+ */
+export function getCached<T>(key: string, maxAgeMs: number = 60000): T | null {
+  const entry = cache.get(key);
+  if (!entry) return null;
+
+  const now = Date.now();
+  if (now - entry.timestamp > maxAgeMs) {
+    cache.delete(key);
+    return null;
+  }
+
+  return entry.data;
+}
+
+/**
+ * Set cached data
+ */
+export function setCache<T>(key: string, data: T): void {
+  cache.set(key, {
+    data,
+    timestamp: Date.now(),
+  });
+}
+
+/**
+ * Clear cache for a specific key or all cache
+ */
+export function clearCache(key?: string): void {
+  if (key) {
+    cache.delete(key);
+  } else {
+    cache.clear();
+  }
+}
+
+/**
+ * Check if rate limit is exceeded
+ * @param key - Unique identifier for the rate limit
+ * @param maxCalls - Maximum number of calls allowed
+ * @param windowMs - Time window in milliseconds
+ * @returns true if rate limit is exceeded
+ */
+export function isRateLimited(
+  key: string,
+  maxCalls: number = 10,
+  windowMs: number = 60000
+): boolean {
+  const now = Date.now();
+  const entry = rateLimits.get(key);
+
+  if (!entry || now > entry.resetAt) {
+    // No entry or expired, create new
+    rateLimits.set(key, {
+      count: 1,
+      resetAt: now + windowMs,
+    });
+    return false;
+  }
+
+  if (entry.count >= maxCalls) {
+    console.warn(
+      `[Rate Limit] Exceeded ${maxCalls} calls for "${key}". Reset at ${new Date(entry.resetAt).toLocaleTimeString()}`
+    );
+    return true;
+  }
+
+  // Increment counter
+  entry.count++;
+  return false;
+}
+
+/**
+ * Get remaining time before rate limit reset
+ */
+export function getRateLimitResetTime(key: string): number {
+  const entry = rateLimits.get(key);
+  if (!entry) return 0;
+
+  const now = Date.now();
+  return Math.max(0, entry.resetAt - now);
+}
+
+/**
+ * Reset rate limit for a key
+ */
+export function resetRateLimit(key?: string): void {
+  if (key) {
+    rateLimits.delete(key);
+  } else {
+    rateLimits.clear();
+  }
+}
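
Because cache and rateLimits are module-level Maps, every hook rendered from the same bundle shares one store for the lifetime of the page. In tests, or when a user signs out, it may therefore be worth resetting both explicitly; a minimal sketch (beforeEach stands in for whatever hook your test runner provides):

```ts
import { clearCache, resetRateLimit } from "@lastbrain/ai-ui-react";

// Provided by the test runner (Jest, Vitest, etc.); declared here only so
// the sketch is self-contained.
declare const beforeEach: (fn: () => void) => void;

// Reset the shared module-level stores between test cases so cached models,
// prompts, and rate-limit windows from one test cannot leak into the next.
beforeEach(() => {
  clearCache(); // empties the shared cache Map
  resetRateLimit(); // clears every rate-limit window
});
```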
package/src/utils/modelManagement.ts
CHANGED
@@ -73,7 +73,7 @@ export async function getAvailableModels(
   const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
 
   const endpoint = isExternalProxy
-    ? `${baseUrl}/ai/
+    ? `${baseUrl}/ai/models/available` // Proxy routes to public API
     : isPublicApi
       ? `${baseUrl}/ai/models/available` // → /api/public/v1/ai/models/available
       : baseUrl
@@ -125,7 +125,7 @@ export async function getUserModels(
   const isPublicApi = baseUrl && baseUrl.includes("/api/public/v1");
 
   const endpoint = isExternalProxy
-    ? `${baseUrl}/ai/
+    ? `${baseUrl}/ai/user/models` // Proxy routes to public API
     : isPublicApi
       ? `${baseUrl}/ai/user/models` // → /api/public/v1/ai/user/models
      : baseUrl