@lastbrain/ai-ui-react 1.0.35 → 1.0.36
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/AiChipLabel.d.ts.map +1 -1
- package/dist/components/AiChipLabel.js +5 -1
- package/dist/components/AiImageButton.d.ts.map +1 -1
- package/dist/components/AiImageButton.js +13 -1
- package/dist/components/AiInput.d.ts.map +1 -1
- package/dist/components/AiInput.js +5 -1
- package/dist/components/AiSelect.d.ts.map +1 -1
- package/dist/components/AiSelect.js +5 -1
- package/dist/components/AiTextarea.d.ts.map +1 -1
- package/dist/components/AiTextarea.js +5 -1
- package/dist/context/AiProvider.d.ts.map +1 -1
- package/dist/context/AiProvider.js +76 -20
- package/dist/hooks/useAiModels.d.ts +1 -1
- package/dist/hooks/useAiModels.d.ts.map +1 -1
- package/dist/hooks/useAiModels.js +36 -5
- package/dist/hooks/useModelManagement.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/components/AiChipLabel.tsx +5 -1
- package/src/components/AiImageButton.tsx +14 -1
- package/src/components/AiInput.tsx +5 -1
- package/src/components/AiPromptPanel.tsx +12 -8
- package/src/components/AiSelect.tsx +5 -1
- package/src/components/AiTextarea.tsx +5 -1
- package/src/context/AiProvider.tsx +92 -25
- package/src/hooks/useAiModels.ts +58 -11
- package/src/hooks/useModelManagement.ts +7 -2
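Most of the source changes below follow one pattern: the chip, input, select, and textarea components now pass an explicit modelType to useAiModels instead of consuming the unfiltered list, AiImageButton keeps filtering for image models, and AiProvider gains request deduplication plus verbose console logging. A minimal consumer-side sketch of the updated hook call, with option and result names taken from dist/hooks/useAiModels.d.ts in this diff (the root export path is an assumption, not confirmed here):

// Sketch only: option/result names come from UseAiModelsOptions/UseAiModelsResult in this diff;
// importing the hook from the package root is an assumption.
import { useAiModels } from "@lastbrain/ai-ui-react";

export function TextModelList() {
  // Must render inside <AiProvider baseUrl=... apiKeyId=...>; when baseUrl/apiKeyId are
  // omitted here, the hook falls back to the provider context values.
  const { models, loading, error } = useAiModels({
    modelType: "text-or-language", // also: "text" | "language" | "image" | "embed"
  });

  if (loading) return null;               // the hook returns [] while providers are loading
  if (error) console.warn(error.message); // e.g. baseUrl/apiKeyId mismatch with the context
  return models.map((m) => m.name).join(", ");
}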

package/dist/components/AiChipLabel.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"AiChipLabel.d.ts","sourceRoot":"","sources":["../../src/components/AiChipLabel.tsx"],"names":[],"mappings":"AAEA,OAAO,KAA0C,MAAM,OAAO,CAAC;AAS/D,MAAM,WAAW,gBAAgB;IAC/B,QAAQ,EAAE,KAAK,CAAC,SAAS,CAAC;IAC1B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,SAAS,GAAG,QAAQ,CAAC;IACvD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC;CAC7B;AAED,wBAAgB,WAAW,CAAC,EAC1B,QAAQ,EACR,OAAmB,EACnB,SAAS,EACT,KAAK,EAAE,WAAW,GACnB,EAAE,gBAAgB,2CAgClB;AAED,MAAM,WAAW,gBAAgB;IAC/B,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,IAAI,CAAC;IACrC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,wBAAgB,WAAW,CAAC,EAC1B,KAAU,EACV,QAAQ,EACR,WAAoE,EACpE,OAAO,EACP,QAAQ,EACR,eAAuB,EACvB,SAAS,EACT,YAAY,EACZ,aAAa,EACb,OAAO,EAAE,WAAW,EACpB,QAAQ,EAAE,YAAY,GACvB,EAAE,gBAAgB,
+
{"version":3,"file":"AiChipLabel.d.ts","sourceRoot":"","sources":["../../src/components/AiChipLabel.tsx"],"names":[],"mappings":"AAEA,OAAO,KAA0C,MAAM,OAAO,CAAC;AAS/D,MAAM,WAAW,gBAAgB;IAC/B,QAAQ,EAAE,KAAK,CAAC,SAAS,CAAC;IAC1B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,SAAS,GAAG,QAAQ,CAAC;IACvD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC;CAC7B;AAED,wBAAgB,WAAW,CAAC,EAC1B,QAAQ,EACR,OAAmB,EACnB,SAAS,EACT,KAAK,EAAE,WAAW,GACnB,EAAE,gBAAgB,2CAgClB;AAED,MAAM,WAAW,gBAAgB;IAC/B,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,IAAI,CAAC;IACrC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,wBAAgB,WAAW,CAAC,EAC1B,KAAU,EACV,QAAQ,EACR,WAAoE,EACpE,OAAO,EACP,QAAQ,EACR,eAAuB,EACvB,SAAS,EACT,YAAY,EACZ,aAAa,EACb,OAAO,EAAE,WAAW,EACpB,QAAQ,EAAE,YAAY,GACvB,EAAE,gBAAgB,2CAkMlB"}

package/dist/components/AiChipLabel.js
CHANGED
@@ -42,7 +42,11 @@ export function AiChipInput({ value = [], onChange, placeholder = "Tapez et appu
  const baseUrl = propBaseUrl ?? aiContext.baseUrl;
  const apiKeyId = propApiKeyId ?? aiContext.apiKeyId;
  // Hooks pour l'IA avec les valeurs du contexte
- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText } = useAiCallText({ baseUrl, apiKeyId });
  const addChip = (text) => {
  if (!text.trim())

package/dist/components/AiImageButton.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"AiImageButton.d.ts","sourceRoot":"","sources":["../../src/components/AiImageButton.tsx"],"names":[],"mappings":"AAEA,OAAO,EAAY,KAAK,oBAAoB,EAAE,MAAM,OAAO,CAAC;AAE5D,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAU5C,MAAM,WAAW,kBACf,SACE,IAAI,CAAC,WAAW,EAAE,SAAS,GAAG,MAAM,CAAC,EACrC,IAAI,CAAC,oBAAoB,CAAC,iBAAiB,CAAC,EAAE,SAAS,GAAG,UAAU,CAAC;IACvE,OAAO,CAAC,EAAE,CACR,QAAQ,EAAE,MAAM,EAChB,QAAQ,CAAC,EAAE;QAAE,SAAS,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAA;KAAE,KAC7C,IAAI,CAAC;IACV,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;IAC5B,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,WAAW,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC7C,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,wBAAgB,aAAa,CAAC,EAC5B,OAAO,EAAE,WAAW,EACpB,QAAQ,EAAE,YAAY,EACtB,MAAgB,EAChB,OAAO,EAAE,QAAQ,EACjB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,OAAO,EACf,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,aAAoB,EACpB,WAAW,EACX,YAAY,EACZ,aAAa,EACb,GAAG,WAAW,EACf,EAAE,kBAAkB,
+
{"version":3,"file":"AiImageButton.d.ts","sourceRoot":"","sources":["../../src/components/AiImageButton.tsx"],"names":[],"mappings":"AAEA,OAAO,EAAY,KAAK,oBAAoB,EAAE,MAAM,OAAO,CAAC;AAE5D,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAU5C,MAAM,WAAW,kBACf,SACE,IAAI,CAAC,WAAW,EAAE,SAAS,GAAG,MAAM,CAAC,EACrC,IAAI,CAAC,oBAAoB,CAAC,iBAAiB,CAAC,EAAE,SAAS,GAAG,UAAU,CAAC;IACvE,OAAO,CAAC,EAAE,CACR,QAAQ,EAAE,MAAM,EAChB,QAAQ,CAAC,EAAE;QAAE,SAAS,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAA;KAAE,KAC7C,IAAI,CAAC;IACV,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;IAC5B,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,WAAW,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC7C,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,wBAAgB,aAAa,CAAC,EAC5B,OAAO,EAAE,WAAW,EACpB,QAAQ,EAAE,YAAY,EACtB,MAAgB,EAChB,OAAO,EAAE,QAAQ,EACjB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,OAAO,EACf,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,aAAoB,EACpB,WAAW,EACX,YAAY,EACZ,aAAa,EACb,GAAG,WAAW,EACf,EAAE,kBAAkB,2CAgYpB"}

package/dist/components/AiImageButton.js
CHANGED
@@ -22,6 +22,10 @@ export function AiImageButton({ baseUrl: propBaseUrl, apiKeyId: propApiKeyId, ui
  // Récupérer uniquement les modèles image
  const { models } = useAiModels({ baseUrl, apiKeyId, modelType: "image" });
  const { generateImage, loading } = useAiCallImage({ baseUrl, apiKeyId });
+ console.log("[AiImageButton] Models received:", {
+ count: models.length,
+ models: models.map((m) => ({ id: m.id, name: m.name, type: m.type })),
+ });
  const handleOpenPanel = () => {
  setIsOpen(true);
  };
@@ -190,7 +194,15 @@ export function AiImageButton({ baseUrl: propBaseUrl, apiKeyId: propApiKeyId, ui
  } }), _jsx("span", { style: { letterSpacing: "0.025em" }, children: "G\u00E9n\u00E9ration..." })] })) : (_jsxs(_Fragment, { children: [_jsx(ImageIcon, { size: 18, style: {
  color: "white",
  filter: "drop-shadow(0 0 2px rgba(255,255,255,0.2))",
- } }), _jsx("span", { style: { letterSpacing: "0.025em" }, children: children || "Générer une image" })] })) }), isOpen && (_jsx(AiPromptPanel, { isOpen: isOpen, onClose: handleClosePanel, onSubmit: handleSubmit, uiMode: uiMode, models:
+ } }), _jsx("span", { style: { letterSpacing: "0.025em" }, children: children || "Générer une image" })] })) }), isOpen && (_jsx(AiPromptPanel, { isOpen: isOpen, onClose: handleClosePanel, onSubmit: handleSubmit, uiMode: uiMode, models: (() => {
+ const filteredModels = models.filter((m) => m.type === "image");
+ console.log("[AiImageButton] Passing to AiPromptPanel:", {
+ originalCount: models.length,
+ filteredCount: filteredModels.length,
+ models: filteredModels,
+ });
+ return filteredModels;
+ })() }))] }), showImageCard && generatedImage && (_jsxs("div", { className: "relative", style: {
  maxWidth: "320px",
  borderRadius: "12px",
  padding: "16px",

package/dist/components/AiInput.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"AiInput.d.ts","sourceRoot":"","sources":["../../src/components/AiInput.tsx"],"names":[],"mappings":"AAEA,OAAc,EAAoB,KAAK,mBAAmB,EAAE,MAAM,OAAO,CAAC;AAE1E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAQ5C,MAAM,WAAW,YACf,SACE,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,EACzB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAAE,SAAS,CAAC;IACxD,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;IAC5B,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,wBAAgB,OAAO,CAAC,EACtB,OAAO,EACP,QAAQ,EACR,MAAgB,EAChB,OAAO,EACP,KAAK,EACL,MAAM,EACN,QAAgB,EAChB,qBAA6B,EAC7B,YAAY,EACZ,aAAa,EACb,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,GAAG,UAAU,EACd,EAAE,YAAY,
+
{"version":3,"file":"AiInput.d.ts","sourceRoot":"","sources":["../../src/components/AiInput.tsx"],"names":[],"mappings":"AAEA,OAAc,EAAoB,KAAK,mBAAmB,EAAE,MAAM,OAAO,CAAC;AAE1E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAQ5C,MAAM,WAAW,YACf,SACE,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,EACzB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAAE,SAAS,CAAC;IACxD,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;IAC5B,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,wBAAgB,OAAO,CAAC,EACtB,OAAO,EACP,QAAQ,EACR,MAAgB,EAChB,OAAO,EACP,KAAK,EACL,MAAM,EACN,QAAgB,EAChB,qBAA6B,EAC7B,YAAY,EACZ,aAAa,EACb,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,GAAG,UAAU,EACd,EAAE,YAAY,2CAiLd"}

package/dist/components/AiInput.js
CHANGED
@@ -15,7 +15,11 @@ export function AiInput({ baseUrl, apiKeyId, uiMode = "modal", context, model, p
  const [isButtonHovered, setIsButtonHovered] = useState(false);
  const inputRef = useRef(null);
  const { showUsageToast, toastData, toastKey, clearToast } = useUsageToast();
- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText, loading } = useAiCallText({ baseUrl, apiKeyId });
  const hasConfiguration = Boolean(model && prompt);
  const handleOpenPanel = () => {

package/dist/components/AiSelect.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"AiSelect.d.ts","sourceRoot":"","sources":["../../src/components/AiSelect.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,EAAY,KAAK,oBAAoB,EAAE,MAAM,OAAO,CAAC;AACnE,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAQ5C,MAAM,WAAW,aACf,SACE,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,EACzB,IAAI,CAAC,oBAAoB,CAAC,iBAAiB,CAAC,EAAE,SAAS,CAAC;IAC1D,QAAQ,EAAE,KAAK,CAAC,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;CAC7B;AAED,wBAAgB,QAAQ,CAAC,EACvB,OAAO,EACP,QAAQ,EACR,MAAgB,EAChB,OAAO,EACP,KAAK,EACL,MAAM,EACN,YAAY,EACZ,aAAa,EACb,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,GAAG,WAAW,EACf,EAAE,aAAa,
+
{"version":3,"file":"AiSelect.d.ts","sourceRoot":"","sources":["../../src/components/AiSelect.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,EAAY,KAAK,oBAAoB,EAAE,MAAM,OAAO,CAAC;AACnE,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAQ5C,MAAM,WAAW,aACf,SACE,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,EACzB,IAAI,CAAC,oBAAoB,CAAC,iBAAiB,CAAC,EAAE,SAAS,CAAC;IAC1D,QAAQ,EAAE,KAAK,CAAC,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;CAC7B;AAED,wBAAgB,QAAQ,CAAC,EACvB,OAAO,EACP,QAAQ,EACR,MAAgB,EAChB,OAAO,EACP,KAAK,EACL,MAAM,EACN,YAAY,EACZ,aAAa,EACb,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,GAAG,WAAW,EACf,EAAE,aAAa,2CAqFf"}

package/dist/components/AiSelect.js
CHANGED
@@ -11,7 +11,11 @@ export function AiSelect({ baseUrl, apiKeyId, uiMode = "modal", context, model,
  const [isOpen, setIsOpen] = useState(false);
  const [isFocused, setIsFocused] = useState(false);
  const { showUsageToast, toastData, toastKey, clearToast } = useUsageToast();
- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText, loading } = useAiCallText({ baseUrl, apiKeyId });
  const handleOpenPanel = () => {
  setIsOpen(true);

package/dist/components/AiTextarea.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"AiTextarea.d.ts","sourceRoot":"","sources":["../../src/components/AiTextarea.tsx"],"names":[],"mappings":"AAEA,OAAc,EAIZ,KAAK,sBAAsB,EAC5B,MAAM,OAAO,CAAC;AAEf,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAQ5C,MAAM,WAAW,eACf,SACE,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,EACzB,IAAI,CAAC,sBAAsB,CAAC,mBAAmB,CAAC,EAAE,SAAS,CAAC;IAC9D,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;CAC7B;AAED,wBAAgB,UAAU,CAAC,EACzB,OAAO,EACP,QAAQ,EACR,MAAgB,EAChB,OAAO,EACP,KAAK,EACL,MAAM,EACN,QAAgB,EAChB,qBAAqB,EACrB,YAAY,EACZ,aAAa,EACb,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,GAAG,aAAa,EACjB,EAAE,eAAe,
+
{"version":3,"file":"AiTextarea.d.ts","sourceRoot":"","sources":["../../src/components/AiTextarea.tsx"],"names":[],"mappings":"AAEA,OAAc,EAIZ,KAAK,sBAAsB,EAC5B,MAAM,OAAO,CAAC;AAEf,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAQ5C,MAAM,WAAW,eACf,SACE,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,EACzB,IAAI,CAAC,sBAAsB,CAAC,mBAAmB,CAAC,EAAE,SAAS,CAAC;IAC9D,MAAM,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;CAC7B;AAED,wBAAgB,UAAU,CAAC,EACzB,OAAO,EACP,QAAQ,EACR,MAAgB,EAChB,OAAO,EACP,KAAK,EACL,MAAM,EACN,QAAgB,EAChB,qBAAqB,EACrB,YAAY,EACZ,aAAa,EACb,OAAO,EACP,OAAO,EACP,QAAQ,EACR,SAAS,EACT,GAAG,aAAa,EACjB,EAAE,eAAe,2CAkMjB"}

package/dist/components/AiTextarea.js
CHANGED
@@ -17,7 +17,11 @@ export function AiTextarea({ baseUrl, apiKeyId, uiMode = "modal", context, model
  const [isButtonHovered, setIsButtonHovered] = useState(false);
  const textareaRef = useRef(null);
  const { showUsageToast, toastData, toastKey, clearToast } = useUsageToast();
- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText, loading } = useAiCallText({ baseUrl, apiKeyId });
  const hasConfiguration = Boolean(model && prompt);
  const handleOpenPanel = () => {

package/dist/context/AiProvider.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"AiProvider.d.ts","sourceRoot":"","sources":["../../src/context/AiProvider.tsx"],"names":[],"mappings":"AAEA,OAAO,
+
{"version":3,"file":"AiProvider.d.ts","sourceRoot":"","sources":["../../src/context/AiProvider.tsx"],"names":[],"mappings":"AAEA,OAAO,EAOL,KAAK,SAAS,EACf,MAAM,OAAO,CAAC;AACf,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AACvC,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAItD,MAAM,WAAW,OAAO;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,CAAC;IAC/C,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,QAAQ,EAAE,CAAC;CACpB;AAED,MAAM,WAAW,cAAc;IAC7B,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IAEf,SAAS,EAAE,YAAY,EAAE,CAAC;IAC1B,SAAS,EAAE,QAAQ,EAAE,CAAC;IACtB,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,UAAU,EAAE,MAAM,EAAE,CAAC;IAErB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,iBAAiB,EAAE,OAAO,CAAC;IAE3B,gBAAgB,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,iBAAiB,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAEvC,eAAe,EAAE,CACf,IAAI,EAAE,MAAM,GAAG,UAAU,GAAG,OAAO,GAAG,OAAO,KAC1C,QAAQ,EAAE,CAAC;IAChB,aAAa,EAAE,MAAM,QAAQ,EAAE,CAAC;IAChC,cAAc,EAAE,MAAM,QAAQ,EAAE,CAAC;CAClC;AAED,QAAA,MAAM,SAAS,qDAAuD,CAAC;AAEvE,MAAM,WAAW,eAAe;IAC9B,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,EAAE,SAAS,CAAC;CACrB;AAED,wBAAgB,UAAU,CAAC,EACzB,OAAO,EACP,QAAQ,EACR,MAAgB,EAChB,QAAQ,GACT,EAAE,eAAe,2CAwLjB;AAED,wBAAgB,YAAY,IAAI,cAAc,CAM7C;AAED,OAAO,EAAE,SAAS,EAAE,CAAC"}

package/dist/context/AiProvider.js
CHANGED
@@ -1,6 +1,7 @@
  "use client";
  import { jsx as _jsx } from "react/jsx-runtime";
- import { createContext, useContext, useState, useEffect, useCallback, } from "react";
+ import { createContext, useContext, useState, useEffect, useCallback, useRef, } from "react";
+ import { createClient } from "@lastbrain/ai-ui-core";
  import { getAvailableModels, getUserModels } from "../utils/modelManagement";
  const AiContext = createContext(undefined);
  export function AiProvider({ baseUrl, apiKeyId, uiMode = "modal", children, }) {
@@ -10,30 +11,53 @@ export function AiProvider({ baseUrl, apiKeyId, uiMode = "modal", children, }) {
  const [userModels, setUserModels] = useState([]);
  const [loadingProviders, setLoadingProviders] = useState(false);
  const [loadingUserModels, setLoadingUserModels] = useState(false);
+ // Flags pour éviter les appels multiples et les boucles infinies
+ const isFetchingProviders = useRef(false);
+ const isFetchingUserModels = useRef(false);
+ const hasFetchedProviders = useRef(false);
+ const hasFetchedUserModels = useRef(false);
+ const providersAvailable = useRef(true); // false si 404
+ const userModelsAvailable = useRef(true); // false si 404
  // Récupérer les providers et leurs modèles + available models en parallèle
  const fetchProviders = useCallback(async () => {
-
-
+ // Éviter les appels multiples
+ if (isFetchingProviders.current) {
+ console.log("[AiProvider] Already fetching providers, skipping");
+ return;
+ }
+ // Si déjà fetch et non disponible, ne pas réessayer
+ if (hasFetchedProviders.current && !providersAvailable.current) {
+ console.log("[AiProvider] Providers API not available (404), skipping");
+ return;
+ }
+ isFetchingProviders.current = true;
  setLoadingProviders(true);
  try {
+ console.log("[AiProvider] Fetching providers from:", baseUrl);
+ // Utiliser createClient pour avoir buildUrl qui gère les routes correctement
+ const client = createClient({ baseUrl, apiKeyId });
  // Fetch providers et available models en parallèle
- const [
-
-
-
-
-
- }
+ const [providersData, availableModelsData] = await Promise.all([
+ client
+ .getModels()
+ .then((models) => {
+ // getModels retourne directement les modèles, pas les providers
+ // On doit reconstruire la structure providers
+ return { providers: [{ id: "default", name: "Default", models }] };
+ })
+ .catch((error) => {
+ console.error("[AiProvider] Error fetching models:", error);
+ if (error.message?.includes("404")) {
+ providersAvailable.current = false;
+ }
+ return { providers: [] };
  }),
  getAvailableModels({ baseUrl, apiKey: apiKeyId }).catch((error) => {
  console.warn("[AiProvider] Could not fetch available models:", error);
  return [];
  }),
  ]);
-
- throw new Error(`Failed to fetch providers: ${providersResponse.status}`);
- }
- const providersData = await providersResponse.json();
+ console.log("[AiProvider] Providers data received:", providersData);
  if (providersData.providers && Array.isArray(providersData.providers)) {
  setProviders(providersData.providers);
  // Extraire tous les modèles
@@ -43,10 +67,13 @@ export function AiProvider({ baseUrl, apiKeyId, uiMode = "modal", children, }) {
  models.push(...provider.models);
  }
  }
+ console.log("[AiProvider] Extracted models:", models.length, models);
  setAllModels(models);
  }
  // Stocker available models
+ console.log("[AiProvider] Available models:", availableModelsData.length);
  setAvailableModels(availableModelsData);
+ hasFetchedProviders.current = true;
  }
  catch (error) {
  console.error("[AiProvider] Error fetching providers:", error);
@@ -54,31 +81,47 @@ export function AiProvider({ baseUrl, apiKeyId, uiMode = "modal", children, }) {
  setProviders([]);
  setAllModels([]);
  setAvailableModels([]);
+ hasFetchedProviders.current = true;
  }
  finally {
  setLoadingProviders(false);
+ isFetchingProviders.current = false;
  }
- }, [baseUrl, apiKeyId
+ }, [baseUrl, apiKeyId]); // Retirer loadingProviders des dépendances
  // Récupérer les modèles activés par l'utilisateur
  const fetchUserModels = useCallback(async () => {
-
-
+ // Éviter les appels multiples
+ if (isFetchingUserModels.current) {
+ console.log("[AiProvider] Already fetching user models, skipping");
+ return;
+ }
+ // Si déjà fetch et non disponible, ne pas réessayer
+ if (hasFetchedUserModels.current && !userModelsAvailable.current) {
+ console.log("[AiProvider] User models API not available (404), skipping");
+ return;
+ }
+ isFetchingUserModels.current = true;
  setLoadingUserModels(true);
  try {
  const models = await getUserModels({ baseUrl, apiKey: apiKeyId });
  setUserModels(models);
+ hasFetchedUserModels.current = true;
  }
  catch (error) {
  console.error("[AiProvider] Error fetching user models:", error);
  // En cas d'erreur 404, ne pas logger comme erreur critique
  if (error instanceof Error && error.message.includes("404")) {
+ console.warn("[AiProvider] User models API not available (404)");
+ userModelsAvailable.current = false;
  setUserModels([]);
  }
+ hasFetchedUserModels.current = true;
  }
  finally {
  setLoadingUserModels(false);
+ isFetchingUserModels.current = false;
  }
- }, [baseUrl, apiKeyId
+ }, [baseUrl, apiKeyId]); // Retirer loadingUserModels des dépendances
  // Récupérer les données au montage du provider
  useEffect(() => {
  fetchProviders(); // Fetch providers + available models en même temps
@@ -86,13 +129,26 @@ export function AiProvider({ baseUrl, apiKeyId, uiMode = "modal", children, }) {
  }, [fetchProviders, fetchUserModels]);
  // Helpers pour filtrer les modèles par type
  const getModelsByType = useCallback((type) => {
-
+ const filtered = allModels.filter((model) => model.type === type);
+ console.log(`[AiProvider] getModelsByType(${type}):`, {
+ total: allModels.length,
+ filtered: filtered.length,
+ models: filtered.map((m) => ({ id: m.id, name: m.name, type: m.type })),
+ });
+ return filtered;
  }, [allModels]);
  const getTextModels = useCallback(() => {
  return allModels.filter((model) => model.type === "text" || model.type === "language");
  }, [allModels]);
  const getImageModels = useCallback(() => {
-
+ const imageModels = allModels.filter((model) => model.type === "image");
+ console.log("[AiProvider] getImageModels:", {
+ totalModels: allModels.length,
+ imageModels: imageModels.length,
+ allTypes: allModels.map((m) => ({ id: m.id, type: m.type })),
+ imageModelsList: imageModels.map((m) => ({ id: m.id, name: m.name })),
+ });
+ return imageModels;
  }, [allModels]);
  const value = {
  baseUrl,
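
The compiled provider above replaces the old raw fetch of a providers endpoint (the removed providersResponse branch) with client.getModels() from @lastbrain/ai-ui-core and rebuilds a single synthetic "default" provider around the result. A condensed sketch of that reshaping step, using only calls that appear in this diff (the element type of the returned models is not pinned down here):

// Condensed sketch of the new fetch path in fetchProviders above; not the full implementation.
import { createClient } from "@lastbrain/ai-ui-core";

async function loadAllModels(baseUrl: string, apiKeyId: string) {
  const client = createClient({ baseUrl, apiKeyId });
  const data = await client
    .getModels()
    // getModels returns the models directly, so a single synthetic provider is rebuilt around them
    .then((models) => ({ providers: [{ id: "default", name: "Default", models }] }))
    // a 404 marks the endpoint as unavailable; the refs added above prevent any retry
    .catch(() => ({ providers: [] }));
  // AiProvider then flattens every provider's models into one allModels list
  return data.providers.flatMap((p) => p.models);
}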

package/dist/hooks/useAiModels.d.ts
CHANGED
@@ -5,7 +5,7 @@ export interface UseAiModelsOptions {
  modelType?: "text" | "language" | "image" | "embed" | "text-or-language";
  }
  export interface UseAiModelsResult {
- models: ModelRef[]
+ models: ModelRef[];
  loading: boolean;
  error: Error | null;
  refetch: () => void;

package/dist/hooks/useAiModels.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"useAiModels.d.ts","sourceRoot":"","sources":["../../src/hooks/useAiModels.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAGtD,MAAM,WAAW,kBAAkB;IACjC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,GAAG,UAAU,GAAG,OAAO,GAAG,OAAO,GAAG,kBAAkB,CAAC;CAC1E;AAED,MAAM,WAAW,iBAAiB;IAChC,MAAM,EAAE,QAAQ,EAAE,
+
{"version":3,"file":"useAiModels.d.ts","sourceRoot":"","sources":["../../src/hooks/useAiModels.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAGtD,MAAM,WAAW,kBAAkB;IACjC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,GAAG,UAAU,GAAG,OAAO,GAAG,OAAO,GAAG,kBAAkB,CAAC;CAC1E;AAED,MAAM,WAAW,iBAAiB;IAChC,MAAM,EAAE,QAAQ,EAAE,CAAC;IACnB,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC;IACpB,OAAO,EAAE,MAAM,IAAI,CAAC;CACrB;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,OAAO,CAAC,EAAE,kBAAkB,GAAG,iBAAiB,CA+F3E"}

package/dist/hooks/useAiModels.js
CHANGED
@@ -13,16 +13,46 @@ export function useAiModels(options) {
  (!options?.apiKeyId || options.apiKeyId === context.apiKeyId);
  // Filtrer les modèles selon le type demandé
  const filteredModels = useMemo(() => {
-
-
+ console.log("[useAiModels] Filtering models:", {
+ useContextData,
+ allModelsLength: context.allModels.length,
+ modelType: options?.modelType,
+ loading: context.loadingProviders,
+ });
+ if (!useContextData) {
+ console.log("[useAiModels] Not using context data");
+ return [];
+ }
+ // Pendant le chargement, retourner un tableau vide
+ if (context.loadingProviders) {
+ console.log("[useAiModels] Still loading...");
+ return [];
+ }
+ // Si pas de modèles après le chargement
+ if (!context.allModels.length) {
+ console.log("[useAiModels] No models in context");
+ return [];
+ }
  if (!options?.modelType) {
+ console.log("[useAiModels] Returning all models:", context.allModels.length);
  return context.allModels;
  }
  // Cas spécial: text ou language
  if (options.modelType === "text-or-language") {
-
+ const textModels = context.getTextModels();
+ console.log("[useAiModels] Returning text models:", textModels.length);
+ return textModels;
+ }
+ // Cas spécial: image
+ if (options.modelType === "image") {
+ const imageModels = context.getImageModels();
+ console.log("[useAiModels] Returning image models:", imageModels.length, imageModels);
+ return imageModels;
  }
-
+ const filtered = context.getModelsByType(options.modelType);
+ console.log(`[useAiModels] Returning ${options.modelType} models:`, filtered.length);
+ return filtered;
+ return filtered;
  }, [useContextData, context, options?.modelType]);
  const refetch = useCallback(() => {
  if (useContextData) {
@@ -39,8 +69,9 @@ export function useAiModels(options) {
  }
  // Fallback: si les options ne correspondent pas, retourner des valeurs vides
  // (cas rare, la plupart du temps on utilise le contexte)
+ console.log("[useAiModels] Using fallback (no context match)");
  return {
- models:
+ models: [],
  loading: false,
  error: new Error("useAiModels called with different baseUrl/apiKeyId than context"),
  refetch: () => { },

package/dist/hooks/useModelManagement.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"useModelManagement.d.ts","sourceRoot":"","sources":["../../src/hooks/useModelManagement.ts"],"names":[],"mappings":"AACA,OAAO,EAAgB,KAAK,OAAO,EAAE,MAAM,uBAAuB,CAAC;AACnE,OAAO,EAEL,KAAK,kBAAkB,EACxB,MAAM,0BAA0B,CAAC;AAElC,MAAM,WAAW,yBAA0B,SAAQ,kBAAkB;IACnE,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,CAAC;CACjD;AAED,MAAM,WAAW,wBAAwB;IAEvC,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IAGrB,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACpE,aAAa,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IACnC,iBAAiB,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAGvC,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5C,eAAe,EAAE,MAAM,OAAO,EAAE,CAAC;IACjC,iBAAiB,EAAE,MAAM,OAAO,EAAE,CAAC;CACpC;AAED;;;GAGG;AACH,wBAAgB,kBAAkB,CAChC,OAAO,GAAE,yBAA8B,GACtC,wBAAwB,
+
{"version":3,"file":"useModelManagement.d.ts","sourceRoot":"","sources":["../../src/hooks/useModelManagement.ts"],"names":[],"mappings":"AACA,OAAO,EAAgB,KAAK,OAAO,EAAE,MAAM,uBAAuB,CAAC;AACnE,OAAO,EAEL,KAAK,kBAAkB,EACxB,MAAM,0BAA0B,CAAC;AAElC,MAAM,WAAW,yBAA0B,SAAQ,kBAAkB;IACnE,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,CAAC;CACjD;AAED,MAAM,WAAW,wBAAwB;IAEvC,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IAGrB,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACpE,aAAa,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IACnC,iBAAiB,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAGvC,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5C,eAAe,EAAE,MAAM,OAAO,EAAE,CAAC;IACjC,iBAAiB,EAAE,MAAM,OAAO,EAAE,CAAC;CACpC;AAED;;;GAGG;AACH,wBAAgB,kBAAkB,CAChC,OAAO,GAAE,yBAA8B,GACtC,wBAAwB,CAiG1B"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@lastbrain/ai-ui-react",
- "version": "1.0.
+ "version": "1.0.36",
  "description": "Headless React components for LastBrain AI UI Kit",
  "private": false,
  "type": "module",
@@ -48,7 +48,7 @@
  },
  "dependencies": {
  "lucide-react": "^0.257.0",
- "@lastbrain/ai-ui-core": "1.0.
+ "@lastbrain/ai-ui-core": "1.0.25"
  },
  "devDependencies": {
  "@types/react": "^19.2.0",

package/src/components/AiChipLabel.tsx
CHANGED
@@ -93,7 +93,11 @@ export function AiChipInput({
  const apiKeyId = propApiKeyId ?? aiContext.apiKeyId;

  // Hooks pour l'IA avec les valeurs du contexte
- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText } = useAiCallText({ baseUrl, apiKeyId });

  const addChip = (text: string) => {

package/src/components/AiImageButton.tsx
CHANGED
@@ -67,6 +67,11 @@ export function AiImageButton({
  const { models } = useAiModels({ baseUrl, apiKeyId, modelType: "image" });
  const { generateImage, loading } = useAiCallImage({ baseUrl, apiKeyId });

+ console.log("[AiImageButton] Models received:", {
+ count: models.length,
+ models: models.map((m) => ({ id: m.id, name: m.name, type: m.type })),
+ });
+
  const handleOpenPanel = () => {
  setIsOpen(true);
  };
@@ -288,7 +293,15 @@ export function AiImageButton({
  onClose={handleClosePanel}
  onSubmit={handleSubmit}
  uiMode={uiMode}
- models={
+ models={(() => {
+ const filteredModels = models.filter((m) => m.type === "image");
+ console.log("[AiImageButton] Passing to AiPromptPanel:", {
+ originalCount: models.length,
+ filteredCount: filteredModels.length,
+ models: filteredModels,
+ });
+ return filteredModels;
+ })()}
  />
  )}
  </div>
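
The models prop above is computed inside an immediately invoked function expression so the component can filter and log in a single JSX expression. A stripped-down sketch of that pattern (Panel and Model are placeholders, not part of the package):

// Stripped-down sketch of the filter-and-log-inside-a-prop pattern used above.
// Panel and Model are placeholders; only the IIFE shape mirrors the diff.
import React from "react";

type Model = { id: string; name: string; type: string };

function Panel({ models }: { models: Model[] }) {
  return <ul>{models.map((m) => <li key={m.id}>{m.name}</li>)}</ul>;
}

export function ImageModelPanel({ models }: { models: Model[] }) {
  return (
    <Panel
      models={(() => {
        const imageOnly = models.filter((m) => m.type === "image");
        console.log("[sketch] passing image models:", imageOnly.length);
        return imageOnly;
      })()}
    />
  );
}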

package/src/components/AiInput.tsx
CHANGED
@@ -44,7 +44,11 @@ export function AiInput({
  const inputRef = useRef<HTMLInputElement>(null);
  const { showUsageToast, toastData, toastKey, clearToast } = useUsageToast();

- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText, loading } = useAiCallText({ baseUrl, apiKeyId });

  const hasConfiguration = Boolean(model && prompt);

package/src/components/AiPromptPanel.tsx
CHANGED
@@ -1078,15 +1078,19 @@ function AiPromptPanelInternal({
  margin: "0",
  }}
  >
- {
- (
-
+ {
+ effectiveAvailableModels.filter(
+ (m) => m.category === "text"
+ ).length
+ }{" "}
  modèles disponibles •{" "}
- {
-
- (
-
-
+ {
+ effectiveUserModels.filter((id) =>
+ effectiveAvailableModels.some(
+ (m) => m.id === id && m.category === "text"
+ )
+ ).length
+ }{" "}
  activés
  </p>
  </div>
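
The two counters rendered above now consider only models whose category is "text": the first counts the available text models, the second counts the user-enabled ids that resolve to an available text model. Restated outside JSX (names shortened; AvailableModel only carries the two fields the expressions read):

// Restatement of the two counters rendered above, outside JSX.
type AvailableModel = { id: string; category: string };

const availableTextCount = (available: AvailableModel[]) =>
  available.filter((m) => m.category === "text").length;

const enabledTextCount = (userModelIds: string[], available: AvailableModel[]) =>
  userModelIds.filter((id) =>
    available.some((m) => m.id === id && m.category === "text")
  ).length;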

package/src/components/AiSelect.tsx
CHANGED
@@ -37,7 +37,11 @@ export function AiSelect({
  const [isFocused, setIsFocused] = useState(false);
  const { showUsageToast, toastData, toastKey, clearToast } = useUsageToast();

- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText, loading } = useAiCallText({ baseUrl, apiKeyId });

  const handleOpenPanel = () => {

package/src/components/AiTextarea.tsx
CHANGED
@@ -50,7 +50,11 @@ export function AiTextarea({
  const textareaRef = useRef<HTMLTextAreaElement>(null);
  const { showUsageToast, toastData, toastKey, clearToast } = useUsageToast();

- const { models } = useAiModels({
+ const { models } = useAiModels({
+ baseUrl,
+ apiKeyId,
+ modelType: "text-or-language",
+ });
  const { generateText, loading } = useAiCallText({ baseUrl, apiKeyId });

  const hasConfiguration = Boolean(model && prompt);

package/src/context/AiProvider.tsx
CHANGED
@@ -6,6 +6,7 @@ import {
  useState,
  useEffect,
  useCallback,
+ useRef,
  type ReactNode,
  } from "react";
  import type { UiMode } from "../types";
@@ -47,7 +48,9 @@ export interface AiContextValue {
  refetchProviders: () => Promise<void>;
  refetchUserModels: () => Promise<void>;
  // Helpers pour filtrer les modèles
- getModelsByType: (
+ getModelsByType: (
+ type: "text" | "language" | "image" | "embed"
+ ) => ModelRef[];
  getTextModels: () => ModelRef[];
  getImageModels: () => ModelRef[];
  }
@@ -74,36 +77,63 @@ export function AiProvider({
  const [loadingProviders, setLoadingProviders] = useState(false);
  const [loadingUserModels, setLoadingUserModels] = useState(false);

+ // Flags pour éviter les appels multiples et les boucles infinies
+ const isFetchingProviders = useRef(false);
+ const isFetchingUserModels = useRef(false);
+ const hasFetchedProviders = useRef(false);
+ const hasFetchedUserModels = useRef(false);
+ const providersAvailable = useRef(true); // false si 404
+ const userModelsAvailable = useRef(true); // false si 404
+
  // Récupérer les providers et leurs modèles + available models en parallèle
  const fetchProviders = useCallback(async () => {
-
-
+ // Éviter les appels multiples
+ if (isFetchingProviders.current) {
+ console.log("[AiProvider] Already fetching providers, skipping");
+ return;
+ }
+
+ // Si déjà fetch et non disponible, ne pas réessayer
+ if (hasFetchedProviders.current && !providersAvailable.current) {
+ console.log("[AiProvider] Providers API not available (404), skipping");
+ return;
+ }
+
+ isFetchingProviders.current = true;
  setLoadingProviders(true);
+
  try {
+ console.log("[AiProvider] Fetching providers from:", baseUrl);
+
+ // Utiliser createClient pour avoir buildUrl qui gère les routes correctement
+ const client = createClient({ baseUrl, apiKeyId });
+
  // Fetch providers et available models en parallèle
- const [
-
-
-
-
-
-
-
+ const [providersData, availableModelsData] = await Promise.all([
+ client
+ .getModels()
+ .then((models) => {
+ // getModels retourne directement les modèles, pas les providers
+ // On doit reconstruire la structure providers
+ return { providers: [{ id: "default", name: "Default", models }] };
+ })
+ .catch((error) => {
+ console.error("[AiProvider] Error fetching models:", error);
+ if (error.message?.includes("404")) {
+ providersAvailable.current = false;
+ }
+ return { providers: [] };
+ }),
  getAvailableModels({ baseUrl, apiKey: apiKeyId }).catch((error) => {
  console.warn("[AiProvider] Could not fetch available models:", error);
  return [];
  }),
  ]);
+ console.log("[AiProvider] Providers data received:", providersData);

- if (!providersResponse.ok) {
- throw new Error(`Failed to fetch providers: ${providersResponse.status}`);
- }
-
- const providersData = await providersResponse.json();
-
  if (providersData.providers && Array.isArray(providersData.providers)) {
  setProviders(providersData.providers);
-
+
  // Extraire tous les modèles
  const models: ModelRef[] = [];
  for (const provider of providersData.providers) {
@@ -111,40 +141,62 @@ export function AiProvider({
  models.push(...provider.models);
  }
  }
+ console.log("[AiProvider] Extracted models:", models.length, models);
  setAllModels(models);
  }

  // Stocker available models
+ console.log("[AiProvider] Available models:", availableModelsData.length);
  setAvailableModels(availableModelsData);
+ hasFetchedProviders.current = true;
  } catch (error) {
  console.error("[AiProvider] Error fetching providers:", error);
  // En cas d'erreur, utiliser des valeurs vides
  setProviders([]);
  setAllModels([]);
  setAvailableModels([]);
+ hasFetchedProviders.current = true;
  } finally {
  setLoadingProviders(false);
+ isFetchingProviders.current = false;
  }
- }, [baseUrl, apiKeyId
+ }, [baseUrl, apiKeyId]); // Retirer loadingProviders des dépendances

  // Récupérer les modèles activés par l'utilisateur
  const fetchUserModels = useCallback(async () => {
-
-
+ // Éviter les appels multiples
+ if (isFetchingUserModels.current) {
+ console.log("[AiProvider] Already fetching user models, skipping");
+ return;
+ }
+
+ // Si déjà fetch et non disponible, ne pas réessayer
+ if (hasFetchedUserModels.current && !userModelsAvailable.current) {
+ console.log("[AiProvider] User models API not available (404), skipping");
+ return;
+ }
+
+ isFetchingUserModels.current = true;
  setLoadingUserModels(true);
+
  try {
  const models = await getUserModels({ baseUrl, apiKey: apiKeyId });
  setUserModels(models);
+ hasFetchedUserModels.current = true;
  } catch (error) {
  console.error("[AiProvider] Error fetching user models:", error);
  // En cas d'erreur 404, ne pas logger comme erreur critique
  if (error instanceof Error && error.message.includes("404")) {
+ console.warn("[AiProvider] User models API not available (404)");
+ userModelsAvailable.current = false;
  setUserModels([]);
  }
+ hasFetchedUserModels.current = true;
  } finally {
  setLoadingUserModels(false);
+ isFetchingUserModels.current = false;
  }
- }, [baseUrl, apiKeyId
+ }, [baseUrl, apiKeyId]); // Retirer loadingUserModels des dépendances

  // Récupérer les données au montage du provider
  useEffect(() => {
@@ -155,17 +207,32 @@ export function AiProvider({
  // Helpers pour filtrer les modèles par type
  const getModelsByType = useCallback(
  (type: "text" | "language" | "image" | "embed") => {
-
+ const filtered = allModels.filter((model) => model.type === type);
+ console.log(`[AiProvider] getModelsByType(${type}):`, {
+ total: allModels.length,
+ filtered: filtered.length,
+ models: filtered.map((m) => ({ id: m.id, name: m.name, type: m.type })),
+ });
+ return filtered;
  },
  [allModels]
  );

  const getTextModels = useCallback(() => {
- return allModels.filter(
+ return allModels.filter(
+ (model) => model.type === "text" || model.type === "language"
+ );
  }, [allModels]);

  const getImageModels = useCallback(() => {
-
+ const imageModels = allModels.filter((model) => model.type === "image");
+ console.log("[AiProvider] getImageModels:", {
+ totalModels: allModels.length,
+ imageModels: imageModels.length,
+ allTypes: allModels.map((m) => ({ id: m.id, type: m.type })),
+ imageModelsList: imageModels.map((m) => ({ id: m.id, name: m.name })),
+ });
+ return imageModels;
  }, [allModels]);

  const value: AiContextValue = {
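
The guard added to fetchProviders and fetchUserModels above is a reusable pattern: skip calls that are already in flight, remember that a fetch has completed, and stop retrying once the endpoint has answered 404. A minimal standalone sketch of that pattern using only React (loadOnce is a placeholder loader, not an API from this package):

// Minimal sketch of the ref-based fetch guard used by AiProvider above.
import { useCallback, useRef, useState } from "react";

export function useGuardedFetch<T>(loadOnce: () => Promise<T[]>) {
  const [data, setData] = useState<T[]>([]);
  const isFetching = useRef(false); // skip overlapping calls
  const hasFetched = useRef(false); // remember that we already tried
  const available = useRef(true);   // false after a 404, so we never retry

  const fetchData = useCallback(async () => {
    if (isFetching.current) return;                       // already in flight
    if (hasFetched.current && !available.current) return; // known-missing endpoint
    isFetching.current = true;
    try {
      setData(await loadOnce());
      hasFetched.current = true;
    } catch (error) {
      if (error instanceof Error && error.message.includes("404")) {
        available.current = false;
      }
      hasFetched.current = true;
    } finally {
      isFetching.current = false;
    }
  }, [loadOnce]);

  return { data, fetchData };
}
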
package/src/hooks/useAiModels.ts
CHANGED
@@ -11,7 +11,7 @@ export interface UseAiModelsOptions {
  }

  export interface UseAiModelsResult {
- models: ModelRef[]
+ models: ModelRef[];
  loading: boolean;
  error: Error | null;
  refetch: () => void;
@@ -23,27 +23,71 @@ export interface UseAiModelsResult {
  */
  export function useAiModels(options?: UseAiModelsOptions): UseAiModelsResult {
  const context = useAiContext();
-
+
  // Si les options ne correspondent pas au contexte, on peut faire un appel direct
  // Mais dans la plupart des cas, on utilise le contexte
- const useContextData =
+ const useContextData =
  (!options?.baseUrl || options.baseUrl === context.baseUrl) &&
  (!options?.apiKeyId || options.apiKeyId === context.apiKeyId);

  // Filtrer les modèles selon le type demandé
  const filteredModels = useMemo(() => {
-
-
+ console.log("[useAiModels] Filtering models:", {
+ useContextData,
+ allModelsLength: context.allModels.length,
+ modelType: options?.modelType,
+ loading: context.loadingProviders,
+ });
+
+ if (!useContextData) {
+ console.log("[useAiModels] Not using context data");
+ return [];
+ }
+
+ // Pendant le chargement, retourner un tableau vide
+ if (context.loadingProviders) {
+ console.log("[useAiModels] Still loading...");
+ return [];
+ }
+
+ // Si pas de modèles après le chargement
+ if (!context.allModels.length) {
+ console.log("[useAiModels] No models in context");
+ return [];
+ }
+
  if (!options?.modelType) {
+ console.log(
+ "[useAiModels] Returning all models:",
+ context.allModels.length
+ );
  return context.allModels;
  }
-
+
  // Cas spécial: text ou language
  if (options.modelType === "text-or-language") {
-
+ const textModels = context.getTextModels();
+ console.log("[useAiModels] Returning text models:", textModels.length);
+ return textModels;
+ }
+
+ // Cas spécial: image
+ if (options.modelType === "image") {
+ const imageModels = context.getImageModels();
+ console.log(
+ "[useAiModels] Returning image models:",
+ imageModels.length,
+ imageModels
+ );
+ return imageModels;
  }
-
-
+ const filtered = context.getModelsByType(options.modelType);
+ console.log(
+ `[useAiModels] Returning ${options.modelType} models:`,
+ filtered.length
+ );
+ return filtered;
+ return filtered;
  }, [useContextData, context, options?.modelType]);

  const refetch = useCallback(() => {
@@ -63,10 +107,13 @@ export function useAiModels(options?: UseAiModelsOptions): UseAiModelsResult {

  // Fallback: si les options ne correspondent pas, retourner des valeurs vides
  // (cas rare, la plupart du temps on utilise le contexte)
+ console.log("[useAiModels] Using fallback (no context match)");
  return {
- models:
+ models: [],
  loading: false,
- error: new Error(
+ error: new Error(
+ "useAiModels called with different baseUrl/apiKeyId than context"
+ ),
  refetch: () => {},
  };
  }
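
The rewritten useMemo above resolves the model list in a fixed order: context mismatch, still loading, or an empty context all yield []; no modelType yields everything; then the text-or-language, image, and per-type branches delegate to the context helpers. Note that the last branch ends with two consecutive "return filtered;" statements, so the second one is unreachable. A compact restatement of that order (Ctx mirrors only the AiContextValue fields the hook reads; ModelRef is simplified):

// Compact restatement of the selection order implemented above; not the hook itself.
type ModelRef = { id: string; name: string; type: string };
type Ctx = {
  loadingProviders: boolean;
  allModels: ModelRef[];
  getTextModels: () => ModelRef[];
  getImageModels: () => ModelRef[];
  getModelsByType: (t: "text" | "language" | "image" | "embed") => ModelRef[];
};

function selectModels(
  ctx: Ctx,
  usable: boolean,
  modelType?: "text" | "language" | "image" | "embed" | "text-or-language"
): ModelRef[] {
  if (!usable) return [];               // options point at a different baseUrl/apiKeyId
  if (ctx.loadingProviders) return [];  // providers still loading
  if (!ctx.allModels.length) return []; // nothing fetched yet
  if (!modelType) return ctx.allModels; // no filter requested
  if (modelType === "text-or-language") return ctx.getTextModels();
  if (modelType === "image") return ctx.getImageModels();
  return ctx.getModelsByType(modelType);
}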

package/src/hooks/useModelManagement.ts
CHANGED
@@ -87,7 +87,10 @@ export function useModelManagement(
  } catch (err) {
  const errorMessage =
  err instanceof Error ? err.message : "Erreur inconnue";
- console.error(
+ console.error(
+ "[useModelManagement] Error toggling model:",
+ errorMessage
+ );
  setError(errorMessage);
  throw err;
  } finally {
@@ -105,7 +108,9 @@
  );

  const getActiveModels = useCallback(() => {
- return filteredModels.filter((model) =>
+ return filteredModels.filter((model) =>
+ context.userModels.includes(model.id)
+ );
  }, [filteredModels, context.userModels]);

  const getInactiveModels = useCallback(() => {