@complior/engine 0.9.2 → 0.9.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@complior/engine",
-  "version": "0.9.2",
+  "version": "0.9.3",
   "type": "module",
   "main": "src/index.ts",
   "description": "AI Act Compliance Engine — deterministic scanner, auto-fixer, reporter, and MCP server for EU AI Act. Powers the Complior CLI daemon.",
@@ -15,7 +15,6 @@ import type { AgentMode } from '../../llm/tools/types.js';
 import { getAgentConfig, getAllModes } from '../../llm/agents/modes.js';
 import { createCostTracker, type CostTracker } from '../../llm/routing/cost-tracker.js';
 import { createRateLimiter } from '../../infra/rate-limiter.js';
-import { complior } from '@complior/sdk';
 import { parseBody } from '../utils/validation.js';
 
 const ChatRequestSchema = z.object({
@@ -139,7 +138,7 @@ export const createChatRoute = (deps: ChatRouteDeps) => {
 
   return streamSSE(c, async (stream) => {
     try {
-      const result =
+      const result = streamText({
        model: model,
        system: systemPrompt,
        messages: chatService.getConversationHistory(),
@@ -2,7 +2,6 @@ import { Hono } from 'hono';
 import { z } from 'zod';
 import { generateText } from 'ai';
 import type { LlmPort } from '../../ports/llm.port.js';
-import { complior } from '@complior/sdk';
 import { parseBody } from '../utils/validation.js';
 
 const VerifySchema = z.object({
@@ -25,7 +24,7 @@ export const createProviderRoute = (llm: LlmPort) => {
 
     const model = await llm.getModel(provider, testModelId, apiKey);
 
-    await
+    await generateText({
      model,
      prompt: 'Say "ok"',
      maxOutputTokens: 1,