@layer-ai/core 0.2.2 → 0.3.1
This diff shows the published contents of the two package versions as they appear in their public registries. It is provided for informational purposes only.
- package/dist/lib/db/postgres.d.ts.map +1 -1
- package/dist/lib/db/postgres.js +28 -13
- package/dist/routes/v2/complete.d.ts.map +1 -1
- package/dist/routes/v2/complete.js +3 -2
- package/dist/services/providers/anthropic.d.ts.map +1 -1
- package/dist/services/providers/anthropic.js +4 -2
- package/dist/services/providers/base-adapter.d.ts.map +1 -1
- package/dist/services/providers/base-adapter.js +4 -3
- package/dist/services/providers/google-adapter.d.ts +21 -0
- package/dist/services/providers/google-adapter.d.ts.map +1 -0
- package/dist/services/providers/google-adapter.js +408 -0
- package/dist/services/providers/google.d.ts.map +1 -1
- package/dist/services/providers/google.js +4 -2
- package/dist/services/providers/openai.d.ts.map +1 -1
- package/dist/services/providers/openai.js +4 -2
- package/dist/services/providers/tests/test-google-adapter.d.ts +2 -0
- package/dist/services/providers/tests/test-google-adapter.d.ts.map +1 -0
- package/dist/services/providers/tests/test-google-adapter.js +224 -0
- package/dist/services/task-analysis.d.ts.map +1 -1
- package/dist/services/task-analysis.js +67 -2
- package/package.json +2 -2

package/dist/lib/db/postgres.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"postgres.d.ts","sourceRoot":"","sources":["../../../src/lib/db/postgres.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,CAAC;AACpB,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAyB,MAAM,eAAe,CAAC;AAO/E,iBAAS,OAAO,IAAI,EAAE,CAAC,IAAI,CAqB1B;AA0BD,eAAO,MAAM,EAAE;gBAEK,MAAM,WAAW,GAAG,EAAE;0BASZ,MAAM,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;oBAQnC,MAAM,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;sBAQ3B,MAAM,gBAAgB,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;6BASrC,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;yBAQnC,MAAM,WAAW,MAAM,aAAa,MAAM,QAAQ,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;kCAQjE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;8BAO1B,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;qBAQnC,MAAM,UAAU,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;iCAS7B,MAAM,YAAY,MAAM,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;4BAQpD,MAAM,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;uBAQ7B,MAAM,QAAQ,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;
+{"version":3,"file":"postgres.d.ts","sourceRoot":"","sources":["../../../src/lib/db/postgres.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,CAAC;AACpB,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAyB,MAAM,eAAe,CAAC;AAO/E,iBAAS,OAAO,IAAI,EAAE,CAAC,IAAI,CAqB1B;AA0BD,eAAO,MAAM,EAAE;gBAEK,MAAM,WAAW,GAAG,EAAE;0BASZ,MAAM,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;oBAQnC,MAAM,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;sBAQ3B,MAAM,gBAAgB,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;6BASrC,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;yBAQnC,MAAM,WAAW,MAAM,aAAa,MAAM,QAAQ,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;kCAQjE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;8BAO1B,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;qBAQnC,MAAM,UAAU,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;iCAS7B,MAAM,YAAY,MAAM,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;4BAQpD,MAAM,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;uBAQ7B,MAAM,QAAQ,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;oBA2BpC,MAAM,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;mBAQ9B,MAAM,QAAQ,GAAG,GAAG,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;mBA0CxC,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;qBASvB,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;iCAgBP,MAAM,GAAG,OAAO,CAAC;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,IAAI,CAAA;KAAE,GAAG,IAAI,CAAC;6BAQhE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;qCAehB,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAM9D,CAAC;AAEF,eAAe,OAAO,CAAC"}

package/dist/lib/db/postgres.js
CHANGED
@@ -31,7 +31,7 @@ function toCamelCase(obj) {
         const camelKey = key.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
         let value = obj[key];
         // Convert numeric strings to numbers for specific fields
-        if ((camelKey === 'temperature' || camelKey === 'topP') && typeof value === 'string') {
+        if ((camelKey === 'temperature' || camelKey === 'topP' || camelKey === 'costWeight' || camelKey === 'latencyWeight' || camelKey === 'qualityWeight') && typeof value === 'string') {
            value = parseFloat(value);
        }
        if (camelKey === 'maxTokens' && typeof value === 'string') {
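
node-postgres returns NUMERIC columns as strings, so the camel-casing helper now coerces the three new routing-weight fields as well. A minimal sketch of the behavior (a standalone reimplementation with the chained comparisons collapsed into a Set, not the package's unexported helper):

```ts
// Sketch: pg NUMERIC columns arrive as strings; the helper converts the
// known numeric fields after snake_case -> camelCase renaming.
const NUMERIC_FIELDS = new Set(['temperature', 'topP', 'costWeight', 'latencyWeight', 'qualityWeight']);

function toCamelCaseRow(row: Record<string, unknown>): Record<string, unknown> {
  const out: Record<string, unknown> = {};
  for (const key of Object.keys(row)) {
    const camelKey = key.replace(/_([a-z])/g, (_, letter: string) => letter.toUpperCase());
    let value = row[key];
    if (NUMERIC_FIELDS.has(camelKey) && typeof value === 'string') {
      value = parseFloat(value);
    }
    out[camelKey] = value;
  }
  return out;
}

toCamelCaseRow({ cost_weight: '0.33', latency_weight: '0.33', quality_weight: '0.34' });
// => { costWeight: 0.33, latencyWeight: 0.33, qualityWeight: 0.34 }
```
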
@@ -94,11 +94,12 @@ export const db = {
        return result.rows.map(toCamelCase);
    },
    async createGate(userId, data) {
-        const result = await getPool().query(`INSERT INTO gates (user_id, name, description, model, system_prompt, allow_overrides, temperature, max_tokens, top_p, tags, routing_strategy, fallback_models)
-      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING *`, [
+        const result = await getPool().query(`INSERT INTO gates (user_id, name, description, task_type, model, system_prompt, allow_overrides, temperature, max_tokens, top_p, tags, routing_strategy, fallback_models, cost_weight, latency_weight, quality_weight, reanalysis_period)
+      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17) RETURNING *`, [
            userId,
            data.name,
            data.description,
+            data.taskType,
            data.model,
            data.systemPrompt,
            data.allowOverrides ? JSON.stringify(data.allowOverrides) : null,
@@ -107,7 +108,11 @@ export const db = {
            data.topP,
            JSON.stringify(data.tags || []),
            data.routingStrategy || 'single',
-            JSON.stringify(data.fallbackModels || [])
+            JSON.stringify(data.fallbackModels || []),
+            data.costWeight ?? 0.33,
+            data.latencyWeight ?? 0.33,
+            data.qualityWeight ?? 0.34,
+            data.reanalysisPeriod || 'never'
        ]);
        return toCamelCase(result.rows[0]);
    },
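
createGate now persists five extra columns: task_type, the three routing weights (defaulting to 0.33/0.33/0.34 so they sum to 1), and reanalysis_period (defaulting to 'never'). A hedged sketch of a call; the input type itself is not part of this diff, so the field names are read off the parameter list above:

```ts
// Field names inferred from the INSERT above.
const gate = await db.createGate(userId, {
  name: 'support-triage',
  description: 'Route inbound support questions',
  taskType: 'chat',                 // new in 0.3.x: persisted task_type
  model: 'gpt-4o',
  routingStrategy: 'single',
  fallbackModels: ['claude-sonnet-4-5-20250929'],
  costWeight: 0.5,                  // new: defaults 0.33 / 0.33 / 0.34
  latencyWeight: 0.2,
  qualityWeight: 0.3,
  reanalysisPeriod: 'never',        // new: defaults to 'never'
});
```
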
@@ -118,19 +123,25 @@ export const db = {
    async updateGate(id, data) {
        const result = await getPool().query(`UPDATE gates SET
      description = COALESCE($2, description),
-      model = COALESCE($3, model),
-      system_prompt = COALESCE($4, system_prompt),
-      allow_overrides = COALESCE($5, allow_overrides),
-      temperature = COALESCE($6, temperature),
-      max_tokens = COALESCE($7, max_tokens),
-      top_p = COALESCE($8, top_p),
-      tags = COALESCE($9, tags),
-      routing_strategy = COALESCE($10, routing_strategy),
-      fallback_models = COALESCE($11, fallback_models),
+      task_type = COALESCE($3, task_type),
+      model = COALESCE($4, model),
+      system_prompt = COALESCE($5, system_prompt),
+      allow_overrides = COALESCE($6, allow_overrides),
+      temperature = COALESCE($7, temperature),
+      max_tokens = COALESCE($8, max_tokens),
+      top_p = COALESCE($9, top_p),
+      tags = COALESCE($10, tags),
+      routing_strategy = COALESCE($11, routing_strategy),
+      fallback_models = COALESCE($12, fallback_models),
+      cost_weight = COALESCE($13, cost_weight),
+      latency_weight = COALESCE($14, latency_weight),
+      quality_weight = COALESCE($15, quality_weight),
+      reanalysis_period = COALESCE($16, reanalysis_period),
      updated_at = NOW()
      WHERE id = $1 RETURNING *`, [
            id,
            data.description,
+            data.taskType,
            data.model,
            data.systemPrompt,
            data.allowOverrides ? JSON.stringify(data.allowOverrides) : null,
@@ -140,6 +151,10 @@ export const db = {
            data.tags ? JSON.stringify(data.tags) : null,
            data.routingStrategy,
            data.fallbackModels ? JSON.stringify(data.fallbackModels) : null,
+            data.costWeight,
+            data.latencyWeight,
+            data.qualityWeight,
+            data.reanalysisPeriod,
        ]);
        return result.rows[0] ? toCamelCase(result.rows[0]) : null;
    },
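
Every SET clause is wrapped in COALESCE($n, column), and node-postgres sends undefined parameters as NULL, so a partial update leaves unlisted columns untouched. A hedged sketch (assuming the usual undefined-to-NULL coercion):

```ts
// Only the routing weights change; model, tags, reanalysis_period, etc.
// fall through COALESCE and keep their stored values.
await db.updateGate(gateId, {
  costWeight: 0.6,
  latencyWeight: 0.2,
  qualityWeight: 0.2,
});
```
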

package/dist/routes/v2/complete.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"complete.d.ts","sourceRoot":"","sources":["../../../src/routes/v2/complete.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,MAAM,IAAI,UAAU,EAAE,MAAM,SAAS,CAAC;
+{"version":3,"file":"complete.d.ts","sourceRoot":"","sources":["../../../src/routes/v2/complete.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,MAAM,IAAI,UAAU,EAAE,MAAM,SAAS,CAAC;AAYpD,QAAA,MAAM,MAAM,EAAE,UAAqB,CAAC;AAgQpC,eAAe,MAAM,CAAC"}

package/dist/routes/v2/complete.js
CHANGED
@@ -4,6 +4,7 @@ import { cache } from '../../lib/db/redis.js';
 import { authenticate } from '../../middleware/auth.js';
 import { OpenAIAdapter } from '../../services/providers/openai-adapter.js';
 import { AnthropicAdapter } from '../../services/providers/anthropic-adapter.js';
+import { GoogleAdapter } from '../../services/providers/google-adapter.js';
 import { MODEL_REGISTRY, OverrideField } from '@layer-ai/sdk';
 const router = Router();
 // MARK:- Helper Functions
@@ -70,8 +71,8 @@ async function callProvider(request) {
            return await adapter.call(request);
        }
        case 'google':
-
-
+            const adapter = new GoogleAdapter();
+            return await adapter.call(request);
        default:
            throw new Error(`Unknown provider: ${provider}`);
    }

package/dist/services/providers/anthropic.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../../../src/services/providers/anthropic.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAkB,MAAM,eAAe,CAAC;AAe7D,MAAM,WAAW,yBAAyB;IACxC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE,yBAAyB,GAAG,OAAO,CAAC,gBAAgB,CAAC,
+{"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../../../src/services/providers/anthropic.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAkB,MAAM,eAAe,CAAC;AAe7D,MAAM,WAAW,yBAAyB;IACxC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE,yBAAyB,GAAG,OAAO,CAAC,gBAAgB,CAAC,CAoDnG"}

package/dist/services/providers/anthropic.js
CHANGED
@@ -45,8 +45,10 @@ export async function createCompletion(params) {
    const completionTokens = response.usage.output_tokens;
    const totalTokens = promptTokens + completionTokens;
    // Calculate cost
-    const
-    const costUsd = (
+    const modelInfo = MODEL_REGISTRY[params.model];
+    const costUsd = ('pricing' in modelInfo && modelInfo.pricing?.input && modelInfo.pricing?.output)
+        ? (promptTokens / 1000 * modelInfo.pricing.input) + (completionTokens / 1000 * modelInfo.pricing.output)
+        : 0;
    return {
        content,
        promptTokens,

package/dist/services/providers/base-adapter.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"base-adapter.d.ts","sourceRoot":"","sources":["../../../src/services/providers/base-adapter.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,aAAa,EACb,IAAI,EACJ,WAAW,EACX,SAAS,EACT,YAAY,EACZ,UAAU,EACV,SAAS,EACT,WAAW,EACX,aAAa,EACb,aAAa,EACb,YAAY,EACZ,UAAU,EACV,cAAc,EACd,eAAe,EAGhB,MAAM,eAAe,CAAC;AAEvB,OAAO,EAAE,eAAe,EAAE,CAAC;AAE3B,8BAAsB,mBAAmB;IACvC,SAAS,CAAC,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAEpC,SAAS,CAAC,YAAY,CAAC,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IAC9C,SAAS,CAAC,mBAAmB,CAAC,EAAE,MAAM,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;IAC5D,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAAC,CAAC;IAC/D,SAAS,CAAC,oBAAoB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IAC9D,SAAS,CAAC,iBAAiB,CAAC,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACxD,SAAS,CAAC,oBAAoB,CAAC,EAAE,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9D,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC1D,SAAS,CAAC,iBAAiB,CAAC,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACxD,SAAS,CAAC,mBAAmB,CAAC,EAAE,MAAM,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;IAC5D,SAAS,CAAC,qBAAqB,CAAC,EAAE,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;IAChE,SAAS,CAAC,qBAAqB,CAAC,EAAE,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;IAChE,SAAS,CAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;IAElE,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;IAE5D,SAAS,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,GAAG,MAAM;IAcrC,SAAS,CAAC,cAAc,CAAC,MAAM,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS;IAQjE,SAAS,CAAC,YAAY,CAAC,IAAI,EAAE,SAAS,GAAG,MAAM,GAAG,SAAS;IAQ3D,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,YAAY,GAAG,MAAM,GAAG,SAAS;IAQpE,SAAS,CAAC,aAAa,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,GAAG,SAAS;IAQ9D,SAAS,CAAC,YAAY,CAAC,IAAI,EAAE,SAAS,GAAG,MAAM,GAAG,SAAS;IAQ3D,SAAS,CAAC,cAAc,CAAC,MAAM,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS;IAQjE,SAAS,CAAC,gBAAgB,CAAC,QAAQ,EAAE,aAAa,GAAG,MAAM,GAAG,SAAS;IAQvE,SAAS,CAAC,gBAAgB,CAAC,QAAQ,EAAE,aAAa,GAAG,MAAM,GAAG,SAAS;IAQvE,SAAS,CAAC,iBAAiB,CAAC,MAAM,EAAE,cAAc,GAAG,MAAM,GAAG,SAAS;IAQvE,SAAS,CAAC,eAAe,CAAC,oBAAoB,EAAE,MAAM,GAAG,YAAY;IAQrE,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,GAAG,MAAM,GAAG,SAAS;IAYxE,SAAS,CAAC,aAAa,CACrB,KAAK,EAAE,MAAM,EACb,YAAY,EAAE,MAAM,EACpB,gBAAgB,EAAE,MAAM,GACvB,MAAM;
+{"version":3,"file":"base-adapter.d.ts","sourceRoot":"","sources":["../../../src/services/providers/base-adapter.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,aAAa,EACb,IAAI,EACJ,WAAW,EACX,SAAS,EACT,YAAY,EACZ,UAAU,EACV,SAAS,EACT,WAAW,EACX,aAAa,EACb,aAAa,EACb,YAAY,EACZ,UAAU,EACV,cAAc,EACd,eAAe,EAGhB,MAAM,eAAe,CAAC;AAEvB,OAAO,EAAE,eAAe,EAAE,CAAC;AAE3B,8BAAsB,mBAAmB;IACvC,SAAS,CAAC,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAEpC,SAAS,CAAC,YAAY,CAAC,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IAC9C,SAAS,CAAC,mBAAmB,CAAC,EAAE,MAAM,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;IAC5D,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAAC,CAAC;IAC/D,SAAS,CAAC,oBAAoB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IAC9D,SAAS,CAAC,iBAAiB,CAAC,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACxD,SAAS,CAAC,oBAAoB,CAAC,EAAE,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9D,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC1D,SAAS,CAAC,iBAAiB,CAAC,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACxD,SAAS,CAAC,mBAAmB,CAAC,EAAE,MAAM,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;IAC5D,SAAS,CAAC,qBAAqB,CAAC,EAAE,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;IAChE,SAAS,CAAC,qBAAqB,CAAC,EAAE,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;IAChE,SAAS,CAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;IAElE,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;IAE5D,SAAS,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,GAAG,MAAM;IAcrC,SAAS,CAAC,cAAc,CAAC,MAAM,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS;IAQjE,SAAS,CAAC,YAAY,CAAC,IAAI,EAAE,SAAS,GAAG,MAAM,GAAG,SAAS;IAQ3D,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,YAAY,GAAG,MAAM,GAAG,SAAS;IAQpE,SAAS,CAAC,aAAa,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,GAAG,SAAS;IAQ9D,SAAS,CAAC,YAAY,CAAC,IAAI,EAAE,SAAS,GAAG,MAAM,GAAG,SAAS;IAQ3D,SAAS,CAAC,cAAc,CAAC,MAAM,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS;IAQjE,SAAS,CAAC,gBAAgB,CAAC,QAAQ,EAAE,aAAa,GAAG,MAAM,GAAG,SAAS;IAQvE,SAAS,CAAC,gBAAgB,CAAC,QAAQ,EAAE,aAAa,GAAG,MAAM,GAAG,SAAS;IAQvE,SAAS,CAAC,iBAAiB,CAAC,MAAM,EAAE,cAAc,GAAG,MAAM,GAAG,SAAS;IAQvE,SAAS,CAAC,eAAe,CAAC,oBAAoB,EAAE,MAAM,GAAG,YAAY;IAQrE,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,UAAU,GAAG,MAAM,GAAG,MAAM,GAAG,SAAS;IAYxE,SAAS,CAAC,aAAa,CACrB,KAAK,EAAE,MAAM,EACb,YAAY,EAAE,MAAM,EACpB,gBAAgB,EAAE,MAAM,GACvB,MAAM;CAOV"}

package/dist/services/providers/base-adapter.js
CHANGED
@@ -82,8 +82,9 @@ export class BaseProviderAdapter {
    }
    calculateCost(model, promptTokens, completionTokens) {
        const modelInfo = MODEL_REGISTRY[model];
-
-
-
+        if (!modelInfo || !('pricing' in modelInfo) || !modelInfo.pricing?.input || !modelInfo.pricing?.output) {
+            return 0;
+        }
+        return (promptTokens / 1000 * modelInfo.pricing.input) + (completionTokens / 1000 * modelInfo.pricing.output);
    }
 }
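
The guard means models without registry pricing are costed at zero rather than producing NaN. The registry rates appear to be per 1,000 tokens; a worked example with illustrative numbers (the rates below are hypothetical, not taken from MODEL_REGISTRY):

```ts
// cost = promptTokens/1000 * pricing.input + completionTokens/1000 * pricing.output
const pricing = { input: 0.0025, output: 0.01 }; // hypothetical USD per 1K tokens
const promptTokens = 1200;
const completionTokens = 300;
const costUsd =
  (promptTokens / 1000) * pricing.input +
  (completionTokens / 1000) * pricing.output;
// (1.2 * 0.0025) + (0.3 * 0.01) = 0.003 + 0.003 = 0.006 USD
```
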

package/dist/services/providers/google-adapter.d.ts
ADDED
@@ -0,0 +1,21 @@
+import { FunctionCallingConfigMode } from '@google/genai';
+import { LayerRequest, LayerResponse, Role, FinishReason, VideoSize } from '@layer-ai/sdk';
+import { BaseProviderAdapter } from './base-adapter.js';
+export declare class GoogleAdapter extends BaseProviderAdapter {
+    protected provider: string;
+    protected roleMappings: Record<Role, string>;
+    protected finishReasonMappings: Record<string, FinishReason>;
+    protected toolChoiceMappings: Record<string, FunctionCallingConfigMode>;
+    protected videoSizeConfig: Record<VideoSize, {
+        aspectRatio: string;
+        resolution: string;
+    }>;
+    call(request: LayerRequest): Promise<LayerResponse>;
+    private handleChat;
+    private handleImageGeneration;
+    private handleEmbeddings;
+    private handleVideoGeneration;
+    private handleTextToSpeech;
+    private sleep;
+}
+//# sourceMappingURL=google-adapter.d.ts.map

package/dist/services/providers/google-adapter.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"google-adapter.d.ts","sourceRoot":"","sources":["../../../src/services/providers/google-adapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAKL,yBAAyB,EAI1B,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,IAAI,EACJ,YAAY,EACZ,SAAS,EACV,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,mBAAmB,EAAE,MAAM,mBAAmB,CAAC;AAYxD,qBAAa,aAAc,SAAQ,mBAAmB;IACpD,SAAS,CAAC,QAAQ,SAAY;IAE9B,SAAS,CAAC,YAAY,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,CAQ1C;IAGF,SAAS,CAAC,oBAAoB,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAO1D;IAEF,SAAS,CAAC,kBAAkB,EAAE,MAAM,CAAC,MAAM,EAAE,yBAAyB,CAAC,CAIrE;IAGF,SAAS,CAAC,eAAe,EAAE,MAAM,CAC/B,SAAS,EACT;QAAE,WAAW,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE,CAC5C,CAKC;IAEI,IAAI,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;YAiB3C,UAAU;YAqLV,qBAAqB;YAkCrB,gBAAgB;YAmChB,qBAAqB;YAmHrB,kBAAkB;IAwChC,OAAO,CAAC,KAAK;CAGd"}


package/dist/services/providers/google-adapter.js
ADDED
@@ -0,0 +1,408 @@
+import { GoogleGenAI, FunctionCallingConfigMode, VideoGenerationReferenceType, } from '@google/genai';
+import { BaseProviderAdapter } from './base-adapter.js';
+import { ADAPTER_HANDLED } from './base-adapter.js';
+let client = null;
+function getGoogleClient() {
+    if (!client) {
+        client = new GoogleGenAI({ apiKey: process.env.GOOGLE_API_KEY || '' });
+    }
+    return client;
+}
+export class GoogleAdapter extends BaseProviderAdapter {
+    constructor() {
+        super(...arguments);
+        this.provider = 'google';
+        this.roleMappings = {
+            system: ADAPTER_HANDLED,
+            user: 'user',
+            assistant: 'model',
+            tool: 'function',
+            function: 'function',
+            model: 'model',
+            developer: 'system',
+        };
+        // Map Google finish reasons to Layer finish reasons
+        this.finishReasonMappings = {
+            STOP: 'completed',
+            MAX_TOKENS: 'length_limit',
+            SAFETY: 'filtered',
+            RECITATION: 'filtered',
+            FINISH_REASON_UNSPECIFIED: 'completed',
+            OTHER: 'completed',
+        };
+        this.toolChoiceMappings = {
+            auto: FunctionCallingConfigMode.AUTO,
+            none: FunctionCallingConfigMode.NONE,
+            required: FunctionCallingConfigMode.ANY,
+        };
+        // Map Layer VideoSize to Veo aspect ratio and resolution
+        this.videoSizeConfig = {
+            '720x1280': { aspectRatio: '9:16', resolution: '720p' },
+            '1280x720': { aspectRatio: '16:9', resolution: '720p' },
+            '1024x1792': { aspectRatio: '9:16', resolution: '1080p' },
+            '1792x1024': { aspectRatio: '16:9', resolution: '1080p' },
+        };
+    }
+    async call(request) {
+        switch (request.type) {
+            case 'chat':
+                return this.handleChat(request);
+            case 'image':
+                return this.handleImageGeneration(request);
+            case 'embeddings':
+                return this.handleEmbeddings(request);
+            case 'tts':
+                return this.handleTextToSpeech(request);
+            case 'video':
+                return this.handleVideoGeneration(request);
+            default:
+                throw new Error(`Unknown modality: ${request.type}`);
+        }
+    }
+    async handleChat(request) {
+        const startTime = Date.now();
+        const client = getGoogleClient();
+        const { data: chat, model } = request;
+        if (!model) {
+            throw new Error('Model is required for chat completion');
+        }
+        const contents = [];
+        let systemInstruction;
+        // Handle system prompt
+        if (chat.systemPrompt) {
+            systemInstruction = chat.systemPrompt;
+        }
+        // Convert messages to Google format
+        for (const msg of chat.messages) {
+            const role = this.mapRole(msg.role);
+            // Skip system messages (handled via systemInstruction)
+            if (role === 'system') {
+                systemInstruction = systemInstruction
+                    ? `${systemInstruction}\n${msg.content}`
+                    : msg.content;
+                continue;
+            }
+            const parts = [];
+            // Handle text content
+            if (msg.content) {
+                parts.push({ text: msg.content });
+            }
+            // Handle images
+            if (msg.images && msg.images.length > 0) {
+                for (const image of msg.images) {
+                    if (image.base64) {
+                        parts.push({
+                            inlineData: {
+                                mimeType: image.mimeType || 'image/jpeg',
+                                data: image.base64,
+                            },
+                        });
+                    }
+                    else if (image.url) {
+                        parts.push({
+                            fileData: {
+                                mimeType: image.mimeType || 'image/jpeg',
+                                fileUri: image.url,
+                            },
+                        });
+                    }
+                }
+            }
+            // Handle tool responses
+            if (msg.toolCallId && msg.role === 'tool') {
+                if (!msg.name) {
+                    throw new Error('Tool response messages must include the function name');
+                }
+                parts.push({
+                    functionResponse: {
+                        name: msg.name || msg.toolCallId,
+                        response: { result: msg.content },
+                    },
+                });
+            }
+            // Handle assistant messages with tool calls
+            if (msg.toolCalls && msg.toolCalls.length > 0) {
+                for (const toolCall of msg.toolCalls) {
+                    parts.push({
+                        functionCall: {
+                            name: toolCall.function.name,
+                            args: JSON.parse(toolCall.function.arguments),
+                        },
+                    });
+                }
+            }
+            if (parts.length > 0) {
+                contents.push({
+                    role: role === 'model' ? 'model' : 'user',
+                    parts,
+                });
+            }
+        }
+        // Convert tools to Google format
+        let googleTools;
+        if (chat.tools && chat.tools.length > 0) {
+            googleTools = [
+                {
+                    functionDeclarations: chat.tools.map((tool) => ({
+                        name: tool.function.name,
+                        description: tool.function.description,
+                        parametersJsonSchema: tool.function.parameters,
+                    })),
+                },
+            ];
+        }
+        // Map tool choice
+        let toolConfig;
+        if (chat.toolChoice) {
+            const mode = this.mapToolChoice(chat.toolChoice);
+            if (typeof mode === 'string') {
+                toolConfig = {
+                    functionCallingConfig: { mode: mode },
+                };
+            }
+        }
+        const response = await client.models.generateContent({
+            model: model,
+            contents,
+            config: {
+                ...(systemInstruction && { systemInstruction }),
+                ...(googleTools && { tools: googleTools }),
+                ...(toolConfig && { toolConfig }),
+                ...(chat.temperature !== undefined && { temperature: chat.temperature }),
+                ...(chat.maxTokens !== undefined && { maxOutputTokens: chat.maxTokens }),
+                ...(chat.topP !== undefined && { topP: chat.topP }),
+                ...(chat.stopSequences !== undefined && { stopSequences: chat.stopSequences }),
+            }
+        });
+        const candidate = response.candidates?.[0];
+        const content = candidate?.content;
+        const textContent = content?.parts
+            ?.filter((part) => 'text' in part)
+            .map((part) => part.text)
+            .join('');
+        // Extract tool calls
+        const toolCalls = content?.parts
+            ?.filter((part) => 'functionCall' in part)
+            .map((part, index) => {
+            const fc = part.functionCall;
+            return {
+                id: `call_${index}_${fc.name}`,
+                type: 'function',
+                function: {
+                    name: fc.name,
+                    arguments: JSON.stringify(fc.args),
+                },
+            };
+        });
+        const promptTokens = response.usageMetadata?.promptTokenCount || 0;
+        const completionTokens = response.usageMetadata?.candidatesTokenCount || 0;
+        const totalTokens = response.usageMetadata?.totalTokenCount || 0;
+        const cost = this.calculateCost(model, promptTokens, completionTokens);
+        return {
+            content: textContent || undefined,
+            toolCalls: toolCalls && toolCalls.length > 0 ? toolCalls : undefined,
+            model: response.modelVersion || model,
+            finishReason: this.mapFinishReason(candidate?.finishReason || 'STOP'),
+            rawFinishReason: candidate?.finishReason,
+            usage: {
+                promptTokens,
+                completionTokens,
+                totalTokens,
+            },
+            cost,
+            latencyMs: Date.now() - startTime,
+            raw: response,
+        };
+    }
+    async handleImageGeneration(request) {
+        const startTime = Date.now();
+        const client = getGoogleClient();
+        const { data: image, model } = request;
+        if (!model) {
+            throw new Error('Model is required for chat completion');
+        }
+        // Google's Imagen API via generateImages
+        const response = await client.models.generateImages({
+            model: model,
+            prompt: image.prompt,
+            config: {
+                numberOfImages: image.count || 1,
+                ...(image.seed !== undefined && { seed: image.seed }),
+            },
+        });
+        const images = response.generatedImages?.map((img) => ({
+            base64: img.image?.imageBytes,
+        })) || [];
+        return {
+            images,
+            model: model,
+            latencyMs: Date.now() - startTime,
+            raw: response,
+        };
+    }
+    async handleEmbeddings(request) {
+        const startTime = Date.now();
+        const client = getGoogleClient();
+        const { data: embedding, model } = request;
+        if (!model) {
+            throw new Error('Model is required for chat completion');
+        }
+        const inputs = Array.isArray(embedding.input)
+            ? embedding.input
+            : [embedding.input];
+        const response = await client.models.embedContent({
+            model: model,
+            contents: inputs.map((text) => ({ parts: [{ text }] })),
+        });
+        const embeddings = response.embeddings?.map((e) => e.values || []) || [];
+        return {
+            embeddings,
+            model: model,
+            usage: {
+                promptTokens: 0, // Google doesn't provide token count for embeddings
+                completionTokens: 0,
+                totalTokens: 0,
+            },
+            latencyMs: Date.now() - startTime,
+            raw: response,
+        };
+    }
+    async handleVideoGeneration(request) {
+        const startTime = Date.now();
+        const client = getGoogleClient();
+        const { data: video, model } = request;
+        if (!model) {
+            throw new Error('Model is required for chat completion');
+        }
+        // Derive aspect ratio and resolution from size
+        const sizeConfig = video.size ? this.videoSizeConfig[video.size] : null;
+        const aspectRatio = sizeConfig?.aspectRatio || '16:9';
+        const resolution = sizeConfig?.resolution || '720p';
+        // Parse duration - Veo accepts 4, 6, or 8 seconds
+        let durationSeconds;
+        if (video.duration !== undefined) {
+            const dur = typeof video.duration === 'string'
+                ? parseInt(video.duration, 10)
+                : video.duration;
+            // Veo supports 4, 6, or 8 seconds
+            if (dur <= 5)
+                durationSeconds = 5;
+            else if (dur <= 6)
+                durationSeconds = 6;
+            else
+                durationSeconds = 8;
+        }
+        // Build reference images for Veo 3.1
+        let referenceImages;
+        if (video.referenceImages && video.referenceImages.length > 0) {
+            const refTypeMap = {
+                subject: VideoGenerationReferenceType.ASSET,
+                style: VideoGenerationReferenceType.STYLE,
+                asset: VideoGenerationReferenceType.ASSET,
+            };
+            referenceImages = video.referenceImages.slice(0, 3).map((ref) => ({
+                image: ref.base64 ? { imageBytes: ref.base64 } : { imageUri: ref.url },
+                referenceType: refTypeMap[ref.referenceType || 'asset'],
+            }));
+        }
+        // Build starting image if provided
+        let image;
+        if (video.image) {
+            image = video.image.base64
+                ? { imageBytes: video.image.base64 }
+                : { imageUri: video.image.url };
+        }
+        // Build last frame for interpolation
+        let lastFrame;
+        if (video.lastFrame) {
+            lastFrame = video.lastFrame.base64
+                ? { imageBytes: video.lastFrame.base64 }
+                : { imageUri: video.lastFrame.url };
+        }
+        // Start the video generation operation
+        let operation = await client.models.generateVideos({
+            model: model,
+            prompt: video.prompt,
+            ...(image && { image }),
+            config: {
+                aspectRatio,
+                resolution,
+                ...(durationSeconds !== undefined && { durationSeconds }),
+                ...(video.seed !== undefined && { seed: video.seed }),
+                ...(video.negativePrompt && { negativePrompt: video.negativePrompt }),
+                ...(video.personGeneration && {
+                    personGeneration: video.personGeneration,
+                }),
+                ...(video.numberOfVideos && { numberOfVideos: video.numberOfVideos }),
+                ...(referenceImages && { referenceImages }),
+                ...(lastFrame && { lastFrame }),
+            },
+        });
+        // Poll until the operation completes
+        // Veo operations can take 11 seconds to 6 minutes
+        const pollIntervalMs = 5000; // Poll every 5 seconds
+        const maxWaitMs = 10 * 60 * 1000; // Max wait 10 minutes
+        const startPoll = Date.now();
+        while (!operation.done) {
+            if (Date.now() - startPoll > maxWaitMs) {
+                throw new Error('Video generation timed out after 10 minutes');
+            }
+            await this.sleep(pollIntervalMs);
+            operation = (await client.operations.get({
+                operation: operation,
+            }));
+        }
+        // Extract generated videos
+        const generatedVideos = operation.response?.generatedVideos || [];
+        const videos = generatedVideos.map((vid) => {
+            if (vid.video) {
+                return {
+                    url: vid.video.uri,
+                    duration: durationSeconds,
+                };
+            }
+            return {};
+        });
+        return {
+            videos: videos.filter((v) => v.url),
+            model: model,
+            latencyMs: Date.now() - startTime,
+            raw: operation.response,
+        };
+    }
+    async handleTextToSpeech(request) {
+        const startTime = Date.now();
+        const client = getGoogleClient();
+        const { data: tts, model } = request;
+        if (!model) {
+            throw new Error('Model is required for chat completion');
+        }
+        const response = await client.models.generateContent({
+            model: model,
+            contents: tts.input,
+            config: {
+                responseModalities: ['AUDIO'],
+                speechConfig: {
+                    voiceConfig: {
+                        prebuiltVoiceConfig: {
+                            voiceName: tts.voice || 'Kore',
+                        },
+                    },
+                },
+            },
+        });
+        const audioData = response.candidates?.[0]?.content?.parts?.[0]?.inlineData?.data;
+        return {
+            audio: {
+                base64: audioData,
+                format: 'wav',
+            },
+            model: model,
+            latencyMs: Date.now() - startTime,
+            raw: response,
+        };
+    }
+    sleep(ms) {
+        return new Promise((resolve) => setTimeout(resolve, ms));
+    }
+}
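
The adapter can also be driven directly. The request shape below is copied from the bundled smoke tests; the deep dist import path is an assumption (this diff does not show what the package exports), and GOOGLE_API_KEY must be set:

```ts
import { GoogleAdapter } from '@layer-ai/core/dist/services/providers/google-adapter.js'; // path assumed

const adapter = new GoogleAdapter();
const response = await adapter.call({
  gate: 'my-gate',
  model: 'gemini-2.5-flash',
  type: 'chat',
  data: {
    messages: [{ role: 'user', content: 'Say "Hello World" and nothing else.' }],
    maxTokens: 10,
  },
});
console.log(response.content, response.usage, response.cost, response.latencyMs);
```
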

package/dist/services/providers/google.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../../src/services/providers/google.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAkB,MAAM,eAAe,CAAC;AAY7D,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAGD,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,gBAAgB,CAAC,
+{"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../../src/services/providers/google.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAkB,MAAM,eAAe,CAAC;AAY7D,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAGD,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,gBAAgB,CAAC,CAmChG"}

package/dist/services/providers/google.js
CHANGED
@@ -27,8 +27,10 @@ export async function createCompletion(params) {
    const promptTokens = usageMetadata?.promptTokenCount || 0;
    const completionTokens = usageMetadata?.candidatesTokenCount || 0;
    const totalTokens = usageMetadata?.totalTokenCount || (promptTokens + completionTokens);
-    const
-    const costUsd = (
+    const modelInfo = MODEL_REGISTRY[params.model];
+    const costUsd = ('pricing' in modelInfo && modelInfo.pricing?.input && modelInfo.pricing?.output)
+        ? (promptTokens / 1000 * modelInfo.pricing.input) + (completionTokens / 1000 * modelInfo.pricing.output)
+        : 0;
    return {
        content,
        promptTokens,

package/dist/services/providers/openai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/services/providers/openai.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAkB,MAAM,eAAe,CAAC;AAe7D,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,gBAAgB,CAAC,
+{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/services/providers/openai.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAkB,MAAM,eAAe,CAAC;AAe7D,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,gBAAgB,CAAC,CAqChG"}

package/dist/services/providers/openai.js
CHANGED
@@ -31,8 +31,10 @@ export async function createCompletion(params) {
    const completionTokens = response.usage?.completion_tokens || 0;
    const totalTokens = response.usage?.total_tokens || 0;
    // Calculate cost
-    const
-    const costUsd = (
+    const modelInfo = MODEL_REGISTRY[params.model];
+    const costUsd = ('pricing' in modelInfo && modelInfo.pricing?.input && modelInfo.pricing?.output)
+        ? (promptTokens / 1000 * modelInfo.pricing.input) + (completionTokens / 1000 * modelInfo.pricing.output)
+        : 0;
    return {
        content,
        promptTokens,

package/dist/services/providers/tests/test-google-adapter.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"test-google-adapter.d.ts","sourceRoot":"","sources":["../../../../src/services/providers/tests/test-google-adapter.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC"}

package/dist/services/providers/tests/test-google-adapter.js
ADDED
@@ -0,0 +1,224 @@
+import 'dotenv/config';
+import { GoogleAdapter } from '../google-adapter.js';
+const adapter = new GoogleAdapter();
+async function testChatCompletion() {
+    console.log('Testing chat completion...');
+    const request = {
+        gate: 'test-gate',
+        model: 'gemini-2.5-flash',
+        type: 'chat',
+        data: {
+            messages: [
+                { role: 'user', content: 'Say "Hello World" and nothing else.' },
+            ],
+            temperature: 0.7,
+            maxTokens: 10,
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Response:', response.content);
+    console.log('Tokens:', response.usage);
+    console.log('Cost:', response.cost);
+    console.log('Latency:', response.latencyMs + 'ms');
+    console.log('Finish reason:', response.finishReason);
+    console.log('✅ Chat completion test passed\n');
+}
+async function testChatWithVision() {
+    console.log('Testing chat with vision...');
+    const request = {
+        gate: 'test-gate',
+        model: 'gemini-2.5-flash',
+        type: 'chat',
+        data: {
+            messages: [
+                {
+                    role: 'user',
+                    content: 'What color is the sky in this image?',
+                    images: [
+                        {
+                            url: 'https://images.unsplash.com/photo-1765202659641-9ad9facfe5cf?q=80&w=1364&auto=format&fit=crop&ixlib=rb-4.1.0&ixid=M3wxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8fA%3D%3D',
+                        },
+                    ],
+                },
+            ],
+            maxTokens: 50,
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Response:', response.content);
+    console.log('Finish reason:', response.finishReason);
+    console.log('✅ Vision test passed\n');
+}
+async function testImageGeneration() {
+    console.log('Testing image generation...');
+    const request = {
+        gate: 'test-gate',
+        model: 'imagen-4.0-generate-001',
+        type: 'image',
+        data: {
+            prompt: 'A cute cat playing with a ball of yarn',
+            count: 1,
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Generated images:', response.images?.length);
+    console.log('Image base64 length:', response.images?.[0]?.base64?.length);
+    console.log('Latency:', response.latencyMs + 'ms');
+    console.log('✅ Image generation test passed\n');
+}
+async function testEmbeddings() {
+    console.log('Testing embeddings...');
+    const request = {
+        gate: 'test-gate',
+        model: 'text-embedding-004',
+        type: 'embeddings',
+        data: {
+            input: 'Hello world',
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Embeddings dimensions:', response.embeddings?.[0]?.length);
+    console.log('Latency:', response.latencyMs + 'ms');
+    console.log('✅ Embeddings test passed\n');
+}
+async function testToolCalling() {
+    console.log('Testing tool calling...');
+    // Step 1: Send message with tools available
+    const request = {
+        gate: 'test-gate',
+        model: 'gemini-2.5-flash',
+        type: 'chat',
+        data: {
+            messages: [
+                { role: 'user', content: 'Use the get_current_time function to tell me what time it is.' },
+            ],
+            tools: [
+                {
+                    type: 'function',
+                    function: {
+                        name: 'get_current_time',
+                        description: 'Returns the current time in ISO format',
+                        parameters: {
+                            type: 'object',
+                            properties: {},
+                        },
+                    },
+                },
+            ],
+            toolChoice: 'required',
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Response content:', response.content);
+    console.log('Tool calls:', response.toolCalls);
+    console.log('Finish reason:', response.finishReason);
+    console.log('Raw finish reason:', response.rawFinishReason);
+    if (!response.toolCalls || response.toolCalls.length === 0) {
+        console.log('Full response:', JSON.stringify(response, null, 2));
+        throw new Error('Expected tool calls but got none');
+    }
+    const toolCall = response.toolCalls[0];
+    console.log('Function called:', toolCall.function.name);
+    console.log('Function arguments:', toolCall.function.arguments);
+    // Step 2: Send tool response back
+    const toolResponseRequest = {
+        gate: 'test-gate',
+        model: 'gemini-2.5-flash',
+        type: 'chat',
+        data: {
+            messages: [
+                { role: 'user', content: 'Use the get_current_time function to tell me what time it is.' },
+                {
+                    role: 'assistant',
+                    toolCalls: response.toolCalls,
+                },
+                {
+                    role: 'tool',
+                    toolCallId: toolCall.id,
+                    name: toolCall.function.name,
+                    content: JSON.stringify({ current_time: '2025-12-20T07:30:00Z' }),
+                },
+            ],
+            tools: request.data.tools,
+        },
+    };
+    const finalResponse = await adapter.call(toolResponseRequest);
+    console.log('Final response:', finalResponse.content);
+    console.log('✅ Tool calling test passed\n');
+}
+async function testEmbeddingsMultiple() {
+    console.log('Testing multiple embeddings...');
+    const request = {
+        gate: 'test-gate',
+        model: 'text-embedding-004',
+        type: 'embeddings',
+        data: {
+            input: ['Hello world', 'Goodbye world', 'Testing embeddings'],
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Number of embeddings:', response.embeddings?.length);
+    console.log('First embedding dimensions:', response.embeddings?.[0]?.length);
+    console.log('Latency:', response.latencyMs + 'ms');
+    console.log('✅ Multiple embeddings test passed\n');
+}
+async function testTextToSpeech() {
+    console.log('Testing text-to-speech...');
+    const request = {
+        gate: 'test-gate',
+        model: 'gemini-2.5-flash-preview-tts',
+        type: 'tts',
+        data: {
+            input: 'Hello, this is a test of the Google text-to-speech capabilities.',
+            voice: 'Kore',
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Audio format:', response.audio?.format);
+    console.log('Audio base64 length:', response.audio?.base64?.length);
+    console.log('Latency:', response.latencyMs + 'ms');
+    console.log('✅ Text-to-speech test passed\n');
+}
+async function testVideoGeneration() {
+    console.log('Testing video generation (this may take a few minutes)...');
+    const request = {
+        gate: 'test-gate',
+        model: 'veo-2.0-generate-001',
+        type: 'video',
+        data: {
+            prompt: 'A serene ocean wave rolling onto a sandy beach at sunset',
+            size: '1280x720',
+            duration: 5,
+        },
+    };
+    const response = await adapter.call(request);
+    console.log('Generated videos:', response.videos?.length);
+    console.log('Video URL:', response.videos?.[0]?.url);
+    console.log('Video duration:', response.videos?.[0]?.duration);
+    console.log('Latency:', response.latencyMs + 'ms');
+    console.log('✅ Video generation test passed\n');
+}
+async function runTests() {
+    try {
+        console.log('Chat completion tests...');
+        await testChatCompletion();
+        await testChatWithVision();
+        console.log('Embeddings...');
+        await testEmbeddings();
+        await testEmbeddingsMultiple();
+        console.log('Image generation...');
+        await testImageGeneration();
+        console.log('Text-to-speech...');
+        await testTextToSpeech();
+        console.log('Video generation...');
+        await testVideoGeneration();
+        console.log('Tool calling...');
+        await testToolCalling();
+        console.log('✅ All tests passed!');
+    }
+    catch (error) {
+        console.error('❌ Test failed:', error);
+        process.exit(1);
+    }
+}
+runTests();

package/dist/services/task-analysis.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"task-analysis.d.ts","sourceRoot":"","sources":["../../src/services/task-analysis.ts"],"names":[],"mappings":"AACA,OAAO,EAAkB,YAAY,
+{"version":3,"file":"task-analysis.d.ts","sourceRoot":"","sources":["../../src/services/task-analysis.ts"],"names":[],"mappings":"AACA,OAAO,EAAkB,YAAY,EAAmC,MAAM,eAAe,CAAC;AAqD9F,wBAAsB,WAAW,CAC/B,WAAW,EAAE,MAAM,EACnB,eAAe,CAAC,EAAE;IAChB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB,GACA,OAAO,CAAC,YAAY,CAAC,CAqGvB"}

package/dist/services/task-analysis.js
CHANGED
@@ -1,15 +1,76 @@
 import Anthropic from '@anthropic-ai/sdk';
 import { MODEL_REGISTRY } from '@layer-ai/sdk';
+async function detectTaskType(description, anthropic) {
+    const prompt = `Analyze this task description and determine what TYPE of AI task it is.
+
+TASK DESCRIPTION:
+"${description}"
+
+AVAILABLE TASK TYPES:
+- chat: Conversational AI, text generation, Q&A, summarization, translation
+- image: Image generation, image creation
+- video: Video generation, video creation
+- audio: Audio/music generation
+- tts: Text-to-speech, voice synthesis
+- stt: Speech-to-text, audio transcription
+- embeddings: Text embeddings, semantic search
+- document: Document processing, OCR
+- responses: Complex reasoning tasks (o3-pro style models)
+- language-completion: Legacy text completion
+
+Return ONLY the task type as a single word, nothing else.`;
+    try {
+        const response = await anthropic.messages.create({
+            model: 'claude-haiku-4-5-20251001',
+            max_tokens: 50,
+            temperature: 0.0,
+            messages: [{
+                    role: 'user',
+                    content: prompt
+                }]
+        });
+        const responseContent = response.content[0];
+        if (responseContent.type !== 'text') {
+            throw new Error('Unexpected response type from Claude');
+        }
+        const detectedType = responseContent.text.trim().toLowerCase();
+        const validTypes = ['chat', 'image', 'video', 'audio', 'tts', 'stt', 'embeddings', 'document', 'responses', 'language-completion'];
+        if (validTypes.includes(detectedType)) {
+            return detectedType;
+        }
+        return 'chat';
+    }
+    catch (error) {
+        console.error('Failed to detect task type:', error);
+        return 'chat';
+    }
+}
 export async function analyzeTask(description, userPreferences) {
     const anthropic = new Anthropic({
         apiKey: process.env.ANTHROPIC_API_KEY
     });
-    const registryContext = JSON.stringify(MODEL_REGISTRY, null, 2);
     const costWeight = userPreferences?.costWeight ?? 0.33;
     const latencyWeight = userPreferences?.latencyWeight ?? 0.33;
     const qualityWeight = userPreferences?.qualityWeight ?? 0.33;
+    let taskType = 'chat';
+    try {
+        taskType = await detectTaskType(description, anthropic);
+    }
+    catch (error) {
+        console.error('Failed to detect task type, defaulting to chat:', error);
+    }
+    const filteredRegistry = {};
+    for (const [key, model] of Object.entries(MODEL_REGISTRY)) {
+        if (model.type === taskType) {
+            filteredRegistry[key] = model;
+        }
+    }
+    const registryContext = JSON.stringify(filteredRegistry, null, 2);
     const prompt = `You are analyzing a task to recommend the best AI models from our registry.
 
+TASK TYPE: ${taskType}
+All models below are specifically for ${taskType} tasks.
+
 MODEL REGISTRY (available models and their capabilities):
 ${registryContext}
 
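
Task analysis now runs a cheap classification pass first (Claude Haiku at temperature 0) and then prompts with only the type-matched slice of the registry, which keeps the recommendation prompt small. A sketch of the filtering step, mirroring the loop above (it assumes registry entries carry a type field, as the compiled code implies):

```ts
import { MODEL_REGISTRY } from '@layer-ai/sdk';

function modelsForTask(taskType: string): Record<string, unknown> {
  return Object.fromEntries(
    Object.entries(MODEL_REGISTRY).filter(
      ([, model]) => (model as { type?: string }).type === taskType,
    ),
  );
}

const imageModels = modelsForTask('image'); // only image-generation entries
```
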
@@ -61,11 +122,15 @@ Return JSON with:
        if (typeof (mapping) !== 'object' || Array.isArray(mapping)) {
            throw new Error('Mapping is in wrong format');
        }
-        return
+        return {
+            taskType,
+            ...mapping
+        };
    }
    catch (error) {
        console.error('Failed to find accurate task requirements', error);
        return {
+            taskType,
            primary: 'gpt-4o',
            alternatives: ['claude-sonnet-4-5-20250929', 'gemini-2.5-flash'],
            reasoning: 'Task analysis failed, returning safe defaults'
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@layer-ai/core",
-  "version": "0.
+  "version": "0.3.1",
   "description": "Core API routes and services for Layer AI",
   "type": "module",
   "main": "./dist/index.js",
@@ -35,7 +35,7 @@
     "nanoid": "^5.0.4",
     "openai": "^4.24.0",
     "pg": "^8.11.3",
-    "@layer-ai/sdk": "^0.
+    "@layer-ai/sdk": "^0.3.1"
   },
   "devDependencies": {
     "@types/express": "^4.17.21",