@layer-ai/core 0.1.10 → 0.2.0

Files changed (35)
  1. package/dist/index.d.ts +1 -0
  2. package/dist/index.d.ts.map +1 -1
  3. package/dist/index.js +1 -0
  4. package/dist/routes/complete.d.ts.map +1 -1
  5. package/dist/routes/complete.js +24 -4
  6. package/dist/routes/gates.d.ts.map +1 -1
  7. package/dist/routes/gates.js +8 -13
  8. package/dist/routes/v2/complete.d.ts +4 -0
  9. package/dist/routes/v2/complete.d.ts.map +1 -0
  10. package/dist/routes/v2/complete.js +214 -0
  11. package/dist/routes/v2/tests/test-complete-anthropic.d.ts +2 -0
  12. package/dist/routes/v2/tests/test-complete-anthropic.d.ts.map +1 -0
  13. package/dist/routes/v2/tests/test-complete-anthropic.js +132 -0
  14. package/dist/routes/v2/tests/test-complete-openai.d.ts +2 -0
  15. package/dist/routes/v2/tests/test-complete-openai.d.ts.map +1 -0
  16. package/dist/routes/v2/tests/test-complete-openai.js +178 -0
  17. package/dist/routes/v2/tests/test-complete-routing.d.ts +2 -0
  18. package/dist/routes/v2/tests/test-complete-routing.d.ts.map +1 -0
  19. package/dist/routes/v2/tests/test-complete-routing.js +192 -0
  20. package/dist/services/providers/anthropic-adapter.d.ts +12 -0
  21. package/dist/services/providers/anthropic-adapter.d.ts.map +1 -0
  22. package/dist/services/providers/anthropic-adapter.js +203 -0
  23. package/dist/services/providers/base-adapter.d.ts +1 -1
  24. package/dist/services/providers/base-adapter.d.ts.map +1 -1
  25. package/dist/services/providers/base-adapter.js +1 -1
  26. package/dist/services/providers/openai-adapter.d.ts +2 -2
  27. package/dist/services/providers/openai-adapter.d.ts.map +1 -1
  28. package/dist/services/providers/openai-adapter.js +15 -3
  29. package/dist/services/providers/tests/test-anthropic-adapter.d.ts +2 -0
  30. package/dist/services/providers/tests/test-anthropic-adapter.d.ts.map +1 -0
  31. package/dist/services/providers/tests/test-anthropic-adapter.js +104 -0
  32. package/dist/services/providers/tests/test-openai-adapter.d.ts +2 -0
  33. package/dist/services/providers/tests/test-openai-adapter.d.ts.map +1 -0
  34. package/dist/services/providers/tests/test-openai-adapter.js +118 -0
  35. package/package.json +2 -2
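Note on the shape of this release: 0.2.0 adds a v2 complete route, a new AnthropicAdapter next to the existing OpenAIAdapter, renames the shared base class from ProviderAdapter to BaseProviderAdapter, and ships smoke-test scripts for both adapters. The TypeScript sketch below is reconstructed from the test scripts shown later in this diff; the type names and which fields are optional are inferred, not taken from the package's published .d.ts files.

// Inferred sketch only — names and optionality reconstructed from the
// test scripts in this diff, not from @layer-ai/core's own typings.
type Modality = 'chat' | 'image' | 'embeddings' | 'tts' | 'video';

interface CompletionRequest {
  gate: string;
  model?: string;               // 0.2.0 validates this per modality (see the openai-adapter.js hunks)
  type: Modality;
  data: Record<string, unknown>;
}

interface CompletionResponse {
  content?: string;
  toolCalls?: unknown[];
  images?: Array<{ url?: string; revisedPrompt?: string }>;
  embeddings?: number[][];
  audio?: { format?: string; base64?: string };
  usage?: unknown;
  cost?: number;
  latencyMs?: number;
  finishReason?: string;
}

// Both adapters appear to extend the renamed BaseProviderAdapter and expose call():
declare class BaseProviderAdapter {
  provider: string;
  call(request: CompletionRequest): Promise<CompletionResponse>;
}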
package/dist/services/providers/openai-adapter.js CHANGED
@@ -1,5 +1,5 @@
  import OpenAI from 'openai';
- import { ProviderAdapter } from './base-adapter.js';
+ import { BaseProviderAdapter } from './base-adapter.js';
  let openai = null;
  function getOpenAIClient() {
  if (!openai) {
@@ -9,7 +9,7 @@ function getOpenAIClient() {
  }
  return openai;
  }
- export class OpenAIAdapter extends ProviderAdapter {
+ export class OpenAIAdapter extends BaseProviderAdapter {
  constructor() {
  super(...arguments);
  this.provider = 'openai';
@@ -76,7 +76,7 @@ export class OpenAIAdapter extends ProviderAdapter {
  case 'tts':
  return this.handleTextToSpeech(request);
  case 'video':
- throw new Error('Video generation not yet supported by OpenAI');
+ throw new Error('Video generation not yet supported by LayerAI');
  default:
  throw new Error(`Unknown modality: ${request.type}`);
  }
@@ -85,6 +85,9 @@ export class OpenAIAdapter extends ProviderAdapter {
  const startTime = Date.now();
  const client = getOpenAIClient();
  const { data: chat, model } = request;
+ if (!model) {
+ throw new Error('Model is required for chat completion');
+ }
  const messages = [];
  if (chat.systemPrompt) {
  messages.push({ role: 'system', content: chat.systemPrompt });
@@ -172,6 +175,9 @@ export class OpenAIAdapter extends ProviderAdapter {
  const startTime = Date.now();
  const client = getOpenAIClient();
  const { data: image, model } = request;
+ if (!model) {
+ throw new Error('Model is required for image generation');
+ }
  const response = await client.images.generate({
  model: model,
  prompt: image.prompt,
@@ -194,6 +200,9 @@ export class OpenAIAdapter extends ProviderAdapter {
  const startTime = Date.now();
  const client = getOpenAIClient();
  const { data: embedding, model } = request;
+ if (!model) {
+ throw new Error('Model is required for embeddings');
+ }
  const response = await client.embeddings.create({
  model: model,
  input: embedding.input,
@@ -219,6 +228,9 @@ export class OpenAIAdapter extends ProviderAdapter {
  const startTime = Date.now();
  const client = getOpenAIClient();
  const { data: tts, model } = request;
+ if (!model) {
+ throw new Error('Model is required for tts');
+ }
  const response = await client.audio.speech.create({
  model: model,
  input: tts.input,
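The openai-adapter.js hunks above do three things: the base class import is renamed (ProviderAdapter to BaseProviderAdapter), the unsupported-video error message now names LayerAI instead of OpenAI, and every handler gains an explicit model guard. A minimal sketch of the new guard behavior, assuming the request shape used by the test scripts later in this diff (the import path is illustrative; the bundled tests import the adapter relatively):

// Illustrative path — the bundled tests use '../openai-adapter.js' instead.
import { OpenAIAdapter } from './dist/services/providers/openai-adapter.js';

const adapter = new OpenAIAdapter();

try {
  // `model` intentionally omitted — previously an undefined model was passed
  // through to the OpenAI SDK call; in 0.2.0 the adapter fails fast before
  // making any network request.
  await adapter.call({
    gate: 'test-gate',
    type: 'chat',
    data: { messages: [{ role: 'user', content: 'Hi' }] },
  });
} catch (err) {
  console.error(err); // Error: Model is required for chat completion
}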
package/dist/services/providers/tests/test-anthropic-adapter.d.ts ADDED
@@ -0,0 +1,2 @@
+ export {};
+ //# sourceMappingURL=test-anthropic-adapter.d.ts.map
package/dist/services/providers/tests/test-anthropic-adapter.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"test-anthropic-adapter.d.ts","sourceRoot":"","sources":["../../../../src/services/providers/tests/test-anthropic-adapter.ts"],"names":[],"mappings":""}
package/dist/services/providers/tests/test-anthropic-adapter.js ADDED
@@ -0,0 +1,104 @@
+ import { AnthropicAdapter } from '../anthropic-adapter.js';
+ const adapter = new AnthropicAdapter();
+ async function testChatCompletion() {
+ console.log('Testing chat completion...');
+ const request = {
+ gate: 'test-gate',
+ model: 'claude-sonnet-4-5-20250929',
+ type: 'chat',
+ data: {
+ messages: [
+ { role: 'user', content: 'Say "Hello World" and nothing else.' }
+ ],
+ temperature: 0.7,
+ maxTokens: 10,
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Response:', response.content);
+ console.log('Tokens:', response.usage);
+ console.log('Cost:', response.cost);
+ console.log('Latency:', response.latencyMs + 'ms');
+ console.log('Finish reason:', response.finishReason);
+ console.log('✅ Chat completion test passed\n');
+ }
+ async function testChatWithVision() {
+ console.log('Testing chat with vision...');
+ const request = {
+ gate: 'test-gate',
+ model: 'claude-sonnet-4-5-20250929',
+ type: 'chat',
+ data: {
+ messages: [
+ {
+ role: 'user',
+ content: 'What color is the sky in this image?',
+ images: [{
+ url: 'https://images.unsplash.com/photo-1765202659641-9ad9facfe5cf?q=80&w=1364&auto=format&fit=crop&ixlib=rb-4.1.0&ixid=M3wxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8fA%3D%3D',
+ }]
+ }
+ ],
+ maxTokens: 50,
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Response:', response.content);
+ console.log('Finish reason:', response.finishReason);
+ console.log('✅ Vision test passed\n');
+ }
+ async function testToolCalls() {
+ console.log('Testing tool calls...');
+ const request = {
+ gate: 'test-gate',
+ model: 'claude-sonnet-4-5-20250929',
+ type: 'chat',
+ data: {
+ messages: [
+ { role: 'user', content: 'What is the weather in San Francisco?' }
+ ],
+ tools: [
+ {
+ type: 'function',
+ function: {
+ name: 'get_weather',
+ description: 'Get the current weather for a location',
+ parameters: {
+ type: 'object',
+ properties: {
+ location: {
+ type: 'string',
+ description: 'The city and state, e.g. San Francisco, CA',
+ },
+ },
+ required: ['location'],
+ },
+ },
+ },
+ ],
+ maxTokens: 100,
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Response:', response.content);
+ console.log('Tool calls:', response.toolCalls);
+ console.log('Finish reason:', response.finishReason);
+ if (response.toolCalls && response.toolCalls.length > 0) {
+ console.log('✅ Tool calls test passed\n');
+ }
+ else {
+ throw new Error('Expected tool calls but got none');
+ }
+ }
+ async function runTests() {
+ try {
+ await testChatCompletion();
+ await testChatWithVision();
+ await testToolCalls();
+ console.log('✅ All tests passed!');
+ }
+ catch (error) {
+ console.error('❌ Test failed:', error);
+ process.exit(1);
+ }
+ }
+ runTests();
package/dist/services/providers/tests/test-openai-adapter.d.ts ADDED
@@ -0,0 +1,2 @@
+ export {};
+ //# sourceMappingURL=test-openai-adapter.d.ts.map
package/dist/services/providers/tests/test-openai-adapter.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"test-openai-adapter.d.ts","sourceRoot":"","sources":["../../../../src/services/providers/tests/test-openai-adapter.ts"],"names":[],"mappings":""}
package/dist/services/providers/tests/test-openai-adapter.js ADDED
@@ -0,0 +1,118 @@
+ import { OpenAIAdapter } from '../openai-adapter.js';
+ const adapter = new OpenAIAdapter();
+ async function testChatCompletion() {
+ console.log('Testing chat completion...');
+ const request = {
+ gate: 'test-gate',
+ model: 'gpt-4o-mini',
+ type: 'chat',
+ data: {
+ messages: [
+ { role: 'user', content: 'Say "Hello World" and nothing else.' }
+ ],
+ temperature: 0.7,
+ maxTokens: 10,
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Response:', response.content);
+ console.log('Tokens:', response.usage);
+ console.log('Cost:', response.cost);
+ console.log('Latency:', response.latencyMs + 'ms');
+ console.log('Finish reason:', response.finishReason);
+ console.log('✅ Chat completion test passed\n');
+ }
+ async function testChatWithVision() {
+ console.log('Testing chat with vision...');
+ const request = {
+ gate: 'test-gate',
+ model: 'gpt-4o-mini',
+ type: 'chat',
+ data: {
+ messages: [
+ {
+ role: 'user',
+ content: 'What color is the sky in this image?',
+ images: [{
+ url: 'https://images.unsplash.com/photo-1765202659641-9ad9facfe5cf?q=80&w=1364&auto=format&fit=crop&ixlib=rb-4.1.0&ixid=M3wxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8fA%3D%3D',
+ detail: 'high'
+ }]
+ }
+ ],
+ maxTokens: 50,
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Response:', response.content);
+ console.log('Finish reason:', response.finishReason);
+ console.log('✅ Vision test passed\n');
+ }
+ async function testImageGeneration() {
+ console.log('Testing image generation...');
+ const request = {
+ gate: 'test-gate',
+ model: 'dall-e-3',
+ type: 'image',
+ data: {
+ prompt: 'A cute cat playing with a ball of yarn',
+ size: '1024x1024',
+ quality: 'standard',
+ count: 1,
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Generated images:', response.images?.length);
+ console.log('Image URL:', response.images?.[0]?.url);
+ console.log('Revised prompt:', response.images?.[0]?.revisedPrompt);
+ console.log('✅ Image generation test passed\n');
+ }
+ async function testEmbeddings() {
+ console.log('Testing embeddings...');
+ const request = {
+ gate: 'test-gate',
+ model: 'text-embedding-3-small',
+ type: 'embeddings',
+ data: {
+ input: 'Hello world',
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Embeddings dimensions:', response.embeddings?.[0]?.length);
+ console.log('Tokens:', response.usage);
+ console.log('Cost:', response.cost);
+ console.log('✅ Embeddings test passed\n');
+ }
+ async function testTextToSpeech() {
+ console.log('Testing text-to-speech...');
+ const request = {
+ gate: 'test-gate',
+ model: 'tts-1',
+ type: 'tts',
+ data: {
+ input: 'Hello, this is a test.',
+ voice: 'alloy',
+ speed: 1.0,
+ responseFormat: 'mp3',
+ }
+ };
+ const response = await adapter.call(request);
+ console.log('Audio format:', response.audio?.format);
+ console.log('Audio base64 length:', response.audio?.base64?.length);
+ console.log('✅ Text-to-speech test passed\n');
+ }
+ async function runTests() {
+ try {
+ await testChatCompletion();
+ console.log('Testing vision...');
+ await testChatWithVision();
+ await testImageGeneration();
+ await testEmbeddings();
+ await testTextToSpeech();
+ console.log('✅ All tests passed!');
+ }
+ catch (error) {
+ console.error('❌ Test failed:', error);
+ process.exit(1);
+ }
+ }
+ runTests();
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@layer-ai/core",
- "version": "0.1.10",
+ "version": "0.2.0",
  "description": "Core API routes and services for Layer AI",
  "type": "module",
  "main": "./dist/index.js",
@@ -33,7 +33,7 @@
  "dependencies": {
  "@anthropic-ai/sdk": "^0.39.0",
  "@google/genai": "^1.30.0",
- "@layer-ai/sdk": "^0.1.4",
+ "@layer-ai/sdk": "workspace:^",
  "bcryptjs": "^2.4.3",
  "express": "^4.18.2",
  "ioredis": "^5.3.2",