@odda-ai/matching-core 1.0.3 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.d.ts +3 -0
- package/dist/cjs/index.js +19 -0
- package/dist/cjs/package.json +1 -0
- package/dist/cjs/src/ai/AIProvider.d.ts +36 -0
- package/dist/cjs/src/ai/AIProvider.js +134 -0
- package/dist/cjs/src/ai/adapters/AnthropicAdapter.d.ts +12 -0
- package/dist/cjs/src/ai/adapters/AnthropicAdapter.js +42 -0
- package/dist/cjs/src/ai/adapters/OllamaAdapter.d.ts +17 -0
- package/dist/cjs/src/ai/adapters/OllamaAdapter.js +42 -0
- package/dist/cjs/src/ai/adapters/OpenAIAdapter.d.ts +12 -0
- package/dist/cjs/src/ai/adapters/OpenAIAdapter.js +51 -0
- package/dist/cjs/src/ai/adapters/index.d.ts +3 -0
- package/dist/cjs/src/ai/adapters/index.js +9 -0
- package/dist/cjs/src/ai/factory.d.ts +12 -0
- package/dist/cjs/src/ai/factory.js +40 -0
- package/dist/cjs/src/ai/index.d.ts +5 -0
- package/dist/cjs/src/ai/index.js +25 -0
- package/dist/cjs/src/ai/registry.d.ts +13 -0
- package/dist/cjs/src/ai/registry.js +17 -0
- package/dist/cjs/src/ai/types.d.ts +54 -0
- package/dist/cjs/src/ai/types.js +2 -0
- package/dist/cjs/src/cv-parser/PDFParserService.d.ts +41 -0
- package/dist/cjs/src/cv-parser/PDFParserService.js +176 -0
- package/dist/cjs/src/cv-parser/index.d.ts +2 -0
- package/dist/cjs/src/cv-parser/index.js +6 -0
- package/dist/cjs/src/cv-parser/types.d.ts +51 -0
- package/dist/cjs/src/cv-parser/types.js +2 -0
- package/dist/cjs/src/features/ai-cv-resume.service.d.ts +14 -0
- package/dist/cjs/src/features/ai-cv-resume.service.js +81 -0
- package/dist/cjs/src/features/ai-talent.service.d.ts +18 -0
- package/dist/cjs/src/features/ai-talent.service.js +32 -0
- package/dist/cjs/src/features/cv-chunking.service.d.ts +140 -0
- package/dist/cjs/src/features/cv-chunking.service.js +338 -0
- package/dist/cjs/src/features/index.d.ts +5 -0
- package/dist/cjs/src/features/index.js +23 -0
- package/dist/cjs/src/features/job-matcher.service.d.ts +14 -0
- package/dist/cjs/src/features/job-matcher.service.js +21 -0
- package/dist/cjs/src/features/prompts.d.ts +8 -0
- package/dist/cjs/src/features/prompts.js +613 -0
- package/dist/cjs/src/features/system-messages.d.ts +6 -0
- package/dist/cjs/src/features/system-messages.js +32 -0
- package/dist/cjs/src/features/types.d.ts +49 -0
- package/dist/cjs/src/features/types.js +18 -0
- package/package.json +3 -2
|
// package/dist/cjs/index.js — CommonJS entry point for @odda-ai/matching-core.
"use strict";
// TypeScript-emitted helper: re-binds property `k` of module `m` onto `o` (as `k2`),
// preserving live-binding semantics via a getter where possible.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-emitted helper: implements `export * from ...` (skips `default`
// and any name already present on `exports`).
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public API surface: AI provider/adapters, CV (PDF) parsing, feature services.
__exportStar(require("./src/ai/index.js"), exports);
__exportStar(require("./src/cv-parser/index.js"), exports);
__exportStar(require("./src/features/index.js"), exports);
@@ -0,0 +1 @@
|
|
|
1
|
+
{"type":"commonjs"}
|
|
// package/dist/cjs/src/ai/AIProvider.d.ts — type declarations for AIProvider.
import type { AIAdapter, BaseAIConfig, ChatMessage, AIResponse } from "./types.js";
import { AI_ADAPTERS } from "./registry.js";
/**
 * Generic class for querying any AI provider through a pluggable adapter.
 */
export declare class AIProvider {
    private config;
    private client;
    private adapter;
    constructor(config: BaseAIConfig, adapter: AIAdapter | keyof typeof AI_ADAPTERS);
    private createClient;
    /**
     * Sends a chat completion request to the AI provider.
     */
    chat(messages: ChatMessage[]): Promise<AIResponse>;
    /**
     * Sends a single prompt (optionally preceded by a system message) to the AI provider.
     */
    prompt(prompt: string, systemMessage?: string): Promise<AIResponse>;
    /**
     * Updates the provider configuration.
     */
    updateConfig(config: Partial<BaseAIConfig>): void;
    /**
     * Returns a copy of the current configuration.
     */
    getConfig(): BaseAIConfig;
    /**
     * Returns the adapter currently in use.
     */
    getAdapter(): AIAdapter;
    /**
     * Swaps the adapter while keeping the same configuration.
     */
    setAdapter(adapter: AIAdapter | keyof typeof AI_ADAPTERS): void;
}
// package/dist/cjs/src/ai/AIProvider.js
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AIProvider = void 0;
const axios_1 = __importDefault(require("axios"));
const registry_js_1 = require("./registry.js");
/**
 * Generic class for querying any AI provider through a pluggable adapter.
 */
class AIProvider {
    config;  // merged provider configuration (defaults + caller overrides)
    client;  // axios instance built from config + adapter headers
    adapter; // adapter that translates requests/responses for the provider
    /**
     * @param config  Base configuration (baseURL, model, apiKey, ...).
     * @param adapter An AIAdapter instance, or the name of a registered adapter.
     * @throws Error when no adapter is given or the named adapter is unknown.
     */
    constructor(config, adapter) {
        this.config = {
            temperature: 0.7,
            maxTokens: 1000,
            timeout: 300000, // 5 minutes — completions can be slow
            ...config,
        };
        // Resolve the adapter to use
        if (!adapter) {
            throw new Error("È necessario specificare un adapter");
        }
        if (typeof adapter === "string") {
            const registeredAdapter = registry_js_1.AI_ADAPTERS[adapter];
            if (!registeredAdapter) {
                throw new Error(`Adapter non trovato: ${adapter}`);
            }
            this.adapter = registeredAdapter;
        }
        else {
            this.adapter = adapter;
        }
        // Configure the HTTP client
        this.client = this.createClient();
    }
    // Builds the axios client. Adapter headers are spread first so that
    // user-supplied config.headers can override them.
    createClient() {
        const headers = {
            ...this.adapter.configureHeaders?.(this.config),
            ...this.config.headers,
        };
        const axiosConfig = {
            baseURL: this.config.baseURL,
            headers,
        };
        if (this.config.timeout !== undefined) {
            axiosConfig.timeout = this.config.timeout;
        }
        return axios_1.default.create(axiosConfig);
    }
    /**
     * Sends a chat completion to the AI provider.
     * @throws Error named "AIProviderError" (with `status`/`data` attached)
     *         when the HTTP request fails.
     */
    async chat(messages) {
        try {
            const { endpoint, body } = this.adapter.transformRequest(messages, this.config);
            const response = await this.client.post(endpoint, body);
            return this.adapter.transformResponse(response);
        }
        catch (error) {
            if (axios_1.default.isAxiosError(error)) {
                // Prefer the provider's structured error message when present.
                const errorMessage = error.response?.data?.error?.message ||
                    error.response?.data?.message ||
                    error.message;
                const axiosError = new Error(`Errore nella richiesta AI: ${errorMessage}`);
                axiosError.name = "AIProviderError";
                axiosError.status = error.response?.status;
                axiosError.data = error.response?.data;
                throw axiosError;
            }
            throw error;
        }
    }
    /**
     * Sends a single prompt (optionally preceded by a system message).
     */
    async prompt(prompt, systemMessage) {
        const messages = [];
        if (systemMessage) {
            messages.push({
                role: "system",
                content: systemMessage,
            });
        }
        messages.push({
            role: "user",
            content: prompt,
        });
        return this.chat(messages);
    }
    /**
     * Updates the provider configuration.
     * Recreates the HTTP client whenever an HTTP-relevant key appears in the
     * patch. BUGFIX: the previous truthiness check (`config.baseURL || ...`)
     * skipped recreation for falsy-but-present values such as `timeout: 0`
     * or `apiKey: ""`; key presence is checked instead.
     */
    updateConfig(config) {
        this.config = { ...this.config, ...config };
        if ("baseURL" in config || "apiKey" in config || "headers" in config || "timeout" in config) {
            this.client = this.createClient();
        }
    }
    /**
     * Returns a (shallow) copy of the current configuration.
     */
    getConfig() {
        return { ...this.config };
    }
    /**
     * Returns the adapter currently in use.
     */
    getAdapter() {
        return this.adapter;
    }
    /**
     * Swaps the adapter while keeping the same configuration.
     * @throws Error when a named adapter is not in the registry.
     */
    setAdapter(adapter) {
        if (typeof adapter === "string") {
            const registeredAdapter = registry_js_1.AI_ADAPTERS[adapter];
            if (!registeredAdapter) {
                throw new Error(`Adapter non trovato: ${adapter}`);
            }
            this.adapter = registeredAdapter;
        }
        else {
            this.adapter = adapter;
        }
        // Rebuild the client so the new adapter's headers take effect
        this.client = this.createClient();
    }
}
exports.AIProvider = AIProvider;
// package/dist/cjs/src/ai/adapters/AnthropicAdapter.d.ts
import type { AIAdapter, ChatMessage, BaseAIConfig, AIResponse } from '../types.js';
/**
 * Adapter for Anthropic Claude.
 */
export declare class AnthropicAdapter implements AIAdapter {
    transformRequest(messages: ChatMessage[], config: BaseAIConfig): {
        endpoint: string;
        body: any;
    };
    transformResponse(response: any): AIResponse;
    configureHeaders(config: BaseAIConfig): Record<string, string>;
}
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.AnthropicAdapter = void 0;
|
|
4
|
+
/**
|
|
5
|
+
* Adapter per Anthropic Claude
|
|
6
|
+
*/
|
|
7
|
+
class AnthropicAdapter {
|
|
8
|
+
transformRequest(messages, config) {
|
|
9
|
+
return {
|
|
10
|
+
endpoint: '/v1/messages',
|
|
11
|
+
body: {
|
|
12
|
+
model: config.model,
|
|
13
|
+
messages: messages.filter(m => m.role !== 'system'),
|
|
14
|
+
system: messages.find(m => m.role === 'system')?.content,
|
|
15
|
+
max_tokens: config.maxTokens || 1024,
|
|
16
|
+
temperature: config.temperature,
|
|
17
|
+
...config.additionalParams
|
|
18
|
+
}
|
|
19
|
+
};
|
|
20
|
+
}
|
|
21
|
+
transformResponse(response) {
|
|
22
|
+
const data = response.data;
|
|
23
|
+
return {
|
|
24
|
+
content: data.content?.[0]?.text || '',
|
|
25
|
+
model: data.model,
|
|
26
|
+
usage: data.usage ? {
|
|
27
|
+
promptTokens: data.usage.input_tokens,
|
|
28
|
+
completionTokens: data.usage.output_tokens,
|
|
29
|
+
totalTokens: data.usage.input_tokens + data.usage.output_tokens
|
|
30
|
+
} : undefined,
|
|
31
|
+
raw: data
|
|
32
|
+
};
|
|
33
|
+
}
|
|
34
|
+
configureHeaders(config) {
|
|
35
|
+
return {
|
|
36
|
+
'Content-Type': 'application/json',
|
|
37
|
+
'x-api-key': config.apiKey || '',
|
|
38
|
+
'anthropic-version': '2023-06-01'
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
exports.AnthropicAdapter = AnthropicAdapter;
|
|
// package/dist/cjs/src/ai/adapters/OllamaAdapter.d.ts
import type { AIAdapter, ChatMessage, BaseAIConfig, AIResponse } from '../types.js';
/**
 * Adapter for Ollama.
 */
export declare class OllamaAdapter implements AIAdapter {
    transformRequest(messages: ChatMessage[], config: BaseAIConfig): {
        endpoint: string;
        body: {
            model: string;
            messages: ChatMessage[];
            stream: boolean;
            options: any;
        };
    };
    transformResponse(response: any): AIResponse;
    configureHeaders(): Record<string, string>;
}
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.OllamaAdapter = void 0;
|
|
4
|
+
/**
|
|
5
|
+
* Adapter per Ollama
|
|
6
|
+
*/
|
|
7
|
+
class OllamaAdapter {
|
|
8
|
+
transformRequest(messages, config) {
|
|
9
|
+
return {
|
|
10
|
+
endpoint: '/chat',
|
|
11
|
+
body: {
|
|
12
|
+
model: config.model,
|
|
13
|
+
messages,
|
|
14
|
+
stream: false,
|
|
15
|
+
options: {
|
|
16
|
+
temperature: config.temperature,
|
|
17
|
+
num_predict: config.maxTokens,
|
|
18
|
+
...config.additionalParams
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
};
|
|
22
|
+
}
|
|
23
|
+
transformResponse(response) {
|
|
24
|
+
const data = response.data;
|
|
25
|
+
return {
|
|
26
|
+
content: data.message?.content || '',
|
|
27
|
+
model: data.model,
|
|
28
|
+
usage: data.prompt_eval_count && data.eval_count ? {
|
|
29
|
+
promptTokens: data.prompt_eval_count,
|
|
30
|
+
completionTokens: data.eval_count,
|
|
31
|
+
totalTokens: data.prompt_eval_count + data.eval_count
|
|
32
|
+
} : undefined,
|
|
33
|
+
raw: data
|
|
34
|
+
};
|
|
35
|
+
}
|
|
36
|
+
configureHeaders() {
|
|
37
|
+
return {
|
|
38
|
+
'Content-Type': 'application/json'
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
exports.OllamaAdapter = OllamaAdapter;
|
|
// package/dist/cjs/src/ai/adapters/OpenAIAdapter.d.ts
import type { AIAdapter, ChatMessage, BaseAIConfig, AIResponse } from '../types.js';
/**
 * Adapter for OpenAI and compatible providers (Azure OpenAI, etc.).
 */
export declare class OpenAIAdapter implements AIAdapter {
    transformRequest(messages: ChatMessage[], config: BaseAIConfig): {
        endpoint: string;
        body: any;
    };
    transformResponse(response: any): AIResponse;
    configureHeaders(config: BaseAIConfig): Record<string, string>;
}
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.OpenAIAdapter = void 0;
|
|
4
|
+
/**
|
|
5
|
+
* Adapter per OpenAI e provider compatibili (Azure OpenAI, ecc.)
|
|
6
|
+
*/
|
|
7
|
+
class OpenAIAdapter {
|
|
8
|
+
transformRequest(messages, config) {
|
|
9
|
+
const body = {
|
|
10
|
+
model: config.model,
|
|
11
|
+
messages,
|
|
12
|
+
// temperature: config.temperature,
|
|
13
|
+
// ...config.additionalParams
|
|
14
|
+
};
|
|
15
|
+
// Usa max_completion_tokens per i nuovi modelli, max_tokens per i vecchi
|
|
16
|
+
// if (config.maxTokens) {
|
|
17
|
+
// if (config.model?.includes('gpt-4') || config.model?.includes('gpt-5')) {
|
|
18
|
+
// body.max_completion_tokens = config.maxTokens;
|
|
19
|
+
// } else {
|
|
20
|
+
// body.max_tokens = config.maxTokens;
|
|
21
|
+
// }
|
|
22
|
+
// }
|
|
23
|
+
return {
|
|
24
|
+
endpoint: '/chat/completions',
|
|
25
|
+
body
|
|
26
|
+
};
|
|
27
|
+
}
|
|
28
|
+
transformResponse(response) {
|
|
29
|
+
const data = response.data;
|
|
30
|
+
return {
|
|
31
|
+
content: data.choices?.[0]?.message?.content || '',
|
|
32
|
+
model: data.model,
|
|
33
|
+
usage: data.usage ? {
|
|
34
|
+
promptTokens: data.usage.prompt_tokens,
|
|
35
|
+
completionTokens: data.usage.completion_tokens,
|
|
36
|
+
totalTokens: data.usage.total_tokens
|
|
37
|
+
} : undefined,
|
|
38
|
+
raw: data
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
configureHeaders(config) {
|
|
42
|
+
const headers = {
|
|
43
|
+
'Content-Type': 'application/json'
|
|
44
|
+
};
|
|
45
|
+
if (config.apiKey) {
|
|
46
|
+
headers['Authorization'] = `Bearer ${config.apiKey}`;
|
|
47
|
+
}
|
|
48
|
+
return headers;
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
exports.OpenAIAdapter = OpenAIAdapter;
|
|
// package/dist/cjs/src/ai/adapters/index.js — barrel re-exports for the adapters.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AnthropicAdapter = exports.OllamaAdapter = exports.OpenAIAdapter = void 0;
var OpenAIAdapter_js_1 = require("./OpenAIAdapter.js");
Object.defineProperty(exports, "OpenAIAdapter", { enumerable: true, get: function () { return OpenAIAdapter_js_1.OpenAIAdapter; } });
var OllamaAdapter_js_1 = require("./OllamaAdapter.js");
Object.defineProperty(exports, "OllamaAdapter", { enumerable: true, get: function () { return OllamaAdapter_js_1.OllamaAdapter; } });
var AnthropicAdapter_js_1 = require("./AnthropicAdapter.js");
Object.defineProperty(exports, "AnthropicAdapter", { enumerable: true, get: function () { return AnthropicAdapter_js_1.AnthropicAdapter; } });
// package/dist/cjs/src/ai/factory.d.ts
import { AIProvider } from './AIProvider.js';
import type { BaseAIConfig, AIAdapter } from './types.js';
/**
 * Factory helpers for creating commonly-used providers.
 */
export declare const createAIProvider: {
    openai: (apiKey: string, model?: string, config?: Partial<BaseAIConfig>) => AIProvider;
    ollama: (model?: string, baseURL?: string, config?: Partial<BaseAIConfig>) => AIProvider;
    anthropic: (apiKey: string, model?: string, config?: Partial<BaseAIConfig>) => AIProvider;
    azureOpenAI: (apiKey: string, endpoint: string, deployment: string, config?: Partial<BaseAIConfig>) => AIProvider;
    custom: (baseURL: string, adapter: AIAdapter, config?: Partial<BaseAIConfig>) => AIProvider;
};
// package/dist/cjs/src/ai/factory.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createAIProvider = void 0;
const AIProvider_js_1 = require("./AIProvider.js");
/**
 * Factory helpers for creating commonly-used providers.
 */
exports.createAIProvider = {
    // OpenAI against the public API base URL; defaults to the 'gpt-4' model.
    openai: (apiKey, model = 'gpt-4', config) => new AIProvider_js_1.AIProvider({
        baseURL: 'https://api.openai.com/v1',
        apiKey,
        model,
        ...config
    }, 'openai'),
    // Ollama instance; defaults to 'llama2' on localhost:11434.
    ollama: (model = 'llama2', baseURL = 'http://localhost:11434/api', config) => new AIProvider_js_1.AIProvider({
        baseURL,
        model,
        ...config
    }, 'ollama'),
    // Anthropic Claude; defaults to claude-3-5-sonnet-20241022.
    anthropic: (apiKey, model = 'claude-3-5-sonnet-20241022', config) => new AIProvider_js_1.AIProvider({
        baseURL: 'https://api.anthropic.com',
        apiKey,
        model,
        ...config
    }, 'anthropic'),
    // Azure OpenAI deployment: the deployment name doubles as the model name
    // and authentication uses the 'api-key' header instead of a Bearer token.
    // NOTE(review): `...config` is spread after `headers`, so a caller-supplied
    // config.headers replaces (not merges with) the 'api-key' header — confirm
    // this is intended.
    azureOpenAI: (apiKey, endpoint, deployment, config) => new AIProvider_js_1.AIProvider({
        baseURL: `${endpoint}/openai/deployments/${deployment}`,
        apiKey,
        model: deployment,
        headers: {
            'api-key': apiKey
        },
        ...config
    }, 'azure'),
    // Arbitrary endpoint with a caller-supplied adapter instance.
    custom: (baseURL, adapter, config) => new AIProvider_js_1.AIProvider({
        baseURL,
        model: config?.model || 'default',
        ...config
    }, adapter)
};
// package/dist/cjs/src/ai/index.js — barrel module for the AI subsystem.
"use strict";
// TypeScript-emitted helper: re-binds property `k` of module `m` onto `o` (as `k2`),
// preserving live-binding semantics via a getter where possible.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-emitted helper: implements `export * from ...` (skips `default`
// and any name already present on `exports`).
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AI_ADAPTERS = exports.createAIProvider = exports.AIProvider = void 0;
var AIProvider_js_1 = require("./AIProvider.js");
Object.defineProperty(exports, "AIProvider", { enumerable: true, get: function () { return AIProvider_js_1.AIProvider; } });
var factory_js_1 = require("./factory.js");
Object.defineProperty(exports, "createAIProvider", { enumerable: true, get: function () { return factory_js_1.createAIProvider; } });
var registry_js_1 = require("./registry.js");
Object.defineProperty(exports, "AI_ADAPTERS", { enumerable: true, get: function () { return registry_js_1.AI_ADAPTERS; } });
__exportStar(require("./types.js"), exports);
__exportStar(require("./adapters/index.js"), exports);
// package/dist/cjs/src/ai/registry.d.ts
import { OpenAIAdapter } from './adapters/OpenAIAdapter.js';
import { OllamaAdapter } from './adapters/OllamaAdapter.js';
import { AnthropicAdapter } from './adapters/AnthropicAdapter.js';
/**
 * Registry of the available adapters, keyed by provider name.
 */
export declare const AI_ADAPTERS: {
    readonly openai: OpenAIAdapter;
    readonly ollama: OllamaAdapter;
    readonly anthropic: AnthropicAdapter;
    readonly azure: OpenAIAdapter;
    readonly 'azure-openai': OpenAIAdapter;
};
// package/dist/cjs/src/ai/registry.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AI_ADAPTERS = void 0;
const OpenAIAdapter_js_1 = require("./adapters/OpenAIAdapter.js");
const OllamaAdapter_js_1 = require("./adapters/OllamaAdapter.js");
const AnthropicAdapter_js_1 = require("./adapters/AnthropicAdapter.js");
/**
 * Registry of the available adapters, keyed by provider name.
 */
exports.AI_ADAPTERS = {
    openai: new OpenAIAdapter_js_1.OpenAIAdapter(),
    ollama: new OllamaAdapter_js_1.OllamaAdapter(),
    anthropic: new AnthropicAdapter_js_1.AnthropicAdapter(),
    // Compatibility aliases — Azure OpenAI uses the OpenAI request format.
    azure: new OpenAIAdapter_js_1.OpenAIAdapter(),
    'azure-openai': new OpenAIAdapter_js_1.OpenAIAdapter()
};
// package/dist/cjs/src/ai/types.d.ts — shared types for the AI subsystem.
/**
 * A message in chat format.
 */
export interface ChatMessage {
    role: string;
    content: string;
    name?: string;
}
/**
 * Response from the AI provider, normalized across adapters.
 */
export interface AIResponse {
    content: string;
    model?: string;
    usage?: {
        promptTokens: number;
        completionTokens: number;
        totalTokens: number;
    } | undefined;
    raw?: any;
}
/**
 * Base configuration for an AI provider.
 */
export interface BaseAIConfig {
    baseURL: string;
    model: string;
    apiKey?: string;
    headers?: Record<string, string>;
    temperature?: number;
    maxTokens?: number;
    timeout?: number;
    [key: string]: any;
}
/**
 * Adapter that transforms requests and responses for a specific provider.
 */
export interface AIAdapter {
    /**
     * Transforms the messages into the format required by the provider.
     */
    transformRequest(messages: ChatMessage[], config: BaseAIConfig): {
        endpoint: string;
        body: any;
    };
    /**
     * Transforms the provider's response into the standard format.
     */
    transformResponse(response: any): AIResponse;
    /**
     * Configures provider-specific HTTP headers.
     */
    configureHeaders?(config: BaseAIConfig): Record<string, string>;
}
// package/dist/cjs/src/cv-parser/PDFParserService.d.ts
import type { PDFExtractionOptions, PDFExtractionResult } from './types.js';
/**
 * Service for extracting text from PDF files.
 */
export declare class PDFParserService {
    /**
     * Extracts the text from a PDF file.
     * @param pdfPath Path to the PDF file, or a Buffer with its contents
     * @param options Extraction options
     */
    extractText(pdfPath: string | Buffer, options?: PDFExtractionOptions): Promise<PDFExtractionResult>;
    /**
     * Extracts text from multiple PDFs at once.
     * @param pdfPaths Array of PDF paths (or Buffers)
     * @param options Extraction options
     */
    extractTextFromMultiple(pdfPaths: (string | Buffer)[], options?: PDFExtractionOptions): Promise<PDFExtractionResult[]>;
    /**
     * Checks whether a file is a valid PDF.
     * @param pdfPath Path to the PDF file, or a Buffer
     */
    isValidPDF(pdfPath: string | Buffer): Promise<boolean>;
    /**
     * Creates a custom renderer for pages.
     */
    private createPageRenderer;
    /**
     * Extracts metadata from the PDF.
     */
    private extractMetadata;
    /**
     * Extracts the text of a single, specific page.
     * @param pdfPath Path to the PDF file, or a Buffer
     * @param pageNumber Page number (1-based)
     */
    extractPageText(pdfPath: string | Buffer, pageNumber: number): Promise<string>;
}
/**
 * Singleton parser instance.
 */
export declare const pdfParser: PDFParserService;