promptfoo 0.17.3 → 0.17.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/README.md +3 -2
  2. package/dist/package.json +2 -2
  3. package/dist/src/assertions.d.ts.map +1 -1
  4. package/dist/src/assertions.js +14 -2
  5. package/dist/src/assertions.js.map +1 -1
  6. package/dist/src/index.d.ts.map +1 -1
  7. package/dist/src/index.js +1 -0
  8. package/dist/src/index.js.map +1 -1
  9. package/dist/src/main.js +70 -39
  10. package/dist/src/main.js.map +1 -1
  11. package/dist/src/providers/anthropic.js.map +1 -1
  12. package/dist/src/providers/azureopenai.d.ts +34 -0
  13. package/dist/src/providers/azureopenai.d.ts.map +1 -0
  14. package/dist/src/providers/azureopenai.js +234 -0
  15. package/dist/src/providers/azureopenai.js.map +1 -0
  16. package/dist/src/providers/openai.d.ts.map +1 -1
  17. package/dist/src/providers/openai.js +23 -9
  18. package/dist/src/providers/openai.js.map +1 -1
  19. package/dist/src/providers.d.ts.map +1 -1
  20. package/dist/src/providers.js +16 -0
  21. package/dist/src/providers.js.map +1 -1
  22. package/dist/src/telemetry.d.ts +1 -0
  23. package/dist/src/telemetry.d.ts.map +1 -1
  24. package/dist/src/telemetry.js +7 -0
  25. package/dist/src/telemetry.js.map +1 -1
  26. package/dist/src/types.d.ts +2 -1
  27. package/dist/src/types.d.ts.map +1 -1
  28. package/dist/src/updates.d.ts.map +1 -1
  29. package/dist/src/updates.js +10 -1
  30. package/dist/src/updates.js.map +1 -1
  31. package/dist/src/util.d.ts +3 -0
  32. package/dist/src/util.d.ts.map +1 -1
  33. package/dist/src/util.js +40 -1
  34. package/dist/src/util.js.map +1 -1
  35. package/dist/src/web/client/assets/{index-58a0e3e3.js → index-c2756e5d.js} +1 -1
  36. package/dist/src/web/client/index.html +1 -1
  37. package/package.json +2 -2
  38. package/src/assertions.ts +18 -2
  39. package/src/index.ts +1 -0
  40. package/src/main.ts +88 -40
  41. package/src/providers/anthropic.ts +1 -1
  42. package/src/providers/azureopenai.ts +280 -0
  43. package/src/providers/openai.ts +27 -11
  44. package/src/providers.ts +19 -0
  45. package/src/telemetry.ts +14 -1
  46. package/src/types.ts +4 -0
  47. package/src/updates.ts +10 -1
  48. package/src/util.ts +40 -0
  49. package/src/web/client/package-lock.json +5726 -0
  50. package/src/web/client/src/ResultsView.tsx +12 -10
@@ -0,0 +1,280 @@
1
+ import logger from '../logger';
2
+ import { fetchJsonWithCache } from '../cache';
3
+ import { REQUEST_TIMEOUT_MS } from './shared';
4
+
5
+ import type { ApiProvider, ProviderEmbeddingResponse, ProviderResponse } from '../types.js';
6
+
7
// Options shared by the Azure OpenAI completion and chat-completion providers.
// Values may be supplied at construction time or per call; per-call values win.
interface AzureOpenAiCompletionOptions {
  // Sampling temperature; when unset, falls back to the OPENAI_TEMPERATURE
  // environment variable (and then 0) inside callApi.
  temperature?: number;
  // OpenAI function-calling definitions, forwarded verbatim in the request body.
  functions?: {
    name: string;
    description?: string;
    parameters: any;
  }[];
  // Whether the model is allowed ('auto') or forbidden ('none') to call a function.
  function_call?: 'none' | 'auto';
}
16
+
17
+ class AzureOpenAiGenericProvider implements ApiProvider {
18
+ deploymentName: string;
19
+ apiKey?: string;
20
+ apiHost?: string;
21
+
22
+ constructor(deploymentName: string, apiKey?: string) {
23
+ this.deploymentName = deploymentName;
24
+
25
+ this.apiKey = apiKey || process.env.AZURE_OPENAI_API_KEY;
26
+
27
+ this.apiHost = process.env.AZURE_OPENAI_API_HOST;
28
+ }
29
+
30
+ id(): string {
31
+ return `azureopenai:${this.deploymentName}`;
32
+ }
33
+
34
+ toString(): string {
35
+ return `[Azure OpenAI Provider ${this.deploymentName}]`;
36
+ }
37
+
38
+ // @ts-ignore: Prompt is not used in this implementation
39
+ async callApi(prompt: string, options?: AzureOpenAiCompletionOptions): Promise<ProviderResponse> {
40
+ throw new Error('Not implemented');
41
+ }
42
+ }
43
+
44
+ export class AzureOpenAiEmbeddingProvider extends AzureOpenAiGenericProvider {
45
+ async callEmbeddingApi(text: string): Promise<ProviderEmbeddingResponse> {
46
+ if (!this.apiKey) {
47
+ throw new Error('Azure OpenAI API key must be set for similarity comparison');
48
+ }
49
+ if (!this.apiHost) {
50
+ throw new Error('Azure OpenAI API host must be set');
51
+ }
52
+
53
+ const body = {
54
+ input: text,
55
+ model: this.deploymentName,
56
+ };
57
+ let data,
58
+ cached = false;
59
+ try {
60
+ ({ data, cached } = (await fetchJsonWithCache(
61
+ `https://${this.apiHost}/openai/deployments/${this.deploymentName}/embeddings?api-version=2023-07-01-preview`,
62
+ {
63
+ method: 'POST',
64
+ headers: {
65
+ 'Content-Type': 'application/json',
66
+ 'api-key': this.apiKey,
67
+ },
68
+ body: JSON.stringify(body),
69
+ },
70
+ REQUEST_TIMEOUT_MS,
71
+ )) as unknown as any);
72
+ } catch (err) {
73
+ return {
74
+ error: `API call error: ${String(err)}`,
75
+ tokenUsage: {
76
+ total: 0,
77
+ prompt: 0,
78
+ completion: 0,
79
+ },
80
+ };
81
+ }
82
+ logger.debug(`\tAzure OpenAI API response (embeddings): ${JSON.stringify(data)}`);
83
+
84
+ try {
85
+ const embedding = data?.data?.[0]?.embedding;
86
+ if (!embedding) {
87
+ throw new Error('No embedding returned');
88
+ }
89
+ const ret = {
90
+ embedding,
91
+ tokenUsage: cached
92
+ ? { cached: data.usage.total_tokens }
93
+ : {
94
+ total: data.usage.total_tokens,
95
+ prompt: data.usage.prompt_tokens,
96
+ completion: data.usage.completion_tokens,
97
+ },
98
+ };
99
+ return ret;
100
+ } catch (err) {
101
+ return {
102
+ error: `API response error: ${String(err)}: ${JSON.stringify(data)}`,
103
+ tokenUsage: {
104
+ total: data?.usage?.total_tokens,
105
+ prompt: data?.usage?.prompt_tokens,
106
+ completion: data?.usage?.completion_tokens,
107
+ },
108
+ };
109
+ }
110
+ }
111
+ }
112
+
113
+ export class AzureOpenAiCompletionProvider extends AzureOpenAiGenericProvider {
114
+ options: AzureOpenAiCompletionOptions;
115
+
116
+ constructor(deploymentName: string, apiKey?: string, context?: AzureOpenAiCompletionOptions) {
117
+ super(deploymentName, apiKey);
118
+ this.options = context || {};
119
+ }
120
+
121
+ async callApi(prompt: string, options?: AzureOpenAiCompletionOptions): Promise<ProviderResponse> {
122
+ if (!this.apiKey) {
123
+ throw new Error(
124
+ 'Azure OpenAI API key is not set. Set AZURE_OPENAI_API_KEY environment variable or pass it as an argument to the constructor.',
125
+ );
126
+ }
127
+ if (!this.apiHost) {
128
+ throw new Error('Azure OpenAI API host must be set');
129
+ }
130
+
131
+ let stop: string;
132
+ try {
133
+ stop = process.env.OPENAI_STOP
134
+ ? JSON.parse(process.env.OPENAI_STOP)
135
+ : ['<|im_end|>', '<|endoftext|>'];
136
+ } catch (err) {
137
+ throw new Error(`OPENAI_STOP is not a valid JSON string: ${err}`);
138
+ }
139
+ const body = {
140
+ model: this.deploymentName,
141
+ prompt,
142
+ max_tokens: parseInt(process.env.OPENAI_MAX_TOKENS || '1024'),
143
+ temperature:
144
+ options?.temperature ??
145
+ this.options.temperature ??
146
+ parseFloat(process.env.OPENAI_TEMPERATURE || '0'),
147
+ stop,
148
+ };
149
+ logger.debug(`Calling Azure OpenAI API: ${JSON.stringify(body)}`);
150
+ let data,
151
+ cached = false;
152
+ try {
153
+ ({ data, cached } = (await fetchJsonWithCache(
154
+ `https://${this.apiHost}/openai/deployments/${this.deploymentName}/completions?api-version=2023-07-01-preview`,
155
+ {
156
+ method: 'POST',
157
+ headers: {
158
+ 'Content-Type': 'application/json',
159
+ 'api-key': this.apiKey,
160
+ },
161
+ body: JSON.stringify(body),
162
+ },
163
+ REQUEST_TIMEOUT_MS,
164
+ )) as unknown as any);
165
+ } catch (err) {
166
+ return {
167
+ error: `API call error: ${String(err)}`,
168
+ };
169
+ }
170
+ logger.debug(`\tAzure OpenAI API response: ${JSON.stringify(data)}`);
171
+ try {
172
+ return {
173
+ output: data.choices[0].text,
174
+ tokenUsage: cached
175
+ ? { cached: data.usage.total_tokens }
176
+ : {
177
+ total: data.usage.total_tokens,
178
+ prompt: data.usage.prompt_tokens,
179
+ completion: data.usage.completion_tokens,
180
+ },
181
+ };
182
+ } catch (err) {
183
+ return {
184
+ error: `API response error: ${String(err)}: ${JSON.stringify(data)}`,
185
+ };
186
+ }
187
+ }
188
+ }
189
+
190
/**
 * Chat-completion provider for Azure OpenAI deployments.
 * Mirrors the OpenAI chat provider but authenticates with an `api-key` header
 * against the deployment-scoped Azure endpoint.
 */
export class AzureOpenAiChatCompletionProvider extends AzureOpenAiGenericProvider {
  // Defaults supplied at construction time; per-call options override them.
  options: AzureOpenAiCompletionOptions;

  constructor(deploymentName: string, apiKey?: string, context?: AzureOpenAiCompletionOptions) {
    super(deploymentName, apiKey);
    this.options = context || {};
  }

  /**
   * Call the Azure OpenAI chat-completions endpoint.
   * @param prompt either a JSON-encoded message array or plain text (which is
   *   wrapped as a single user message).
   * @param options per-call overrides for the constructor-supplied options.
   * @returns output text (or the function_call payload) and token usage on
   *   success; an `error` field on failure.
   * @throws when the API key/host is missing, or the prompt looks like JSON
   *   (or PROMPTFOO_REQUIRE_JSON_PROMPTS is set) but fails to parse.
   */
  async callApi(prompt: string, options?: AzureOpenAiCompletionOptions): Promise<ProviderResponse> {
    if (!this.apiKey) {
      throw new Error(
        'Azure OpenAI API key is not set. Set AZURE_OPENAI_API_KEY environment variable or pass it as an argument to the constructor.',
      );
    }
    if (!this.apiHost) {
      throw new Error('Azure OpenAI API host must be set');
    }

    // Parse failures are only fatal when the caller clearly intended JSON;
    // otherwise the raw prompt becomes a single user message.
    let messages: { role: string; content: string; name?: string }[];
    try {
      messages = JSON.parse(prompt) as { role: string; content: string }[];
    } catch (err) {
      const trimmedPrompt = prompt.trim();
      if (
        process.env.PROMPTFOO_REQUIRE_JSON_PROMPTS ||
        trimmedPrompt.startsWith('{') ||
        trimmedPrompt.startsWith('[')
      ) {
        throw new Error(
          `Azure OpenAI Chat Completion prompt is not a valid JSON string: ${err}\n\n${prompt}`,
        );
      }
      messages = [{ role: 'user', content: prompt }];
    }

    const body = {
      model: this.deploymentName,
      messages: messages,
      max_tokens: parseInt(process.env.OPENAI_MAX_TOKENS || '1024'),
      // Precedence: per-call option > constructor option > env var > 0.
      temperature:
        options?.temperature ??
        this.options.temperature ??
        parseFloat(process.env.OPENAI_TEMPERATURE || '0'),
      functions: options?.functions || this.options.functions || undefined,
      function_call: options?.function_call || this.options.function_call || undefined,
    };
    logger.debug(`Calling Azure OpenAI API: ${JSON.stringify(body)}`);

    let data,
      cached = false;
    try {
      ({ data, cached } = (await fetchJsonWithCache(
        `https://${this.apiHost}/openai/deployments/${this.deploymentName}/chat/completions?api-version=2023-07-01-preview`,
        {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'api-key': this.apiKey,
          },
          body: JSON.stringify(body),
        },
        REQUEST_TIMEOUT_MS,
      )) as unknown as any);
    } catch (err) {
      return {
        error: `API call error: ${String(err)}`,
      };
    }

    logger.debug(`\tAzure OpenAI API response: ${JSON.stringify(data)}`);
    try {
      // A null content means the model chose to call a function; surface the
      // function_call payload as the output instead.
      const message = data.choices[0].message;
      const output =
        message.content === null ? JSON.stringify(message.function_call) : message.content;
      return {
        output,
        // Cached responses report only the total; fresh ones break usage down.
        tokenUsage: cached
          ? { cached: data.usage.total_tokens }
          : {
              total: data.usage.total_tokens,
              prompt: data.usage.prompt_tokens,
              completion: data.usage.completion_tokens,
            },
      };
    } catch (err) {
      return {
        error: `API response error: ${String(err)}: ${JSON.stringify(data)}`,
      };
    }
  }
}
@@ -1,3 +1,5 @@
1
+ import yaml from 'js-yaml';
2
+
1
3
  import logger from '../logger';
2
4
  import { fetchJsonWithCache } from '../cache';
3
5
  import { REQUEST_TIMEOUT_MS } from './shared';
@@ -226,20 +228,34 @@ export class OpenAiChatCompletionProvider extends OpenAiGenericProvider {
226
228
  }
227
229
 
228
230
  let messages: { role: string; content: string; name?: string }[];
229
- try {
230
- messages = JSON.parse(prompt) as { role: string; content: string }[];
231
- } catch (err) {
232
- const trimmedPrompt = prompt.trim();
233
- if (
234
- process.env.PROMPTFOO_REQUIRE_JSON_PROMPTS ||
235
- trimmedPrompt.startsWith('{') ||
236
- trimmedPrompt.startsWith('[')
237
- ) {
231
+ const trimmedPrompt = prompt.trim();
232
+ if (trimmedPrompt.startsWith('- role:')) {
233
+ try {
234
+ // Try YAML
235
+ messages = yaml.load(prompt) as { role: string; content: string }[];
236
+ } catch (err) {
238
237
  throw new Error(
239
- `OpenAI Chat Completion prompt is not a valid JSON string: ${err}\n\n${prompt}`,
238
+ `OpenAI Chat Completion prompt is not a valid YAML string: ${err}\n\n${prompt}`,
240
239
  );
241
240
  }
242
- messages = [{ role: 'user', content: prompt }];
241
+ } else {
242
+ try {
243
+ // Try JSON
244
+ messages = JSON.parse(prompt) as { role: string; content: string }[];
245
+ } catch (err) {
246
+ if (
247
+ process.env.PROMPTFOO_REQUIRE_JSON_PROMPTS ||
248
+ trimmedPrompt.startsWith('{') ||
249
+ trimmedPrompt.startsWith('[')
250
+ ) {
251
+ throw new Error(
252
+ `OpenAI Chat Completion prompt is not a valid JSON string: ${err}\n\n${prompt}`,
253
+ );
254
+ }
255
+
256
+ // Fall back to wrapping the prompt in a user message
257
+ messages = [{ role: 'user', content: prompt }];
258
+ }
243
259
  }
244
260
 
245
261
  const body = {
package/src/providers.ts CHANGED
@@ -6,6 +6,10 @@ import { OpenAiCompletionProvider, OpenAiChatCompletionProvider } from './provid
6
6
  import { AnthropicCompletionProvider } from './providers/anthropic';
7
7
  import { LocalAiCompletionProvider, LocalAiChatProvider } from './providers/localai';
8
8
  import { ScriptCompletionProvider } from './providers/scriptCompletion';
9
+ import {
10
+ AzureOpenAiChatCompletionProvider,
11
+ AzureOpenAiCompletionProvider,
12
+ } from './providers/azureopenai';
9
13
 
10
14
  export async function loadApiProviders(
11
15
  providerPaths: ProviderId | ProviderId[] | RawProviderConfig[],
@@ -68,6 +72,21 @@ export async function loadApiProvider(
68
72
  `Unknown OpenAI model type: ${modelType}. Use one of the following providers: openai:chat:<model name>, openai:completion:<model name>`,
69
73
  );
70
74
  }
75
+ } else if (providerPath?.startsWith('azureopenai:')) {
76
+ // Load Azure OpenAI module
77
+ const options = providerPath.split(':');
78
+ const modelType = options[1];
79
+ const deploymentName = options[2];
80
+
81
+ if (modelType === 'chat') {
82
+ return new AzureOpenAiChatCompletionProvider(deploymentName, undefined, context?.config);
83
+ } else if (modelType === 'completion') {
84
+ return new AzureOpenAiCompletionProvider(deploymentName, undefined, context?.config);
85
+ } else {
86
+ throw new Error(
87
+ `Unknown Azure OpenAI model type: ${modelType}. Use one of the following providers: openai:chat:<model name>, openai:completion:<model name>`,
88
+ );
89
+ }
71
90
  } else if (providerPath?.startsWith('anthropic:')) {
72
91
  // Load Anthropic module
73
92
  const options = providerPath.split(':');
package/src/telemetry.ts CHANGED
@@ -1,5 +1,8 @@
1
+ import chalk from 'chalk';
2
+
1
3
  import packageJson from '../package.json';
2
- import { fetchWithTimeout } from './util';
4
+ import logger from './logger';
5
+ import { fetchWithTimeout, maybeRecordFirstRun } from './util';
3
6
 
4
7
  type TelemetryEvent = {
5
8
  event: string;
@@ -30,6 +33,16 @@ export class Telemetry {
30
33
  }
31
34
  }
32
35
 
36
+ maybeShowNotice(): void {
37
+ if (maybeRecordFirstRun()) {
38
+ logger.info(
39
+ chalk.gray(
40
+ 'Anonymous telemetry is enabled. For more info, see https://www.promptfoo.dev/docs/configuration/telemetry',
41
+ ),
42
+ );
43
+ }
44
+ }
45
+
33
46
  async send(): Promise<void> {
34
47
  if (!this.disabled && this.events.length > 0) {
35
48
  try {
package/src/types.ts CHANGED
@@ -136,6 +136,7 @@ type BaseAssertionTypes =
136
136
  | 'icontains'
137
137
  | 'contains-all'
138
138
  | 'contains-any'
139
+ | 'starts-with'
139
140
  | 'regex'
140
141
  | 'is-json'
141
142
  | 'contains-json'
@@ -230,6 +231,9 @@ export interface TestSuiteConfig {
230
231
 
231
232
  // Path to write output. Writes to console/web viewer if not set.
232
233
  outputPath?: string;
234
+
235
+ // Determines whether or not sharing is enabled.
236
+ sharing?: boolean;
233
237
  }
234
238
 
235
239
  export type UnifiedConfig = TestSuiteConfig & {
package/src/updates.ts CHANGED
@@ -17,7 +17,16 @@ export async function getLatestVersion(packageName: string) {
17
17
  }
18
18
 
19
19
  export async function checkForUpdates(): Promise<boolean> {
20
- const latestVersion = await getLatestVersion('promptfoo');
20
+ if (process.env.PROMPTFOO_DISABLE_UPDATE) {
21
+ return false;
22
+ }
23
+
24
+ let latestVersion: string;
25
+ try {
26
+ latestVersion = await getLatestVersion('promptfoo');
27
+ } catch {
28
+ return false;
29
+ }
21
30
  if (semverGt(latestVersion, VERSION)) {
22
31
  const border = '='.repeat(process.stdout.columns - 10);
23
32
  logger.info(
package/src/util.ts CHANGED
@@ -37,6 +37,46 @@ function parseJson(json: string): any | undefined {
37
37
  }
38
38
  }
39
39
 
40
+ let globalConfigCache: any = null;
41
+
42
// Drop the in-memory copy of the global config so the next readGlobalConfig()
// call re-reads (or re-creates) the file on disk. Primarily useful for tests.
export function resetGlobalConfig(): void {
  globalConfigCache = null;
}
45
+
46
+ export function readGlobalConfig(): any {
47
+ if (!globalConfigCache) {
48
+ const configDir = getConfigDirectoryPath();
49
+ const configFilePath = path.join(configDir, 'promptfoo.yaml');
50
+
51
+ if (fs.existsSync(configFilePath)) {
52
+ globalConfigCache = yaml.load(fs.readFileSync(configFilePath, 'utf-8'));
53
+ } else {
54
+ if (!fs.existsSync(configDir)) {
55
+ fs.mkdirSync(configDir, { recursive: true });
56
+ }
57
+ globalConfigCache = { hasRun: false };
58
+ fs.writeFileSync(configFilePath, yaml.dump(globalConfigCache));
59
+ }
60
+ }
61
+
62
+ return globalConfigCache;
63
+ }
64
+
65
+ export function maybeRecordFirstRun(): boolean {
66
+ // Return true if first run
67
+ try {
68
+ const config = readGlobalConfig();
69
+ if (!config.hasRun) {
70
+ config.hasRun = true;
71
+ fs.writeFileSync(path.join(getConfigDirectoryPath(), 'promptfoo.yaml'), yaml.dump(config));
72
+ return true;
73
+ }
74
+ return false;
75
+ } catch (err) {
76
+ return false;
77
+ }
78
+ }
79
+
40
80
  export async function maybeReadConfig(configPath: string): Promise<UnifiedConfig | undefined> {
41
81
  if (!fs.existsSync(configPath)) {
42
82
  return undefined;