opennote-cli 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/LICENSE +22 -0
  2. package/README.md +272 -0
  3. package/package.json +30 -0
  4. package/src/arrangement.js +282 -0
  5. package/src/arrangement.ts +317 -0
  6. package/src/assets/cover.png +0 -0
  7. package/src/cli.js +1398 -0
  8. package/src/cli.ts +1709 -0
  9. package/src/exportAudio.js +459 -0
  10. package/src/exportAudio.ts +499 -0
  11. package/src/exportMidi.js +85 -0
  12. package/src/exportMidi.ts +103 -0
  13. package/src/ffmpeg-static.d.ts +5 -0
  14. package/src/fx.js +28 -0
  15. package/src/fx.ts +49 -0
  16. package/src/generator.js +15 -0
  17. package/src/generator.ts +29 -0
  18. package/src/index.js +511 -0
  19. package/src/index.ts +642 -0
  20. package/src/instrument.js +35 -0
  21. package/src/instrument.ts +51 -0
  22. package/src/midi.js +167 -0
  23. package/src/midi.ts +218 -0
  24. package/src/openExport.js +22 -0
  25. package/src/openExport.ts +24 -0
  26. package/src/prompt.js +22 -0
  27. package/src/prompt.ts +25 -0
  28. package/src/providers/auth.js +23 -0
  29. package/src/providers/auth.ts +30 -0
  30. package/src/providers/claudeProvider.js +46 -0
  31. package/src/providers/claudeProvider.ts +50 -0
  32. package/src/providers/factory.js +39 -0
  33. package/src/providers/factory.ts +43 -0
  34. package/src/providers/geminiProvider.js +55 -0
  35. package/src/providers/geminiProvider.ts +71 -0
  36. package/src/providers/grokProvider.js +57 -0
  37. package/src/providers/grokProvider.ts +69 -0
  38. package/src/providers/groqProvider.js +57 -0
  39. package/src/providers/groqProvider.ts +69 -0
  40. package/src/providers/mockProvider.js +13 -0
  41. package/src/providers/mockProvider.ts +15 -0
  42. package/src/providers/openaiProvider.js +45 -0
  43. package/src/providers/openaiProvider.ts +49 -0
  44. package/src/providers/retry.js +46 -0
  45. package/src/providers/retry.ts +54 -0
  46. package/src/types.js +1 -0
  47. package/src/types.ts +17 -0
  48. package/src/validate.js +10 -0
  49. package/src/validate.ts +13 -0
  50. package/tsconfig.json +13 -0
@@ -0,0 +1,39 @@
1
import { ClaudeProvider } from './claudeProvider';
import { GeminiProvider } from './geminiProvider';
import { GrokProvider } from './grokProvider';
import { GroqProvider } from './groqProvider';
import { MockProvider } from './mockProvider';
import { OpenAIProvider } from './openaiProvider';

// Reads a required API key from the environment, throwing when it is absent.
function requireEnv(varName) {
    const value = process.env[varName];
    if (!value)
        throw new Error(`Missing ${varName}`);
    return value;
}

// Constructs the LLM provider selected by name. Model names can be overridden
// via environment variables; anything unrecognized falls back to Claude.
export function buildProvider(name) {
    switch (name) {
        case 'mock':
            return new MockProvider();
        case 'openai':
            return new OpenAIProvider(requireEnv('OPENAI_API_KEY'), process.env.OPENAI_MODEL || 'gpt-4.1-mini');
        case 'gemini':
            return new GeminiProvider(requireEnv('GEMINI_API_KEY'), process.env.GEMINI_MODEL || 'gemini-2.0-flash');
        case 'groq':
            return new GroqProvider(requireEnv('GROQ_API_KEY'), process.env.GROQ_MODEL || 'llama-3.3-70b-versatile');
        case 'grok':
            return new GrokProvider(requireEnv('XAI_API_KEY'), process.env.GROK_MODEL || 'grok-2-latest');
        default:
            return new ClaudeProvider(requireEnv('ANTHROPIC_API_KEY'), process.env.CLAUDE_MODEL || 'claude-3-5-sonnet-latest');
    }
}
@@ -0,0 +1,43 @@
1
+ import type { LLMProvider } from '../types';
2
+ import { ClaudeProvider } from './claudeProvider';
3
+ import { GeminiProvider } from './geminiProvider';
4
+ import { GrokProvider } from './grokProvider';
5
+ import { GroqProvider } from './groqProvider';
6
+ import { MockProvider } from './mockProvider';
7
+ import { OpenAIProvider } from './openaiProvider';
8
+
9
+ export type ProviderName = 'mock' | 'openai' | 'gemini' | 'claude' | 'groq' | 'grok';
10
+
11
+ export function buildProvider(name: ProviderName): LLMProvider {
12
+ if (name === 'mock') {
13
+ return new MockProvider();
14
+ }
15
+
16
+ if (name === 'openai') {
17
+ const apiKey = process.env.OPENAI_API_KEY;
18
+ if (!apiKey) throw new Error('Missing OPENAI_API_KEY');
19
+ return new OpenAIProvider(apiKey, process.env.OPENAI_MODEL || 'gpt-4.1-mini');
20
+ }
21
+
22
+ if (name === 'gemini') {
23
+ const apiKey = process.env.GEMINI_API_KEY;
24
+ if (!apiKey) throw new Error('Missing GEMINI_API_KEY');
25
+ return new GeminiProvider(apiKey, process.env.GEMINI_MODEL || 'gemini-2.0-flash');
26
+ }
27
+
28
+ if (name === 'groq') {
29
+ const apiKey = process.env.GROQ_API_KEY;
30
+ if (!apiKey) throw new Error('Missing GROQ_API_KEY');
31
+ return new GroqProvider(apiKey, process.env.GROQ_MODEL || 'llama-3.3-70b-versatile');
32
+ }
33
+
34
+ if (name === 'grok') {
35
+ const apiKey = process.env.XAI_API_KEY;
36
+ if (!apiKey) throw new Error('Missing XAI_API_KEY');
37
+ return new GrokProvider(apiKey, process.env.GROK_MODEL || 'grok-2-latest');
38
+ }
39
+
40
+ const apiKey = process.env.ANTHROPIC_API_KEY;
41
+ if (!apiKey) throw new Error('Missing ANTHROPIC_API_KEY');
42
+ return new ClaudeProvider(apiKey, process.env.CLAUDE_MODEL || 'claude-3-5-sonnet-latest');
43
+ }
@@ -0,0 +1,55 @@
1
import { assertHeaderSafeApiKey } from './auth';
import { systemPrompt, userPrompt } from '../prompt';
import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';

// Concatenates the text parts of the first candidate in a Gemini response.
function extractText(json) {
    const parts = json.candidates?.[0]?.content?.parts ?? [];
    return parts
        .map((p) => (typeof p.text === 'string' ? p.text : ''))
        .join('')
        .trim();
}

// LLM provider backed by the Google Gemini generateContent API.
export class GeminiProvider {
    apiKey;
    model;
    constructor(apiKey, model = 'gemini-2.0-flash') {
        this.apiKey = apiKey;
        this.model = model;
    }
    // Asks Gemini for the next note, retrying transient HTTP failures with
    // backoff. The model reply is expected to be JSON text for a note object.
    async nextNote(input) {
        const apiKey = assertHeaderSafeApiKey('Gemini', this.apiKey);
        const maxRetries = 4;
        for (let attempt = 0;; attempt++) {
            // Send the key as the x-goog-api-key header rather than a ?key=
            // query parameter so it cannot leak into access logs or proxies.
            const res = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${encodeURIComponent(this.model)}:generateContent`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    'x-goog-api-key': apiKey,
                },
                body: JSON.stringify({
                    systemInstruction: {
                        parts: [{ text: systemPrompt() }],
                    },
                    contents: [
                        {
                            role: 'user',
                            parts: [{ text: userPrompt(input) }],
                        },
                    ],
                }),
            });
            if (!res.ok) {
                const text = await res.text();
                if (attempt < maxRetries && shouldRetryStatus(res.status)) {
                    const delayMs = computeRetryDelayMs(res.headers.get('retry-after'), text, attempt);
                    await waitForRetry(delayMs);
                    continue;
                }
                throw new Error(`Gemini request failed: ${res.status} ${text}`);
            }
            const json = (await res.json());
            const text = extractText(json);
            if (!text)
                throw new Error('Gemini response missing text content');
            // NOTE(review): assumes the model returns bare JSON (no markdown
            // fences) — confirm against the prompt contract.
            return JSON.parse(text);
        }
    }
}
@@ -0,0 +1,71 @@
1
+ import type { GeneratedNote, LLMProvider, NextNoteRequest } from '../types';
2
+ import { assertHeaderSafeApiKey } from './auth';
3
+ import { systemPrompt, userPrompt } from '../prompt';
4
+ import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';
5
+
6
+ type GeminiResponse = {
7
+ candidates?: Array<{
8
+ content?: {
9
+ parts?: Array<{ text?: string }>;
10
+ };
11
+ }>;
12
+ };
13
+
14
+ function extractText(json: GeminiResponse): string {
15
+ const parts = json.candidates?.[0]?.content?.parts ?? [];
16
+ return parts
17
+ .map((p) => (typeof p.text === 'string' ? p.text : ''))
18
+ .join('')
19
+ .trim();
20
+ }
21
+
22
+ export class GeminiProvider implements LLMProvider {
23
+ constructor(
24
+ private readonly apiKey: string,
25
+ private readonly model = 'gemini-2.0-flash',
26
+ ) {}
27
+
28
+ async nextNote(input: NextNoteRequest): Promise<GeneratedNote> {
29
+ const apiKey = assertHeaderSafeApiKey('Gemini', this.apiKey);
30
+ const maxRetries = 4;
31
+
32
+ for (let attempt = 0; ; attempt++) {
33
+ const res = await fetch(
34
+ `https://generativelanguage.googleapis.com/v1beta/models/${encodeURIComponent(this.model)}:generateContent?key=${encodeURIComponent(apiKey)}`,
35
+ {
36
+ method: 'POST',
37
+ headers: {
38
+ 'Content-Type': 'application/json',
39
+ },
40
+ body: JSON.stringify({
41
+ systemInstruction: {
42
+ parts: [{ text: systemPrompt() }],
43
+ },
44
+ contents: [
45
+ {
46
+ role: 'user',
47
+ parts: [{ text: userPrompt(input) }],
48
+ },
49
+ ],
50
+ }),
51
+ },
52
+ );
53
+
54
+ if (!res.ok) {
55
+ const text = await res.text();
56
+ if (attempt < maxRetries && shouldRetryStatus(res.status)) {
57
+ const delayMs = computeRetryDelayMs(res.headers.get('retry-after'), text, attempt);
58
+ await waitForRetry(delayMs);
59
+ continue;
60
+ }
61
+ throw new Error(`Gemini request failed: ${res.status} ${text}`);
62
+ }
63
+
64
+ const json = (await res.json()) as GeminiResponse;
65
+ const text = extractText(json);
66
+ if (!text) throw new Error('Gemini response missing text content');
67
+ return JSON.parse(text) as GeneratedNote;
68
+ }
69
+ }
70
+ }
71
+
@@ -0,0 +1,57 @@
1
import { assertHeaderSafeApiKey } from './auth';
import { systemPrompt, userPrompt } from '../prompt';
import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';

// Normalizes chat message content (plain string or text-part array) to text.
function extractContent(content) {
    if (typeof content === 'string')
        return content;
    if (!Array.isArray(content))
        return '';
    let combined = '';
    for (const part of content) {
        if (typeof part?.text === 'string')
            combined += part.text;
    }
    return combined.trim();
}

// LLM provider backed by xAI's OpenAI-compatible chat-completions API.
export class GrokProvider {
    apiKey;
    model;
    constructor(apiKey, model = 'grok-2-latest') {
        this.apiKey = apiKey;
        this.model = model;
    }
    // Requests the next note, retrying transient HTTP failures up to 4 times.
    async nextNote(input) {
        const apiKey = assertHeaderSafeApiKey('Grok', this.apiKey);
        const maxRetries = 4;
        let attempt = 0;
        while (true) {
            const res = await fetch('https://api.x.ai/v1/chat/completions', {
                method: 'POST',
                headers: {
                    Authorization: `Bearer ${apiKey}`,
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    model: this.model,
                    temperature: 0.7,
                    messages: [
                        { role: 'system', content: systemPrompt() },
                        { role: 'user', content: userPrompt(input) },
                    ],
                }),
            });
            if (res.ok) {
                const json = (await res.json());
                const text = extractContent(json.choices?.[0]?.message?.content);
                if (!text)
                    throw new Error('Grok response missing content');
                return JSON.parse(text);
            }
            const errBody = await res.text();
            if (attempt >= maxRetries || !shouldRetryStatus(res.status))
                throw new Error(`Grok request failed: ${res.status} ${errBody}`);
            await waitForRetry(computeRetryDelayMs(res.headers.get('retry-after'), errBody, attempt));
            attempt += 1;
        }
    }
}
@@ -0,0 +1,69 @@
1
+ import type { GeneratedNote, LLMProvider, NextNoteRequest } from '../types';
2
+ import { assertHeaderSafeApiKey } from './auth';
3
+ import { systemPrompt, userPrompt } from '../prompt';
4
+ import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';
5
+
6
+ type ChatCompletionResponse = {
7
+ choices?: Array<{
8
+ message?: {
9
+ content?: string | Array<{ type?: string; text?: string }>;
10
+ };
11
+ }>;
12
+ };
13
+
14
+ type ChatContent = string | Array<{ type?: string; text?: string }> | undefined;
15
+
16
+ function extractContent(content: ChatContent): string {
17
+ if (typeof content === 'string') return content;
18
+ if (Array.isArray(content)) {
19
+ return content
20
+ .map((c) => (typeof c?.text === 'string' ? c.text : ''))
21
+ .join('')
22
+ .trim();
23
+ }
24
+ return '';
25
+ }
26
+
27
+ export class GrokProvider implements LLMProvider {
28
+ constructor(
29
+ private readonly apiKey: string,
30
+ private readonly model = 'grok-2-latest',
31
+ ) {}
32
+
33
+ async nextNote(input: NextNoteRequest): Promise<GeneratedNote> {
34
+ const apiKey = assertHeaderSafeApiKey('Grok', this.apiKey);
35
+ const maxRetries = 4;
36
+ for (let attempt = 0; ; attempt++) {
37
+ const res = await fetch('https://api.x.ai/v1/chat/completions', {
38
+ method: 'POST',
39
+ headers: {
40
+ Authorization: `Bearer ${apiKey}`,
41
+ 'Content-Type': 'application/json',
42
+ },
43
+ body: JSON.stringify({
44
+ model: this.model,
45
+ temperature: 0.7,
46
+ messages: [
47
+ { role: 'system', content: systemPrompt() },
48
+ { role: 'user', content: userPrompt(input) },
49
+ ],
50
+ }),
51
+ });
52
+
53
+ if (!res.ok) {
54
+ const text = await res.text();
55
+ if (attempt < maxRetries && shouldRetryStatus(res.status)) {
56
+ const delayMs = computeRetryDelayMs(res.headers.get('retry-after'), text, attempt);
57
+ await waitForRetry(delayMs);
58
+ continue;
59
+ }
60
+ throw new Error(`Grok request failed: ${res.status} ${text}`);
61
+ }
62
+
63
+ const json = (await res.json()) as ChatCompletionResponse;
64
+ const text = extractContent(json.choices?.[0]?.message?.content);
65
+ if (!text) throw new Error('Grok response missing content');
66
+ return JSON.parse(text) as GeneratedNote;
67
+ }
68
+ }
69
+ }
@@ -0,0 +1,57 @@
1
import { assertHeaderSafeApiKey } from './auth';
import { systemPrompt, userPrompt } from '../prompt';
import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';

// Normalizes chat message content (plain string or text-part array) to text.
function extractContent(content) {
    if (typeof content === 'string')
        return content;
    if (!Array.isArray(content))
        return '';
    let combined = '';
    for (const part of content) {
        if (typeof part?.text === 'string')
            combined += part.text;
    }
    return combined.trim();
}

// LLM provider backed by Groq's OpenAI-compatible chat-completions API.
export class GroqProvider {
    apiKey;
    model;
    constructor(apiKey, model = 'llama-3.3-70b-versatile') {
        this.apiKey = apiKey;
        this.model = model;
    }
    // Requests the next note, retrying transient HTTP failures up to 4 times.
    async nextNote(input) {
        const apiKey = assertHeaderSafeApiKey('Groq', this.apiKey);
        const maxRetries = 4;
        let attempt = 0;
        while (true) {
            const res = await fetch('https://api.groq.com/openai/v1/chat/completions', {
                method: 'POST',
                headers: {
                    Authorization: `Bearer ${apiKey}`,
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    model: this.model,
                    temperature: 0.7,
                    messages: [
                        { role: 'system', content: systemPrompt() },
                        { role: 'user', content: userPrompt(input) },
                    ],
                }),
            });
            if (res.ok) {
                const json = (await res.json());
                const text = extractContent(json.choices?.[0]?.message?.content);
                if (!text)
                    throw new Error('Groq response missing content');
                return JSON.parse(text);
            }
            const errBody = await res.text();
            if (attempt >= maxRetries || !shouldRetryStatus(res.status))
                throw new Error(`Groq request failed: ${res.status} ${errBody}`);
            await waitForRetry(computeRetryDelayMs(res.headers.get('retry-after'), errBody, attempt));
            attempt += 1;
        }
    }
}
@@ -0,0 +1,69 @@
1
+ import type { GeneratedNote, LLMProvider, NextNoteRequest } from '../types';
2
+ import { assertHeaderSafeApiKey } from './auth';
3
+ import { systemPrompt, userPrompt } from '../prompt';
4
+ import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';
5
+
6
+ type ChatCompletionResponse = {
7
+ choices?: Array<{
8
+ message?: {
9
+ content?: string | Array<{ type?: string; text?: string }>;
10
+ };
11
+ }>;
12
+ };
13
+
14
+ type ChatContent = string | Array<{ type?: string; text?: string }> | undefined;
15
+
16
+ function extractContent(content: ChatContent): string {
17
+ if (typeof content === 'string') return content;
18
+ if (Array.isArray(content)) {
19
+ return content
20
+ .map((c) => (typeof c?.text === 'string' ? c.text : ''))
21
+ .join('')
22
+ .trim();
23
+ }
24
+ return '';
25
+ }
26
+
27
+ export class GroqProvider implements LLMProvider {
28
+ constructor(
29
+ private readonly apiKey: string,
30
+ private readonly model = 'llama-3.3-70b-versatile',
31
+ ) {}
32
+
33
+ async nextNote(input: NextNoteRequest): Promise<GeneratedNote> {
34
+ const apiKey = assertHeaderSafeApiKey('Groq', this.apiKey);
35
+ const maxRetries = 4;
36
+ for (let attempt = 0; ; attempt++) {
37
+ const res = await fetch('https://api.groq.com/openai/v1/chat/completions', {
38
+ method: 'POST',
39
+ headers: {
40
+ Authorization: `Bearer ${apiKey}`,
41
+ 'Content-Type': 'application/json',
42
+ },
43
+ body: JSON.stringify({
44
+ model: this.model,
45
+ temperature: 0.7,
46
+ messages: [
47
+ { role: 'system', content: systemPrompt() },
48
+ { role: 'user', content: userPrompt(input) },
49
+ ],
50
+ }),
51
+ });
52
+
53
+ if (!res.ok) {
54
+ const text = await res.text();
55
+ if (attempt < maxRetries && shouldRetryStatus(res.status)) {
56
+ const delayMs = computeRetryDelayMs(res.headers.get('retry-after'), text, attempt);
57
+ await waitForRetry(delayMs);
58
+ continue;
59
+ }
60
+ throw new Error(`Groq request failed: ${res.status} ${text}`);
61
+ }
62
+
63
+ const json = (await res.json()) as ChatCompletionResponse;
64
+ const text = extractContent(json.choices?.[0]?.message?.content);
65
+ if (!text) throw new Error('Groq response missing content');
66
+ return JSON.parse(text) as GeneratedNote;
67
+ }
68
+ }
69
+ }
@@ -0,0 +1,13 @@
1
// Simple fallback provider for local testing without API keys.
export class MockProvider {
    // Walks a random interval (-2..+3 semitones, never 0) from the previous
    // pitch, clamped to the 48..84 MIDI range; velocity/duration are fixed.
    async nextNote(input) {
        const previous = input.history.at(-1)?.pitch ?? input.seedPitch;
        const steps = [-2, -1, 1, 2, 3];
        const chosen = steps[Math.floor(Math.random() * steps.length)];
        let pitch = previous + chosen;
        if (pitch < 48)
            pitch = 48;
        else if (pitch > 84)
            pitch = 84;
        return { pitch, velocity: 90, durationMs: 300 };
    }
}
@@ -0,0 +1,15 @@
1
+ import type { GeneratedNote, LLMProvider, NextNoteRequest } from '../types';
2
+
3
+ // Simple fallback provider for local testing without API keys.
4
+ export class MockProvider implements LLMProvider {
5
+ async nextNote(input: NextNoteRequest): Promise<GeneratedNote> {
6
+ const last = input.history.at(-1)?.pitch ?? input.seedPitch;
7
+ const stepChoices = [-2, -1, 1, 2, 3];
8
+ const step = stepChoices[Math.floor(Math.random() * stepChoices.length)];
9
+ return {
10
+ pitch: Math.max(48, Math.min(84, last + step)),
11
+ velocity: 90,
12
+ durationMs: 300,
13
+ };
14
+ }
15
+ }
@@ -0,0 +1,45 @@
1
import { assertHeaderSafeApiKey } from './auth';
import { systemPrompt, userPrompt } from '../prompt';
import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';

// LLM provider backed by the OpenAI Responses API.
export class OpenAIProvider {
    apiKey;
    model;
    constructor(apiKey, model = 'gpt-4.1-mini') {
        this.apiKey = apiKey;
        this.model = model;
    }
    // Requests the next note, retrying transient HTTP failures up to 4 times.
    async nextNote(input) {
        const apiKey = assertHeaderSafeApiKey('OpenAI', this.apiKey);
        const maxRetries = 4;
        let attempt = 0;
        while (true) {
            const res = await fetch('https://api.openai.com/v1/responses', {
                method: 'POST',
                headers: {
                    'Authorization': `Bearer ${apiKey}`,
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    model: this.model,
                    input: [
                        { role: 'system', content: [{ type: 'input_text', text: systemPrompt() }] },
                        { role: 'user', content: [{ type: 'input_text', text: userPrompt(input) }] },
                    ],
                }),
            });
            if (res.ok) {
                const json = await res.json();
                if (!json.output_text) {
                    throw new Error('OpenAI response missing output_text');
                }
                return JSON.parse(json.output_text);
            }
            const errBody = await res.text();
            if (attempt >= maxRetries || !shouldRetryStatus(res.status))
                throw new Error(`OpenAI request failed: ${res.status} ${errBody}`);
            await waitForRetry(computeRetryDelayMs(res.headers.get('retry-after'), errBody, attempt));
            attempt += 1;
        }
    }
}
@@ -0,0 +1,49 @@
1
+ import type { GeneratedNote, LLMProvider, NextNoteRequest } from '../types';
2
+ import { assertHeaderSafeApiKey } from './auth';
3
+ import { systemPrompt, userPrompt } from '../prompt';
4
+ import { computeRetryDelayMs, shouldRetryStatus, waitForRetry } from './retry';
5
+
6
+ export class OpenAIProvider implements LLMProvider {
7
+ constructor(
8
+ private readonly apiKey: string,
9
+ private readonly model = 'gpt-4.1-mini',
10
+ ) {}
11
+
12
+ async nextNote(input: NextNoteRequest): Promise<GeneratedNote> {
13
+ const apiKey = assertHeaderSafeApiKey('OpenAI', this.apiKey);
14
+ const maxRetries = 4;
15
+ for (let attempt = 0; ; attempt++) {
16
+ const res = await fetch('https://api.openai.com/v1/responses', {
17
+ method: 'POST',
18
+ headers: {
19
+ 'Authorization': `Bearer ${apiKey}`,
20
+ 'Content-Type': 'application/json',
21
+ },
22
+ body: JSON.stringify({
23
+ model: this.model,
24
+ input: [
25
+ { role: 'system', content: [{ type: 'input_text', text: systemPrompt() }] },
26
+ { role: 'user', content: [{ type: 'input_text', text: userPrompt(input) }] },
27
+ ],
28
+ }),
29
+ });
30
+
31
+ if (!res.ok) {
32
+ const text = await res.text();
33
+ if (attempt < maxRetries && shouldRetryStatus(res.status)) {
34
+ const delayMs = computeRetryDelayMs(res.headers.get('retry-after'), text, attempt);
35
+ await waitForRetry(delayMs);
36
+ continue;
37
+ }
38
+ throw new Error(`OpenAI request failed: ${res.status} ${text}`);
39
+ }
40
+
41
+ const json = await res.json() as { output_text?: string };
42
+ if (!json.output_text) {
43
+ throw new Error('OpenAI response missing output_text');
44
+ }
45
+
46
+ return JSON.parse(json.output_text) as GeneratedNote;
47
+ }
48
+ }
49
+ }
@@ -0,0 +1,46 @@
1
// HTTP statuses treated as transient and worth retrying.
const RETRYABLE_STATUS = new Set([408, 409, 425, 429, 500, 502, 503, 504]);

// Resolves after the given number of milliseconds.
function sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
}

// Parses a Retry-After header (delta-seconds or HTTP-date) to milliseconds;
// returns null when the value is absent or unparseable.
function parseRetryAfterMs(value) {
    if (!value)
        return null;
    const seconds = Number.parseFloat(value);
    if (Number.isFinite(seconds) && seconds >= 0)
        return Math.round(seconds * 1000);
    const timestamp = Date.parse(value);
    if (Number.isNaN(timestamp))
        return null;
    return Math.max(0, timestamp - Date.now());
}

// Extracts a "try again in Ns" hint some APIs embed in error bodies.
function parseTryAgainMs(text) {
    const match = text.match(/try again in\s+(\d+(?:\.\d+)?)s/i);
    if (match === null)
        return null;
    const seconds = Number.parseFloat(match[1]);
    return Number.isFinite(seconds) && seconds >= 0 ? Math.round(seconds * 1000) : null;
}

// Exponential backoff: 500ms base doubling per attempt, plus up to 250ms jitter.
function backoffMs(attempt) {
    return 500 * 2 ** attempt + Math.floor(Math.random() * 250);
}

// True when the HTTP status is worth another attempt.
export function shouldRetryStatus(status) {
    return RETRYABLE_STATUS.has(status);
}

// Picks a retry delay: Retry-After header first, then a body hint, then
// backoff. Server hints are clamped to [250ms, 10s]; backoff is capped at 10s.
export function computeRetryDelayMs(headersRetryAfter, responseText, attempt) {
    const hinted = parseRetryAfterMs(headersRetryAfter) ?? parseTryAgainMs(responseText);
    if (hinted != null)
        return Math.min(10_000, Math.max(250, hinted));
    return Math.min(10_000, backoffMs(attempt));
}

// Awaitable pause before the next retry attempt.
export async function waitForRetry(ms) {
    await sleep(ms);
}