@freetison/git-super 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +384 -0
- package/bin/git-super.mjs +576 -0
- package/lib/ARCHITECTURE.md +254 -0
- package/lib/auth/auth-strategy.mjs +132 -0
- package/lib/auth/credential-store.mjs +222 -0
- package/lib/auth/oauth-flows.mjs +266 -0
- package/lib/auth/token-manager.mjs +246 -0
- package/lib/cli/auth-commands.mjs +327 -0
- package/lib/config/config-loader.mjs +167 -0
- package/lib/fallback/add-files-strategy.mjs +15 -0
- package/lib/fallback/base-fallback-strategy.mjs +34 -0
- package/lib/fallback/delete-files-strategy.mjs +15 -0
- package/lib/fallback/fallback-resolver.mjs +54 -0
- package/lib/fallback/modify-files-strategy.mjs +15 -0
- package/lib/providers/anthropic-provider.mjs +44 -0
- package/lib/providers/azure-openai-provider.mjs +185 -0
- package/lib/providers/base-oauth-provider.mjs +62 -0
- package/lib/providers/base-provider.mjs +29 -0
- package/lib/providers/generic-oidc-provider.mjs +144 -0
- package/lib/providers/github-copilot-provider.mjs +113 -0
- package/lib/providers/ollama-provider.mjs +109 -0
- package/lib/providers/openai-provider.mjs +44 -0
- package/lib/providers/provider-registry.mjs +99 -0
- package/package.json +59 -0
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Azure OpenAI Provider with Azure AD Authentication
|
|
3
|
+
* Uses Microsoft Identity Platform (MSAL) for OAuth
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { BaseOAuthProvider } from './base-oauth-provider.mjs';
|
|
7
|
+
import { TokenManager } from '../auth/token-manager.mjs';
|
|
8
|
+
|
|
9
|
+
export class AzureOpenAIProvider extends BaseOAuthProvider {
  /**
   * Azure OpenAI provider authenticated via the Microsoft identity platform
   * (OAuth 2.0 device authorization grant).
   *
   * @param {Object} config - Loaded git-super configuration.
   * @param {string} [config.azureTenantId] - Azure AD tenant; defaults to 'common'.
   * @param {string} config.azureClientId - Azure AD application (client) ID. Required.
   * @param {string} config.azureResourceEndpoint - Azure OpenAI resource URL. Required.
   * @param {string} [config.azureDeploymentName] - Deployment name; falls back to config.aiModel.
   * @throws {Error} When the client ID or resource endpoint is missing.
   */
  constructor(config) {
    // Azure AD configuration
    const tenantId = config.azureTenantId || 'common';
    const clientId = config.azureClientId;

    if (!clientId) {
      throw new Error(
        'Azure Client ID is required. Set AZURE_CLIENT_ID or configure in .gitsuperrc'
      );
    }

    // Token manager scoped to Azure Cognitive Services (covers Azure OpenAI).
    const tokenManager = new TokenManager('azure-openai', {
      clientId,
      scopes: ['https://cognitiveservices.azure.com/.default'],
      tokenEndpoint: `https://login.microsoftonline.com/${tenantId}/oauth2/v2.0/token`,
    });

    super(config, tokenManager);

    this.tenantId = tenantId;
    this.resourceEndpoint = config.azureResourceEndpoint;
    this.deploymentName = config.azureDeploymentName || config.aiModel;

    if (!this.resourceEndpoint) {
      throw new Error(
        'Azure OpenAI endpoint is required. Set AZURE_OPENAI_ENDPOINT or configure in .gitsuperrc\n' +
        'Example: https://your-resource.openai.azure.com'
      );
    }
  }

  /**
   * Initiate the Azure AD device code flow.
   *
   * @returns {Promise<Object>} Device authorization details: deviceCode, userCode,
   *   verificationUri(-Complete), expiresIn, interval (seconds), and display message.
   * @throws {Error} When the device code request is rejected.
   */
  async initiateAuth() {
    const response = await fetch(
      `https://login.microsoftonline.com/${this.tenantId}/oauth2/v2.0/devicecode`,
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/x-www-form-urlencoded',
        },
        body: new URLSearchParams({
          client_id: this.tokenManager.clientId,
          scope: this.tokenManager.scopes.join(' '),
        }),
      }
    );

    if (!response.ok) {
      throw new Error(`Azure AD device code request failed: ${response.statusText}`);
    }

    const data = await response.json();

    return {
      deviceCode: data.device_code,
      userCode: data.user_code,
      verificationUri: data.verification_uri,
      // FIX: the v2.0 endpoint returns `verification_uri_complete` (when issued),
      // not `verification_url`; fall back to the plain URI so callers always get one.
      verificationUriComplete: data.verification_uri_complete ?? data.verification_uri,
      expiresIn: data.expires_in,
      interval: data.interval || 5,
      message: data.message,
    };
  }

  /**
   * Poll the Azure AD token endpoint until the user approves the device code.
   *
   * Transient network/parse failures are logged and retried; terminal OAuth errors
   * (expired code, denied, unknown error) abort immediately.
   *
   * @param {string} deviceCode - Device code returned by initiateAuth().
   * @param {number} interval - Suggested polling interval in seconds.
   * @throws {Error} On terminal OAuth errors or when polling times out.
   */
  async completeAuth(deviceCode, interval) {
    // Mutable: RFC 8628 §3.5 requires increasing the interval on `slow_down`.
    let pollInterval = interval * 1000;
    const maxAttempts = 180;

    for (let attempt = 0; attempt < maxAttempts; attempt++) {
      await new Promise(resolve => setTimeout(resolve, pollInterval));

      let data;
      try {
        const response = await fetch(
          `https://login.microsoftonline.com/${this.tenantId}/oauth2/v2.0/token`,
          {
            method: 'POST',
            headers: {
              'Content-Type': 'application/x-www-form-urlencoded',
            },
            body: new URLSearchParams({
              grant_type: 'urn:ietf:params:oauth:grant-type:device_code',
              client_id: this.tokenManager.clientId,
              device_code: deviceCode,
            }),
          }
        );
        data = await response.json();
      } catch (error) {
        // Transient network/parse failure — log and keep polling.
        console.warn(`Polling attempt ${attempt + 1} failed: ${error.message}`);
        continue;
      }

      if (data.error) {
        switch (data.error) {
          case 'authorization_pending':
            // User has not approved yet; poll again.
            continue;
          case 'slow_down':
            // RFC 8628 §3.5: add 5 seconds to the polling interval.
            pollInterval += 5000;
            continue;
          case 'expired_token':
            throw new Error('Device code expired. Please try again.');
          case 'access_denied':
            throw new Error('User denied authorization.');
          default:
            throw new Error(`Azure AD error: ${data.error} - ${data.error_description || ''}`);
        }
      }

      // Success — persist the token set for later requests.
      await this.tokenManager.storeTokens({
        accessToken: data.access_token,
        refreshToken: data.refresh_token,
        expiresIn: data.expires_in,
        tokenType: data.token_type,
        scope: data.scope,
      });
      return;
    }

    throw new Error('Azure AD authorization timeout. Please try again.');
  }

  /**
   * Generate a commit message using the configured Azure OpenAI deployment.
   *
   * @param {string} prompt - Prompt text for the model.
   * @returns {Promise<string>} Trimmed model output with surrounding quotes removed.
   * @throws {Error} When not authenticated or the API responds with an error.
   */
  async generate(prompt) {
    const isAuthenticated = await this.isAuthenticated();

    if (!isAuthenticated) {
      throw new Error(
        'Azure AD authentication required. ' +
        'Please authenticate with: git super auth login --provider azure-openai'
      );
    }

    // Build the deployment-scoped Azure OpenAI chat completions endpoint.
    const apiVersion = '2024-02-01';
    const endpoint = `${this.resourceEndpoint}/openai/deployments/${this.deploymentName}/chat/completions?api-version=${apiVersion}`;

    const response = await this.authenticatedFetch(endpoint, {
      method: 'POST',
      body: JSON.stringify({
        messages: [{ role: 'user', content: prompt }],
        max_tokens: 200,
        temperature: 0.7,
      }),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`Azure OpenAI error: ${response.status} - ${errorText}`);
    }

    const data = await response.json();
    return data.choices[0].message.content.trim().replace(/^["']|["']$/g, '');
  }

  /**
   * Get provider display name.
   * @returns {string}
   */
  getName() {
    return 'azure-openai';
  }
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Base OAuth Provider
|
|
3
|
+
* Extended base for providers using OAuth authentication
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { BaseAIProvider } from './base-provider.mjs';
|
|
7
|
+
import { OAuthAuthStrategy } from '../auth/auth-strategy.mjs';
|
|
8
|
+
|
|
9
|
+
export class BaseOAuthProvider extends BaseAIProvider {
  /**
   * Shared base class for providers that authenticate with OAuth tokens.
   *
   * @param {Object} config - Provider configuration.
   * @param {Object} tokenManager - Manages stored/refreshed OAuth tokens.
   */
  constructor(config, tokenManager) {
    super(config);
    this.tokenManager = tokenManager;
    this.authStrategy = new OAuthAuthStrategy(config, tokenManager);
  }

  /**
   * Build the authorization headers, refreshing the token when needed.
   * @returns {Promise<Object>} Header name/value pairs.
   */
  async getAuthHeaders() {
    const headers = await this.authStrategy.getAuthHeaders();
    return headers;
  }

  /**
   * Report whether a usable credential is currently available.
   * @returns {Promise<boolean>}
   */
  async isAuthenticated() {
    const valid = await this.authStrategy.isValid();
    return valid;
  }

  /**
   * Headers attached to every API request regardless of auth state.
   * @returns {Object}
   */
  getCommonHeaders() {
    return { 'Content-Type': 'application/json' };
  }

  /**
   * Perform a fetch with common + auth headers merged in.
   * Caller-supplied headers take precedence over defaults; auth headers
   * take precedence over the common headers.
   *
   * @param {string} url - API endpoint.
   * @param {Object} [options] - Additional fetch options.
   * @returns {Promise<Response>}
   */
  async authenticatedFetch(url, options = {}) {
    const { headers: callerHeaders = {}, ...rest } = options;
    const authHeaders = await this.getAuthHeaders();

    const mergedHeaders = {
      ...this.getCommonHeaders(),
      ...authHeaders,
      ...callerHeaders,
    };

    return fetch(url, { ...rest, headers: mergedHeaders });
  }
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Base AI Provider - Strategy Pattern interface
|
|
3
|
+
* All AI providers must extend this class
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
export class BaseAIProvider {
  /**
   * Strategy-pattern base class: every AI provider extends this and
   * supplies its own generate() implementation.
   *
   * @param {Object} config - Provider configuration.
   * @param {Object|null} [authStrategy] - Optional authentication strategy.
   */
  constructor(config, authStrategy = null) {
    this.config = config;
    this.authStrategy = authStrategy;
  }

  /**
   * Generate a commit message from a prompt. Subclasses must override.
   *
   * @param {string} prompt - The prompt to send to the AI.
   * @returns {Promise<string>} The generated message.
   * @throws {Error} Always, unless overridden by a subclass.
   */
  async generate(prompt) {
    const className = this.constructor.name;
    throw new Error(`${className} must implement generate(prompt)`);
  }

  /**
   * Derive a short provider name from the class name (used for logging).
   * @returns {string}
   */
  getName() {
    const stripped = this.constructor.name.replace('Provider', '');
    return stripped.toLowerCase();
  }
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generic OIDC (OpenID Connect) Provider
|
|
3
|
+
* Supports any OAuth 2.0 / OIDC compliant identity provider
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { BaseOAuthProvider } from './base-oauth-provider.mjs';
|
|
7
|
+
import { TokenManager } from '../auth/token-manager.mjs';
|
|
8
|
+
import { DeviceCodeFlow } from '../auth/oauth-flows.mjs';
|
|
9
|
+
|
|
10
|
+
export class GenericOIDCProvider extends BaseOAuthProvider {
  /**
   * Provider for any OAuth 2.0 / OIDC-compliant identity provider fronting
   * an AI completion API.
   *
   * @param {Object} config - Configuration; requires oidcIssuer, oidcClientId,
   *   and oidcApiEndpoint. Endpoints default to conventional issuer paths.
   * @throws {Error} When required OIDC settings are missing.
   */
  constructor(config) {
    const issuer = config.oidcIssuer;
    const clientId = config.oidcClientId;

    if (!issuer || !clientId) {
      throw new Error(
        'OIDC configuration required: OIDC_ISSUER and OIDC_CLIENT_ID must be set'
      );
    }

    // Token manager keyed to this provider; scopes/endpoints are overridable.
    const tokenManager = new TokenManager('generic-oidc', {
      clientId,
      scopes: config.oidcScopes || ['openid', 'profile', 'email'],
      tokenEndpoint: config.oidcTokenEndpoint || `${issuer}/oauth2/token`,
    });

    super(config, tokenManager);

    this.issuer = issuer;
    this.apiEndpoint = config.oidcApiEndpoint;
    this.deviceAuthEndpoint = config.oidcDeviceAuthEndpoint || `${issuer}/oauth2/device/authorize`;

    if (!this.apiEndpoint) {
      throw new Error(
        'OIDC API endpoint required: Set OIDC_API_ENDPOINT in config\n' +
        'This should be the endpoint where your AI service is hosted'
      );
    }
  }

  /**
   * Build a device-code flow wired to this provider's endpoints.
   * Shared by initiateAuth() and completeAuth().
   * @returns {DeviceCodeFlow}
   */
  #deviceFlow() {
    return new DeviceCodeFlow({
      clientId: this.tokenManager.clientId,
      deviceAuthEndpoint: this.deviceAuthEndpoint,
      tokenEndpoint: this.tokenManager.tokenEndpoint,
      scopes: this.tokenManager.scopes,
    });
  }

  /**
   * Fetch provider metadata from the issuer's well-known endpoint.
   * @returns {Promise<Object|null>} Discovered config, or null when discovery fails.
   */
  async discoverConfig() {
    try {
      const res = await fetch(`${this.issuer}/.well-known/openid-configuration`);
      if (!res.ok) {
        throw new Error(`OIDC discovery failed: ${res.statusText}`);
      }
      return await res.json();
    } catch (error) {
      console.warn(`OIDC discovery failed: ${error.message}, using manual configuration`);
      return null;
    }
  }

  /**
   * Start the OAuth 2.0 device authorization grant.
   * @returns {Promise<Object>} Device authorization details.
   */
  async initiateAuth() {
    return await this.#deviceFlow().initiate();
  }

  /**
   * Poll for the token and persist it once the user approves.
   * @param {string} deviceCode - Device code from initiateAuth().
   * @param {number} interval - Poll interval in seconds.
   */
  async completeAuth(deviceCode, interval) {
    const tokens = await this.#deviceFlow().pollForToken(deviceCode, interval);
    await this.tokenManager.storeTokens(tokens);
  }

  /**
   * Generate a commit message via the configured OIDC-protected API.
   * Assumes an OpenAI-style chat interface but tolerates several common
   * response shapes.
   *
   * @param {string} prompt - Prompt text for the model.
   * @returns {Promise<string>} Trimmed output with surrounding quotes removed.
   * @throws {Error} When unauthenticated, on API errors, or on unknown formats.
   */
  async generate(prompt) {
    if (!(await this.isAuthenticated())) {
      throw new Error(
        'OIDC authentication required. ' +
        'Please authenticate with: git super auth login --provider generic-oidc'
      );
    }

    const response = await this.authenticatedFetch(this.apiEndpoint, {
      method: 'POST',
      body: JSON.stringify({
        model: this.config.aiModel || 'default',
        messages: [{ role: 'user', content: prompt }],
        max_tokens: 200,
        temperature: 0.7,
      }),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`OIDC API error: ${response.status} - ${errorText}`);
    }

    const data = await response.json();
    const stripQuotes = (text) => text.trim().replace(/^["']|["']$/g, '');

    // OpenAI-compatible format
    if (data.choices?.[0]?.message?.content) {
      return stripQuotes(data.choices[0].message.content);
    }
    // Anthropic-compatible format
    if (Array.isArray(data.content) && data.content[0]?.text) {
      return stripQuotes(data.content[0].text);
    }
    // Simple { response: "..." } format
    if (data.response) {
      return stripQuotes(data.response);
    }
    // Plain string body
    if (typeof data === 'string') {
      return stripQuotes(data);
    }
    throw new Error('Unexpected API response format');
  }

  /**
   * Get provider display name.
   * @returns {string}
   */
  getName() {
    return 'generic-oidc';
  }
}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* GitHub Copilot Enterprise Provider
|
|
3
|
+
* Uses GitHub OAuth for authentication
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { BaseOAuthProvider } from './base-oauth-provider.mjs';
|
|
7
|
+
import { TokenManager } from '../auth/token-manager.mjs';
|
|
8
|
+
import { DeviceCodeFlow } from '../auth/oauth-flows.mjs';
|
|
9
|
+
|
|
10
|
+
export class GitHubCopilotProvider extends BaseOAuthProvider {
  /**
   * GitHub Copilot Enterprise provider, authenticated via GitHub's
   * OAuth device flow.
   *
   * @param {Object} config - Configuration; may supply githubClientId/githubOrg.
   */
  constructor(config) {
    const tokenManager = new TokenManager('github-copilot', {
      clientId: config.githubClientId || 'Iv1.b507a08c87ecfe98', // GitHub CLI client ID
      scopes: ['read:user', 'read:org'], // Copilot scopes may vary
      tokenEndpoint: 'https://github.com/login/oauth/access_token',
    });

    super(config, tokenManager);
    this.githubOrg = config.githubOrg;
  }

  /**
   * Build a device-code flow pointed at GitHub's OAuth endpoints.
   * Shared by initiateAuth() and completeAuth().
   * @returns {DeviceCodeFlow}
   */
  #deviceFlow() {
    return new DeviceCodeFlow({
      clientId: this.tokenManager.clientId,
      deviceAuthEndpoint: 'https://github.com/login/device/code',
      tokenEndpoint: 'https://github.com/login/oauth/access_token',
      scopes: this.tokenManager.scopes,
    });
  }

  /**
   * Initiate the device code flow for GitHub authentication.
   * @returns {Promise<Object>} Device authorization details.
   */
  async initiateAuth() {
    return await this.#deviceFlow().initiate();
  }

  /**
   * Complete the device code flow and persist the resulting tokens.
   * @param {string} deviceCode - Device code from initiateAuth().
   * @param {number} interval - Poll interval in seconds.
   */
  async completeAuth(deviceCode, interval) {
    const tokens = await this.#deviceFlow().pollForToken(deviceCode, interval);
    await this.tokenManager.storeTokens(tokens);
  }

  /**
   * Generate a commit message via GitHub Models (preview endpoint).
   *
   * NOTE(review): conceptual implementation — Copilot has no public commit
   * message API, so any failure is wrapped in an explanatory error.
   *
   * @param {string} prompt - Prompt text for the model.
   * @returns {Promise<string>} Trimmed output with surrounding quotes removed.
   * @throws {Error} When unauthenticated or when the API call fails.
   */
  async generate(prompt) {
    if (!(await this.isAuthenticated())) {
      throw new Error(
        'GitHub Copilot Enterprise authentication required. ' +
        'Please authenticate with: git super auth login --provider github-copilot'
      );
    }

    try {
      // GitHub Models API (preview):
      // https://github.blog/2024-05-21-github-models-a-new-generation-of-ai-models/
      const response = await this.authenticatedFetch(
        'https://models.inference.ai.azure.com/chat/completions',
        {
          method: 'POST',
          body: JSON.stringify({
            model: this.config.aiModel || 'gpt-4o',
            messages: [{ role: 'user', content: prompt }],
            max_tokens: 200,
            temperature: 0.7,
          }),
        }
      );

      if (!response.ok) {
        throw new Error(`GitHub Copilot API error: ${response.status} ${response.statusText}`);
      }

      const payload = await response.json();
      return payload.choices[0].message.content.trim().replace(/^["']|["']$/g, '');
    } catch (error) {
      throw new Error(
        `GitHub Copilot is not fully available yet for commit generation. ` +
        `Original error: ${error.message}\n\n` +
        `Note: GitHub Copilot Enterprise API for commit messages may require ` +
        `special access or may use a different endpoint. Please check GitHub's ` +
        `documentation for the latest API details.`
      );
    }
  }

  /**
   * Get provider display name.
   * @returns {string}
   */
  getName() {
    return 'github-copilot';
  }
}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Ollama AI Provider implementation
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { BaseAIProvider } from './base-provider.mjs';
|
|
6
|
+
import { NoAuthStrategy } from '../auth/auth-strategy.mjs';
|
|
7
|
+
|
|
8
|
+
export class OllamaProvider extends BaseAIProvider {
  /**
   * Provider for a locally running Ollama server.
   * @param {Object} config - Configuration; reads `ollamaUrl` and `aiModel`.
   */
  constructor(config) {
    // Ollama doesn't need authentication (local server)
    super(config, new NoAuthStrategy(config));
  }

  /**
   * POST a completion request to the local Ollama server.
   * Extracted so the initial attempt and the model-fallback retry share
   * a single request body (was duplicated inline in generate()).
   *
   * @param {string} model - Model name to request.
   * @param {string} prompt - Prompt text.
   * @returns {Promise<Response>}
   */
  async #requestCompletion(model, prompt) {
    return fetch(`${this.config.ollamaUrl}/api/generate`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        model,
        prompt,
        stream: false,
        options: {
          temperature: 0.7,
          top_p: 0.9,
        },
      }),
    });
  }

  /**
   * Generate a commit message using Ollama, falling back to the best
   * installed model when the configured one is not found (HTTP 404).
   *
   * @param {string} prompt - Prompt text for the model.
   * @returns {Promise<string>} Cleaned single-line commit message.
   * @throws {Error} When no models are installed or the server errors.
   */
  async generate(prompt) {
    let modelToUse = this.config.aiModel;
    let response = await this.#requestCompletion(modelToUse, prompt);

    // 404 means the requested model isn't installed — pick an alternative.
    if (response.status === 404) {
      const availableModels = await this.getAvailableModels();

      if (availableModels.length === 0) {
        throw new Error(`Ollama is running but no models are installed. Install one with: ollama pull mistral`);
      }

      modelToUse = this.findBestModel(availableModels);
      console.log(` ℹ️ Model '${this.config.aiModel}' not found, using '${modelToUse}'`);

      // Retry with available model
      response = await this.#requestCompletion(modelToUse, prompt);
    }

    if (!response.ok) {
      throw new Error(`Ollama error: ${response.status} ${response.statusText}`);
    }

    const data = await response.json();
    return this.cleanResponse(data.response);
  }

  /**
   * List the model names installed on the local Ollama server.
   * Best-effort: returns [] on any failure.
   * @returns {Promise<string[]>}
   */
  async getAvailableModels() {
    try {
      const response = await fetch(`${this.config.ollamaUrl}/api/tags`);
      if (!response.ok) return [];
      const data = await response.json();
      return data.models?.map(m => m.name) || [];
    } catch {
      return [];
    }
  }

  /**
   * Pick the most suitable installed model, preferring code-oriented ones;
   * falls back to the first available model.
   * @param {string[]} availableModels - Installed model names.
   * @returns {string}
   */
  findBestModel(availableModels) {
    const preferredModels = [
      'qwen2.5-coder',
      'deepseek-coder',
      'codellama',
      'mistral',
      'llama3',
      'llama2'
    ];

    // Prefix match lets versioned names (e.g. "mistral:7b") satisfy a preference.
    return preferredModels.find(m =>
      availableModels.some(a => a.startsWith(m))
    ) || availableModels[0];
  }

  /**
   * Normalize raw model output into a single-line commit message:
   * strip quotes/markdown/code fences, keep only the first line, and
   * replace output that looks like chatty prose with a generic message.
   *
   * @param {string} response - Raw text from the model.
   * @returns {string}
   */
  cleanResponse(response) {
    let message = response.trim()
      .replace(/^["'`]|["'`]$/g, '') // Remove quotes
      .replace(/^\*\*|\*\*$/g, '') // Remove bold markdown
      .replace(/^```.*\n?|\n?```$/g, '') // Remove code blocks
      .split('\n')[0] // Take first line only
      .trim();

    // If message looks invalid (too long, or conversational filler), return generic
    if (message.length > 100 ||
        message.toLowerCase().includes('based on') ||
        message.toLowerCase().includes('appears')) {
      return 'chore: update files';
    }

    return message;
  }
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI AI Provider implementation
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { BaseAIProvider } from './base-provider.mjs';
|
|
6
|
+
import { ApiKeyAuthStrategy } from '../auth/auth-strategy.mjs';
|
|
7
|
+
|
|
8
|
+
export class OpenAIProvider extends BaseAIProvider {
  /**
   * OpenAI chat-completions provider using API-key authentication.
   * @param {Object} config - Configuration; reads `openaiKey` and `aiModel`.
   */
  constructor(config) {
    // API-key auth: sends `Authorization: Bearer <openaiKey>`.
    const authStrategy = new ApiKeyAuthStrategy(config, {
      keyName: 'openaiKey',
      headerName: 'Authorization',
      headerFormat: 'Bearer {key}',
    });
    super(config, authStrategy);
  }

  /**
   * Generate a commit message via the OpenAI chat completions API.
   *
   * @param {string} prompt - Prompt text for the model.
   * @returns {Promise<string>} Trimmed output with surrounding quotes removed.
   * @throws {Error} When the API responds with a non-2xx status.
   */
  async generate(prompt) {
    // Get auth headers from strategy
    const authHeaders = await this.authStrategy.getAuthHeaders();

    const response = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        ...authHeaders,
      },
      body: JSON.stringify({
        model: this.config.aiModel || 'gpt-4',
        messages: [{ role: 'user', content: prompt }],
        max_tokens: 200,
        temperature: 0.7,
      }),
    });

    if (!response.ok) {
      // FIX: include the status code and response body (not just statusText)
      // for actionable diagnostics, matching the error style of the other
      // providers in this package (Azure, generic OIDC).
      const errorText = await response.text();
      throw new Error(`OpenAI error: ${response.status} - ${errorText}`);
    }

    const data = await response.json();
    return data.choices[0].message.content.trim().replace(/^["']|["']$/g, '');
  }
}
|