eckra 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CONTRIBUTING.md +85 -0
- package/LICENSE +21 -0
- package/README.md +109 -0
- package/package.json +47 -0
- package/screenshot.jpg +0 -0
- package/src/helpers/ai.js +240 -0
- package/src/helpers/config.js +122 -0
- package/src/helpers/git.js +655 -0
- package/src/helpers/lmstudio.js +11 -0
- package/src/helpers/patch.js +91 -0
- package/src/index.js +73 -0
- package/src/ui/app.js +177 -0
- package/src/ui/branch.js +295 -0
- package/src/ui/commit.js +250 -0
- package/src/ui/common.js +106 -0
- package/src/ui/config.js +269 -0
- package/src/ui/log.js +146 -0
- package/src/ui/menu.js +393 -0
- package/src/ui/modules/amend.js +43 -0
- package/src/ui/modules/blame.js +56 -0
- package/src/ui/modules/branch.js +223 -0
- package/src/ui/modules/commit.js +232 -0
- package/src/ui/modules/conflict.js +93 -0
- package/src/ui/modules/diff.js +68 -0
- package/src/ui/modules/log.js +52 -0
- package/src/ui/modules/more.js +94 -0
- package/src/ui/modules/rebase.js +72 -0
- package/src/ui/modules/remote.js +74 -0
- package/src/ui/modules/search.js +46 -0
- package/src/ui/modules/settings.js +123 -0
- package/src/ui/modules/stage.js +174 -0
- package/src/ui/modules/stash.js +96 -0
- package/src/ui/modules/stats.js +57 -0
- package/src/ui/modules/status.js +86 -0
- package/src/ui/modules/sync.js +73 -0
- package/src/ui/modules/tag.js +85 -0
- package/src/ui/modules/undo.js +49 -0
- package/src/ui/modules/worktree.js +131 -0
- package/src/ui/push.js +184 -0
- package/src/ui/status.js +156 -0
- package/tests/ai.test.js +112 -0
- package/tests/config.test.js +123 -0
- package/tests/patch.test.js +44 -0
package/CONTRIBUTING.md
ADDED
@@ -0,0 +1,85 @@
# Contributing to eckra

Thank you for your interest in contributing to eckra! This guide will help you get started with the development environment and understand the project structure.

## Getting Started

### Prerequisites
- **Node.js**: Version 14.0.0 or higher.
- **Git**: Installed and configured on your system.
- **LM Studio** (Optional): For AI-powered features, ensure LM Studio is running and accessible (default: `http://localhost:1234`).

### Setup
1. Clone the repository:
   ```bash
   git clone https://github.com/your-username/eckra.git
   cd eckra
   ```
2. Install dependencies:
   ```bash
   npm install
   ```
3. Run in development mode:
   ```bash
   npm start
   ```

## Testing

We use **Jest** for testing. To run the tests:

```bash
npm test
```

Please ensure that all tests pass before submitting a pull request. Adding new tests for new features is highly encouraged.

## Architecture

The project is structured into three main layers:

### 1. Entry Point (`src/index.js`)
Handles CLI command definitions using `commander`. It routes commands to the appropriate UI or helper functions.
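
As an illustration (a minimal sketch only; the real `src/index.js` may register different commands, options, and handlers), the `commander` wiring looks roughly like this:

```javascript
#!/usr/bin/env node
// Illustrative sketch: command names mirror the README ("status", "commit"),
// but the handlers shown here are placeholders, not the package's real UI calls.
const { program } = require("commander");

program
  .name("eckra")
  .description("AI-powered Git management CLI");

program
  .command("status")
  .description("Open the status and staging interface")
  .action(async () => {
    // would delegate to a UI module, e.g. src/ui/status.js
  });

program
  .command("commit")
  .description("Run the AI-assisted commit flow")
  .action(async () => {
    // would delegate to src/ui/commit.js
  });

program.parse(process.argv);
```
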
### 2. UI Layer (`src/ui/`)
Responsible for all user interactions.
- **`app.js`**: The main application loop and dashboard menu.
- **`common.js`**: Shared styles, icons, and UI utility functions (like `clear`, `header`, `box`).
- **`modules/`**: Contains individual feature modules. Each module (e.g., `commit.js`, `status.js`) handles a specific git flow.

#### UI Module Pattern
Most UI modules follow this pattern:
```javascript
async function doFeature(info) {
  // 1. Clear screen and show header
  clear();
  header();

  // 2. Perform logic or ask questions via inquirer
  const { choice } = await inquirer.prompt([...]);

  // 3. Execute git/helper operations
  // 4. Show results/feedback
}
```

### 3. Helpers (`src/helpers/`)
Core business logic separated from the UI; a usage sketch follows the list below.
- **`git.js`**: Wraps `simple-git` for all Git operations.
- **`ai.js`**: Handles communication with AI providers (like LM Studio) for suggestions.
- **`config.js`**: Manages user configuration.
- **`patch.js`**: Utilities for handling git patches and diffs.
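
For example, a commit-flow module might combine these layers roughly as follows. This is a minimal sketch: `generateCommitMessage` is exported by `src/helpers/ai.js`, but the `git.js` function names shown here are hypothetical placeholders, not the package's real API.

```javascript
// Sketch of a UI module calling into the helper layer.
// NOTE: getStagedDiff/getStagedFiles/commit are assumed names for git.js helpers.
const inquirer = require("inquirer");
const { generateCommitMessage } = require("../../helpers/ai");
const git = require("../../helpers/git");

async function doAiCommit() {
  // Collect the staged changes (hypothetical helpers wrapping simple-git)
  const diff = await git.getStagedDiff();
  const files = await git.getStagedFiles();

  // Ask the configured AI provider for a Conventional Commits message
  const suggestion = await generateCommitMessage(diff, files);

  // Confirm with the user before committing
  const { ok } = await inquirer.prompt([
    { type: "confirm", name: "ok", message: `Commit as "${suggestion}"?` },
  ]);
  if (ok) await git.commit(suggestion);
}

module.exports = { doAiCommit };
```
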
## Style Guide

- Use the styles defined in `src/ui/common.js` (e.g., `s.primary`, `s.success`) to maintain visual consistency.
- Prefer `inquirer` for interactive prompts.
- Keep UI logic in `src/ui/modules` and Git/AI logic in `src/helpers`.

## Pull Request Process

1. Create a new branch for your feature or bugfix.
2. Make your changes and add tests if applicable.
3. Ensure `npm test` passes.
4. Commit your changes with a clear and descriptive message.
5. Submit a pull request!
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2026 Eren Çakar

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,109 @@
# Eckra

Eckra is a command-line interface (CLI) for Git repository management. It provides an interactive terminal interface for standard Git operations and integrates with Large Language Models (LLMs) to automate the generation of commit messages.

<p align="center">
<img src="screenshot.jpg" alt="Eckra Interface" width="800">
</p>

## Overview

The tool is designed to streamline version control workflows by providing a structured interface for staging, committing, and branch management. It supports integration with local LLM providers such as LM Studio and Ollama, as well as cloud-based services including OpenAI and Anthropic.

## Core Features

- **Automated Commit Generation**: Uses git diff data to generate contextually relevant commit messages via configured AI providers.
- **Interactive Status Management**: Provides a visual interface for viewing repository status and staging changes.
- **Partial Staging**: Supports the selection of specific file hunks for staging.
- **Branch Management**: Tools for creating, deleting, and switching branches, including ahead/behind tracking.
- **Conflict Resolution**: Structured workflow for resolving merge conflicts.
- **History Visualization**: Renders commit history and branch graphs within the terminal.

## Installation

### Prerequisites

- Node.js (version 14.0.0 or higher)
- Git

### Setup from Source

1. Clone the repository:

   ```bash
   git clone https://github.com/sudoeren/eckra.git
   cd eckra
   ```

2. Install dependencies:

   ```bash
   npm install
   ```

3. Link the application globally:

   ```bash
   npm link
   ```

## Usage

Execute the primary command within a Git repository to launch the dashboard:

```bash
eckra
```

### Direct Commands

The tool also supports direct access to specific modules:

- `eckra status`: Open the status and staging interface.
- `eckra commit`: Initiate the AI-assisted commit flow.
- `eckra push`: Push changes to the remote repository.

## AI Integration

Eckra can be configured to use various AI backends for commit message generation.

| Provider  | Endpoint               | Use Case                               |
| :-------- | :--------------------- | :------------------------------------- |
| LM Studio | http://localhost:1234  | Local inference and privacy            |
| Ollama    | http://localhost:11434 | Local inference (Llama, Mistral, etc.) |
| OpenAI    | API                    | Cloud-based GPT models                 |
| Anthropic | API                    | Cloud-based Claude models              |

## Configuration

Configuration is managed through JSON files. The application follows a cascading priority:

1. **Global Configuration**: Located at `~/.eckra/config.json`.
2. **Local Configuration**: Defined in a `.eckrarc` file within the project root.

### Configuration Example

```json
{
  "aiProvider": "ollama",
  "ollamaModel": "llama3",
  "aiInstruction": "Ensure commit messages follow Conventional Commits specification."
}
```
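
A project can also keep its own settings in a `.eckrarc` file at the repository root; values found there override the global configuration at read time. The snippet below is illustrative only (the keys shown match the defaults in `src/helpers/config.js`, but the values are examples):

```json
{
  "aiProvider": "lmstudio",
  "lmStudioUrl": "http://localhost:1234",
  "aiInstruction": "Reference the affected module in the commit subject."
}
```
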
## Development and Testing

The project uses Jest for unit testing. To execute the test suite:

```bash
npm test
```

## License

This project is licensed under the MIT License.

---

<p align="center">
Developed by <b>Eren Çakar</b>
</p>
package/package.json
ADDED
@@ -0,0 +1,47 @@
{
  "name": "eckra",
  "version": "1.0.0",
  "description": "AI-powered Git management CLI with LM Studio integration for smart commit messages",
  "main": "src/index.js",
  "bin": {
    "eckra": "./src/index.js"
  },
  "scripts": {
    "start": "node src/index.js",
    "test": "jest"
  },
  "jest": {
    "testEnvironment": "node",
    "verbose": true
  },
  "keywords": [
    "git",
    "cli",
    "ai",
    "commit",
    "lm-studio",
    "version-control"
  ],
  "author": "",
  "license": "MIT",
  "dependencies": {
    "axios": "^1.6.2",
    "boxen": "^5.1.2",
    "chalk": "^4.1.2",
    "cli-table3": "^0.6.3",
    "commander": "^11.1.0",
    "inquirer": "^8.2.6",
    "ora": "^5.4.1",
    "simple-git": "^3.21.0"
  },
  "engines": {
    "node": ">=14.0.0"
  },
  "repository": {
    "type": "git",
    "url": ""
  },
  "devDependencies": {
    "jest": "^30.2.0"
  }
}
package/screenshot.jpg
ADDED
Binary file

package/src/helpers/ai.js
ADDED
@@ -0,0 +1,240 @@
const axios = require("axios");
const { getConfig } = require("./config");

/**
 * Call the selected AI provider
 */
async function callProvider(provider, messages, temperature = 0.3, max_tokens = 100) {
  const config = getConfig();
  let url, headers, body;

  switch (provider) {
    case "openai":
      url = "https://api.openai.com/v1/chat/completions";
      headers = {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${config.openaiApiKey}`,
      };
      body = {
        model: config.openaiModel || "gpt-4o",
        messages,
        temperature,
        max_tokens,
      };
      break;

    case "anthropic":
      url = "https://api.anthropic.com/v1/messages";
      headers = {
        "Content-Type": "application/json",
        "x-api-key": config.anthropicApiKey,
        "anthropic-version": "2023-06-01",
      };
      // Anthropic messages format is slightly different (system is a separate field)
      const systemMessage = messages.find(m => m.role === "system")?.content;
      const userMessages = messages.filter(m => m.role !== "system");
      body = {
        model: config.anthropicModel || "claude-3-5-sonnet-20240620",
        system: systemMessage,
        messages: userMessages,
        max_tokens,
        temperature,
      };
      break;

    case "ollama":
      url = `${config.ollamaUrl || "http://localhost:11434"}/api/chat`;
      headers = { "Content-Type": "application/json" };
      body = {
        model: config.ollamaModel || "llama3",
        messages,
        temperature,
        stream: false,
      };
      break;

    case "lmstudio":
    default:
      url = `${config.lmStudioUrl || "http://localhost:1234"}/v1/chat/completions`;
      headers = { "Content-Type": "application/json" };
      body = {
        model: config.model,
        messages,
        temperature,
        max_tokens,
        stream: false,
      };
      break;
  }

  try {
    const response = await axios.post(url, body, { headers, timeout: 30000 });

    let content = "";
    if (provider === "anthropic") {
      content = response.data.content[0].text;
    } else if (provider === "ollama") {
      content = response.data.message.content;
    } else {
      content = response.data.choices[0].message.content;
    }

    return content.trim();
  } catch (error) {
    if (error.response) {
      throw new Error(`AI Provider Error (${provider}): ${error.response.status} - ${JSON.stringify(error.response.data)}`);
    }
    throw error;
  }
}

/**
 * Generate commit message using the configured AI provider
 */
async function generateCommitMessage(diff, filesList) {
  const config = getConfig();
  const instructionText = config.aiInstruction
    ? `\nIMPORTANT USER INSTRUCTION: ${config.aiInstruction}\n`
    : "";

  const prompt = `You are a Git commit message generator. Based on the following changes${config.aiInstruction ? " and the user instruction" : ""}, create a short, descriptive commit message in Conventional Commits format.
${instructionText}
Conventional Commits format:
- feat: A new feature
- fix: A bug fix
- docs: Documentation only changes
- style: Changes that do not affect the code (whitespace, formatting, missing semicolons, etc.)
- refactor: Code change that neither fixes a bug nor adds a feature
- perf: Code change that improves performance
- test: Adding missing tests or correcting existing tests
- chore: Changes to the build process or auxiliary tools

Changed files:
${filesList.join("\n")}

Diff:
${diff.substring(0, 3000)}

Write only the commit message, do not add any other explanation. The message should be in English and should not exceed 72 characters.`;

  const messages = [
    {
      role: "system",
      content: "You are a helpful assistant that generates concise and meaningful Git commit messages following Conventional Commits specification.",
    },
    {
      role: "user",
      content: prompt,
    },
  ];

  let message = await callProvider(config.aiProvider, messages, 0.3, 100);

  // Clean up the message
  message = message.replace(/^["']|["']$/g, "");
  message = message.split("\n")[0]; // Take only first line
  return message;
}

/**
 * Generate multiple commit message suggestions
 */
async function generateCommitSuggestions(diff, filesList, count = 3, instruction = null) {
  const config = getConfig();
  const activeInstruction = instruction || config.aiInstruction;

  let instructionText = "";
  if (activeInstruction) {
    instructionText = `\nIMPORTANT USER INSTRUCTION: ${activeInstruction}\n`;
  }

  const prompt = `You are a Git commit message generator. Based on the following changes${activeInstruction ? " and the user instruction" : ""}, suggest ${count} different commit messages. Each should be in Conventional Commits format.
${instructionText}
Changed files:
${filesList.join("\n")}

Diff:
${diff.substring(0, 3000)}

Write ${count} different commit messages, each on a new line. Write only the messages, do not add numbers or explanations.`;

  const messages = [
    {
      role: "system",
      content: "You are a helpful assistant that generates concise and meaningful Git commit messages.",
    },
    {
      role: "user",
      content: prompt,
    },
  ];

  try {
    const content = await callProvider(config.aiProvider, messages, 0.7, 200);

    // Clean backtick blocks
    let cleanedContent = content.replace(/```[\s\S]*?```/g, "");
    cleanedContent = cleanedContent.replace(/`/g, "");

    const suggestions = cleanedContent
      .split("\n")
      .map((line) => {
        let cleaned = line
          .replace(/^\d+[\.)\-:]\s*/, "") // Remove numbers
          .replace(/^[-*]\s*/, "") // Remove list markers
          .replace(/^["']|["']$/g, "") // Remove quotes
          .trim();
        return cleaned;
      })
      .filter((line) => line.length > 5 && !line.startsWith("```"))
      .slice(0, count);

    if (suggestions.length === 0) {
      return ["chore: update files", "refactor: improve code", "feat: add changes"];
    }

    return suggestions;
  } catch (error) {
    return ["chore: update files", "refactor: improve code", "feat: add changes"];
  }
}

/**
 * Check if the configured AI provider is available
 */
async function checkAIConnection() {
  const config = getConfig();
  const provider = config.aiProvider || "lmstudio";

  try {
    if (provider === "lmstudio") {
      const response = await axios.get(`${config.lmStudioUrl}/v1/models`, { timeout: 5000 });
      return { connected: true, models: response.data?.data || [] };
    } else if (provider === "ollama") {
      const response = await axios.get(`${config.ollamaUrl}/api/tags`, { timeout: 5000 });
      return { connected: true, models: response.data?.models || [] };
    } else if (provider === "openai") {
      if (!config.openaiApiKey) return { connected: false, error: "OpenAI API Key is missing" };
      // Simple check by listing models
      const response = await axios.get("https://api.openai.com/v1/models", {
        headers: { "Authorization": `Bearer ${config.openaiApiKey}` },
        timeout: 5000
      });
      return { connected: true, models: response.data?.data || [] };
    } else if (provider === "anthropic") {
      if (!config.anthropicApiKey) return { connected: false, error: "Anthropic API Key is missing" };
      return { connected: true, note: "Anthropic connection assumed (listing models not supported via simple GET)" };
    }
    return { connected: true };
  } catch (error) {
    return { connected: false, error: error.message };
  }
}

module.exports = {
  generateCommitMessage,
  generateCommitSuggestions,
  checkAIConnection,
  // Alias for backward compatibility
  checkLMStudioConnection: checkAIConnection,
};
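
For reference, a minimal sketch of how these exports might be consumed (the calling module shown here is illustrative and not part of the package; the `simple-git` calls are one possible way a UI module could gather the staged diff):

```javascript
// Illustrative caller: checks the provider first, then asks for suggestions.
const simpleGit = require("simple-git");
const { checkAIConnection, generateCommitSuggestions } = require("./ai");

async function suggestForStagedChanges() {
  const status = await checkAIConnection();
  if (!status.connected) {
    throw new Error(`AI provider unavailable: ${status.error}`);
  }

  const git = simpleGit();
  const diff = await git.diff(["--cached"]);            // staged diff as text
  const summary = await git.diffSummary(["--cached"]);  // staged file summary
  const files = summary.files.map((f) => f.file);

  return generateCommitSuggestions(diff, files, 3);
}
```
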
package/src/helpers/config.js
ADDED
@@ -0,0 +1,122 @@
const fs = require("fs");
const path = require("path");
const os = require("os");

const CONFIG_DIR = path.join(os.homedir(), ".eckra");
const CONFIG_FILE = path.join(CONFIG_DIR, "config.json");
const LOCAL_CONFIG_FILENAME = ".eckrarc";

const DEFAULT_CONFIG = {
  aiProvider: "lmstudio",
  lmStudioUrl: "http://localhost:1234",
  openaiApiKey: "",
  openaiModel: "gpt-4o",
  anthropicApiKey: "",
  anthropicModel: "claude-3-5-sonnet-20240620",
  ollamaUrl: "http://localhost:11434",
  ollamaModel: "llama3",
  model: "git-commit-message/unsloth.Q4_K_M.gguf",
  language: "en",
  autoStage: false,
  autoPush: false,
  commitPrefix: true,
  aiInstruction: "Use concise, present tense, and descriptive language. Focus on the 'why' of the changes.",
};

/**
 * Ensure config directory exists
 */
function ensureConfigDir() {
  if (!fs.existsSync(CONFIG_DIR)) {
    fs.mkdirSync(CONFIG_DIR, { recursive: true });
  }
}

/**
 * Find local configuration file in current or parent directories
 */
function findLocalConfig(startDir = process.cwd()) {
  let currentDir = startDir;
  while (currentDir !== path.parse(currentDir).root) {
    const localPath = path.join(currentDir, LOCAL_CONFIG_FILENAME);
    if (fs.existsSync(localPath)) {
      return localPath;
    }
    currentDir = path.dirname(currentDir);
  }
  // Check root
  const rootLocalPath = path.join(currentDir, LOCAL_CONFIG_FILENAME);
  if (fs.existsSync(rootLocalPath)) {
    return rootLocalPath;
  }
  return null;
}

/**
 * Get current configuration
 */
function getConfig() {
  ensureConfigDir();

  let config = { ...DEFAULT_CONFIG };

  // 1. Global config
  if (fs.existsSync(CONFIG_FILE)) {
    try {
      const globalData = fs.readFileSync(CONFIG_FILE, "utf8");
      config = { ...config, ...JSON.parse(globalData) };
    } catch (error) {
      // Silently ignore errors
    }
  }

  // 2. Local config (overrides global)
  const localConfigPath = findLocalConfig();
  if (localConfigPath) {
    try {
      const localData = fs.readFileSync(localConfigPath, "utf8");
      config = { ...config, ...JSON.parse(localData) };
    } catch (error) {
      // Silently ignore errors
    }
  }

  return config;
}

/**
 * Save configuration
 */
function saveConfig(config) {
  ensureConfigDir();

  const currentConfig = getConfig();
  const newConfig = { ...currentConfig, ...config };

  fs.writeFileSync(CONFIG_FILE, JSON.stringify(newConfig, null, 2));
  return newConfig;
}

/**
 * Reset configuration to defaults
 */
function resetConfig() {
  ensureConfigDir();
  fs.writeFileSync(CONFIG_FILE, JSON.stringify(DEFAULT_CONFIG, null, 2));
  return DEFAULT_CONFIG;
}

/**
 * Get config file path
 */
function getConfigPath() {
  return CONFIG_FILE;
}

module.exports = {
  getConfig,
  saveConfig,
  resetConfig,
  getConfigPath,
  DEFAULT_CONFIG,
};
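
For reference, a minimal usage sketch (the settings chosen here are illustrative): `saveConfig` merges into and persists the global `~/.eckra/config.json`, while any `.eckrarc` found by `getConfig` still overrides those values at read time.

```javascript
// Illustrative only: demonstrates the global-write / local-override behaviour.
const { getConfig, saveConfig, getConfigPath } = require("./config");

// Persist a global preference (written to ~/.eckra/config.json)
saveConfig({ aiProvider: "ollama", ollamaModel: "llama3" });

// Effective settings: defaults <- global file <- nearest .eckrarc (if any)
const config = getConfig();
console.log(`Config file: ${getConfigPath()}`);
console.log(`Active provider: ${config.aiProvider}`);
```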