haitask 0.3.5 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -72,6 +72,22 @@ For machine-readable output: `haitask run --dry --json`.
72
72
  | `haitask run --commits N` | Combine the last N commits into one task (e.g. `--commits 3`) |
73
73
  | `haitask run --type <type>` | (Jira only) Override issue type for this run (Task, Bug, Story) |
74
74
  | `haitask run --status <status>` | (Jira only) Override status after create (Done, "To Do", etc.) |
75
+ | `haitask run --lang <lang>` | Language for task title and description (en, az, tr, ru). Default: en |
76
+
77
+ ---
78
+
79
+ ## Language Support
80
+
81
+ By default, task titles and descriptions are generated in English. Use `--lang` to generate them in a different language:
82
+
83
+ ```bash
84
+ haitask run --lang az # Azerbaijani
85
+ haitask run --lang tr # Turkish
86
+ haitask run --lang ru # Russian
87
+ haitask run --lang en # English (default)
88
+ ```
89
+
90
+ Supported languages: `en` (English), `az` (Azerbaijani), `tr` (Turkish), `ru` (Russian).
75
91
 
76
92
  ---
77
93
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "haitask",
3
- "version": "0.3.5",
3
+ "version": "0.4.0",
4
4
  "description": "HAITASK \u2014 AI-powered task creation from Git commits. Creates issues in Jira, Trello, or Linear from your latest commit message and branch.",
5
5
  "type": "module",
6
6
  "main": "src/index.js",
package/src/ai/deepseek.js CHANGED
@@ -12,16 +12,18 @@ const DEEPSEEK_API_URL = 'https://api.deepseek.com/v1/chat/completions';
12
12
  * Call Deepseek and return task payload for the configured target.
13
13
  * @param {{ message: string, branch: string, repoName: string }} commitData
14
14
  * @param {{ ai: { model?: string } }} config
15
+ * @param {{ lang?: string }} options
15
16
  * @returns {Promise<{ title: string, description: string, labels: string[] }>}
16
17
  */
17
- export async function generateDeepseek(commitData, config) {
18
+ export async function generateDeepseek(commitData, config, options = {}) {
18
19
  const apiKey = process.env.DEEPSEEK_API_KEY;
19
20
  if (!apiKey?.trim()) {
20
21
  throw new Error('DEEPSEEK_API_KEY is not set. Add it to .env. Get free key at https://platform.deepseek.com/');
21
22
  }
22
23
 
23
24
  const model = config?.ai?.model || 'deepseek-chat';
24
- const { system, user } = buildPrompt(commitData, config?.target);
25
+ const { lang } = options || {};
26
+ const { system, user } = buildPrompt(commitData, config?.target, { lang });
25
27
 
26
28
  const response = await fetch(DEEPSEEK_API_URL, {
27
29
  method: 'POST',
package/src/ai/groq.js CHANGED
@@ -12,9 +12,10 @@ const GROQ_API_URL = 'https://api.groq.com/openai/v1/chat/completions';
12
12
  * Call Groq and return task payload for the configured target.
13
13
  * @param {{ message: string, branch: string, repoName: string }} commitData
14
14
  * @param {{ ai: { model?: string } }} config
15
+ * @param {{ lang?: string }} options
15
16
  * @returns {Promise<{ title: string, description: string, labels: string[] }>}
16
17
  */
17
- export async function generateGroq(commitData, config) {
18
+ export async function generateGroq(commitData, config, options = {}) {
18
19
  const apiKey = process.env.GROQ_API_KEY;
19
20
  if (!apiKey?.trim()) {
20
21
  throw new Error(
@@ -23,7 +24,8 @@ export async function generateGroq(commitData, config) {
23
24
  }
24
25
 
25
26
  const model = config?.ai?.model || 'llama-3.1-8b-instant';
26
- const { system, user } = buildPrompt(commitData, config?.target);
27
+ const { lang } = options || {};
28
+ const { system, user } = buildPrompt(commitData, config?.target, { lang });
27
29
 
28
30
  const response = await fetch(GROQ_API_URL, {
29
31
  method: 'POST',
package/src/ai/index.js CHANGED
@@ -16,18 +16,20 @@ const PROVIDERS = ['openai', 'deepseek', 'groq'];
16
16
  * Main entry: generate task payload based on provider in config.
17
17
  * @param {{ message: string, branch: string, repoName: string }} commitData
18
18
  * @param {{ ai: { provider?: string, model?: string } }} config
19
+ * @param {{ lang?: string }} options
19
20
  * @returns {Promise<{ title: string, description: string, labels: string[] }>}
20
21
  */
21
- export async function generateTaskPayload(commitData, config) {
22
+ export async function generateTaskPayload(commitData, config, options = {}) {
22
23
  const provider = (config?.ai?.provider || 'groq').toLowerCase();
24
+ const { lang } = options || {};
23
25
 
24
26
  switch (provider) {
25
27
  case 'openai':
26
- return generateOpenAI(commitData, config);
28
+ return generateOpenAI(commitData, config, { lang });
27
29
  case 'deepseek':
28
- return generateDeepseek(commitData, config);
30
+ return generateDeepseek(commitData, config, { lang });
29
31
  case 'groq':
30
- return generateGroq(commitData, config);
32
+ return generateGroq(commitData, config, { lang });
31
33
  default:
32
34
  throw new Error(`Unknown AI provider: ${provider}. Supported: ${PROVIDERS.join(', ')}`);
33
35
  }
package/src/ai/openai.js CHANGED
@@ -10,16 +10,18 @@ const OPENAI_API_URL = 'https://api.openai.com/v1/chat/completions';
10
10
  * Call OpenAI and return task payload for the configured target.
11
11
  * @param {{ message: string, branch: string, repoName: string }} commitData
12
12
  * @param {{ ai: { model?: string } }} config
13
+ * @param {{ lang?: string }} options
13
14
  * @returns {Promise<{ title: string, description: string, labels: string[] }>}
14
15
  */
15
- export async function generateOpenAI(commitData, config) {
16
+ export async function generateOpenAI(commitData, config, options = {}) {
16
17
  const apiKey = process.env.OPENAI_API_KEY;
17
18
  if (!apiKey?.trim()) {
18
19
  throw new Error('OPENAI_API_KEY is not set. Add it to .env.');
19
20
  }
20
21
 
21
22
  const model = config?.ai?.model || 'gpt-4o-mini';
22
- const { system, user } = buildPrompt(commitData, config?.target);
23
+ const { lang } = options || {};
24
+ const { system, user } = buildPrompt(commitData, config?.target, { lang });
23
25
 
24
26
  const response = await fetch(OPENAI_API_URL, {
25
27
  method: 'POST',
package/src/ai/utils.js CHANGED
@@ -4,10 +4,38 @@
4
4
 
5
5
  const CONVENTIONAL_PREFIXES = /^(feat|fix|chore|docs|style|refactor|test|build|ci):\s*/i;
6
6
 
7
+ const LANG_ALIASES = {
8
+ en: 'en',
9
+ english: 'en',
10
+ az: 'az',
11
+ azerbaijani: 'az',
12
+ 'az-az': 'az',
13
+ tr: 'tr',
14
+ turkish: 'tr',
15
+ 'tr-tr': 'tr',
16
+ ru: 'ru',
17
+ russian: 'ru',
18
+ 'ru-ru': 'ru',
19
+ };
20
+
21
+ const LANG_INSTRUCTIONS = {
22
+ en: 'Write the title and description in English.',
23
+ az: 'Write the title and description in Azerbaijani (Azərbaycan) language.',
24
+ tr: 'Write the title and description in Turkish language.',
25
+ ru: 'Write the title and description in Russian language.',
26
+ };
27
+
28
+ function normalizeLang(lang) {
29
+ if (!lang) return 'en';
30
+ const normalized = lang.toLowerCase().trim();
31
+ return LANG_ALIASES[normalized] || 'en';
32
+ }
33
+
7
34
  /**
8
35
  * Build system + user prompt from commit data.
9
36
  * @param {{ message: string, branch: string, repoName: string }} commitData
10
37
  * @param {string} [target='jira']
38
+ * @param {{ lang?: string }} options
11
39
  * @returns {{ system: string, user: string }}
12
40
  */
13
41
  const BATCH_SEP = '\n\n---\n\n';
@@ -18,7 +46,7 @@ const TARGET_META = {
18
46
  linear: { displayName: 'Linear', workItem: 'issue', priorityField: true },
19
47
  };
20
48
 
21
- export function buildPrompt(commitData, target = 'jira') {
49
+ export function buildPrompt(commitData, target = 'jira', options = {}) {
22
50
  const { message, branch, repoName } = commitData;
23
51
  const isBatch = message.includes(BATCH_SEP);
24
52
  const normalizedTarget = (target || 'jira').toLowerCase();
@@ -29,8 +57,13 @@ export function buildPrompt(commitData, target = 'jira') {
29
57
  const priorityRule = meta.priorityField
30
58
  ? '- "priority": One of: "Highest", "High", "Medium", "Low", "Lowest". Infer from commit message (e.g. "urgent", "critical", "hotfix" → High; "minor", "tweak" → Low; unclear → "Medium"). Default to "Medium" if unsure.'
31
59
  : '- "priority": Still provide one of "Highest", "High", "Medium", "Low", "Lowest". Some targets may not have a native priority field; it can be used for description context.';
60
+
61
+ const lang = normalizeLang(options?.lang);
62
+ const langInstruction = LANG_INSTRUCTIONS[lang] || LANG_INSTRUCTIONS.en;
63
+
32
64
  const system = `You generate a ${meta.displayName} ${meta.workItem} from a Git commit. Reply with a single JSON object only, no markdown or extra text.
33
- ${batchHint}Keys:
65
+ ${batchHint}${langInstruction}
66
+ Keys:
34
67
  - "title": Short, formal ${meta.displayName} ${meta.workItem} summary (professional wording). Do NOT copy the commit message verbatim. Rewrite as a clear, formal title. Do NOT include prefixes like feat:, fix:, chore: in the title.
35
68
  - "description": Detailed description in plain language, suitable for ${meta.displayName}. Expand and formalize the intent of the commit; do not just paste the commit message.
36
69
  - "labels": Array of strings, e.g. ["auto", "commit"].
package/src/run.js CHANGED (file header reconstructed — missing in original diff; hunk modifies runRun)
@@ -88,6 +88,7 @@ export async function runRun(options = {}) {
88
88
  const type = options.type?.trim() || undefined;
89
89
  const status = options.status?.trim() || undefined;
90
90
  const commits = Number(options.commits ?? 1);
91
+ const lang = options.lang?.trim() || undefined;
91
92
 
92
93
  let config;
93
94
  try {
@@ -98,7 +99,7 @@ export async function runRun(options = {}) {
98
99
  }
99
100
 
100
101
  try {
101
- const result = await runPipeline(config, { dry, issueType: type, transitionToStatus: status, commits });
102
+ const result = await runPipeline(config, { dry, issueType: type, transitionToStatus: status, commits, lang });
102
103
 
103
104
  if (!result.ok) {
104
105
  printFailure(json, result.error || 'Pipeline failed.');
package/src/pipeline.js CHANGED (file header reconstructed — missing in original diff; hunk modifies validateRules and runPipeline)
@@ -51,11 +51,11 @@ export function validateRules(commitData, config) {
51
51
  /**
52
52
  * Run full pipeline: Git → validate → AI → target (unless dry).
53
53
  * @param {object} config - Loaded .haitaskrc
54
- * @param {{ dry?: boolean, issueType?: string, transitionToStatus?: string, commits?: number }} options
54
+ * @param {{ dry?: boolean, issueType?: string, transitionToStatus?: string, commits?: number, lang?: string }} options
55
55
  * @returns {Promise<{ ok: boolean, dry?: boolean, key?: string, url?: string, payload?: object, commitData?: object, error?: string }>}
56
56
  */
57
57
  export async function runPipeline(config, options = {}) {
58
- const { dry = false, issueType: typeOverride, transitionToStatus: statusOverride, commits: commitsOpt } = options;
58
+ const { dry = false, issueType: typeOverride, transitionToStatus: statusOverride, commits: commitsOpt, lang } = options;
59
59
  const numCommits = Math.max(1, Number(commitsOpt) || 1);
60
60
 
61
61
  const commitData = numCommits > 1 ? await getLatestCommitsData(numCommits) : await getLatestCommitData();
@@ -71,7 +71,7 @@ export async function runPipeline(config, options = {}) {
71
71
  return { ok: true, commented: true, key, url, commitData };
72
72
  }
73
73
 
74
- const payload = await generateTaskPayload(commitData, config);
74
+ const payload = await generateTaskPayload(commitData, config, { lang });
75
75
 
76
76
  if (dry) {
77
77
  return { ok: true, dry: true, payload, commitData };
package/src/index.js CHANGED
@@ -37,6 +37,7 @@ program
37
37
  .option('-c, --commits <n>', 'Number of commits to combine into one task (default: 1)', '1')
38
38
  .option('-t, --type <type>', 'Jira issue type for this run (e.g. Task, Bug, Story). Overrides .haitaskrc jira.issueType')
39
39
  .option('-s, --status <status>', 'Jira transition-to status after create (e.g. Done, "To Do"). Overrides .haitaskrc jira.transitionToStatus')
40
+ .option('-l, --lang <language>', 'Language for task title and description (en, az, tr, ru). Default: en')
40
41
  .action((opts) => runRun(opts));
41
42
 
42
43
  program.parse();