@stephansama/ai-commit-msg 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Stephan Randle
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,43 @@
+ # @stephansama/ai-commit-msg
+
+ [![Source code](https://img.shields.io/badge/Source-666666?style=flat&logo=github&label=Github&labelColor=211F1F)](https://github.com/stephansama/packages/tree/main/core/ai-commit-msg)
+ [![Documentation](https://img.shields.io/badge/Documentation-211F1F?style=flat&logo=Wikibooks&labelColor=211F1F)](https://packages.stephansama.info/api/@stephansama/ai-commit-msg)
+ [![NPM Version](https://img.shields.io/npm/v/%40stephansama%2Fai-commit-msg?logo=npm&logoColor=red&color=211F1F&labelColor=211F1F)](https://www.npmjs.com/package/@stephansama/ai-commit-msg)
+ [![npm downloads](https://img.shields.io/npm/dw/@stephansama/ai-commit-msg?labelColor=211F1F)](https://www.npmjs.com/package/@stephansama/ai-commit-msg)
+
+ generate commit messages using ai
+
+ ##### Table of contents
+
+ <details><summary>Open Table of contents</summary>
+
+ - [Installation](#installation)
+ - [Usage](#usage)
+ - [Husky](#husky)
+
+ </details>
+
+ ## Installation
+
+ ```sh
+ pnpm install @stephansama/ai-commit-msg
+ ```
+
+ ## Usage
+
+ ### Husky
+
+ 1. Install and initialize husky
+
+ ```sh
+ npm install --save-dev husky && npx husky init
+ ```
+
+ 2. Create the `prepare-commit-msg` hook by adding a file at
+ `.husky/prepare-commit-msg`
+
+ ```sh
+ #!/bin/sh
+
+ ai-commit-msg -o "$1"
+ ```
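For reference, the hook wiring above can be exercised end to end once the file exists. The sketch below is an editorial assumption based on the `-o` and `-v` flags defined in `src/args.ts` (bundled in `dist/index.js` later in this diff); it is not an excerpt from the package's own docs.

```sh
# Stage some changes, then either let the husky hook run on commit...
git add .
git commit        # prepare-commit-msg fills the commit message via `ai-commit-msg -o "$1"`

# ...or invoke the CLI manually, with verbose logging and an explicit output file
npx ai-commit-msg -v -o /tmp/commit-msg.txt
```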
package/cli.mjs ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env node
+
+ "use strict";
+
+ import("./dist/index.js").then((mod) => mod.run());
@@ -0,0 +1,63 @@
+ //#region rolldown:runtime
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
+ key = keys[i];
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
+ get: ((k) => from[k]).bind(null, key),
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
+ value: mod,
+ enumerable: true
+ }) : target, mod));
+
+ //#endregion
+ let zod = require("zod");
+ zod = __toESM(zod);
+
+ //#region src/schema.ts
+ const defaultPrompt = `generate a conventional commit message based on the following diff. the subject should be all lowercase, and lines should not exceed 100 characters \n\n{{diff}}`;
+ const models = ["gemini-2.5-flash"];
+ const providers = [
+ "google",
+ "openai",
+ "ollama"
+ ];
+ const envSchema = {
+ google: zod.object({ GOOGLE_GENERATIVE_AI_API_KEY: zod.string().min(1) }),
+ ollama: zod.object({}),
+ openai: zod.object({ OPENAI_API_KEY: zod.string().min(1) })
+ };
+ const providerSchema = zod.enum(providers);
+ const configSchema = zod.object({
+ baseURL: zod.string().optional(),
+ headers: zod.record(zod.string(), zod.string()).optional(),
+ model: zod.string().meta({ description: "model to use from provider" }),
+ prompt: zod.string().default(defaultPrompt).meta({ description: "prompt used to fuel generated commit" }),
+ provider: providerSchema,
+ skipNextRun: zod.boolean().optional().meta({ description: "skip the next git hook invocation (usually used when manually running cli)" }),
+ useConventionalCommits: zod.boolean().default(true),
+ verbose: zod.union([
+ zod.literal(0),
+ zod.literal(1),
+ zod.literal(2),
+ zod.literal(3)
+ ]).default(0)
+ });
+
+ //#endregion
+ exports.configSchema = configSchema;
+ exports.defaultPrompt = defaultPrompt;
+ exports.envSchema = envSchema;
+ exports.models = models;
+ exports.providerSchema = providerSchema;
+ exports.providers = providers;
@@ -0,0 +1,39 @@
+ import * as z from "zod";
+
+ //#region src/schema.d.ts
+ declare const defaultPrompt = "generate a conventional commit message based on the following diff. the subject should be all lowercase, and lines should not exceed 100 characters \n\n{{diff}}";
+ declare const models: readonly ["gemini-2.5-flash"];
+ type Model = (typeof models)[number];
+ declare const providers: readonly ["google", "openai", "ollama"];
+ type Provider = (typeof providers)[number];
+ declare const envSchema: {
+ google: z.ZodObject<{
+ GOOGLE_GENERATIVE_AI_API_KEY: z.ZodString;
+ }, z.core.$strip>;
+ ollama: z.ZodObject<{}, z.core.$strip>;
+ openai: z.ZodObject<{
+ OPENAI_API_KEY: z.ZodString;
+ }, z.core.$strip>;
+ };
+ declare const providerSchema: z.ZodEnum<{
+ google: "google";
+ openai: "openai";
+ ollama: "ollama";
+ }>;
+ declare const configSchema: z.ZodObject<{
+ baseURL: z.ZodOptional<z.ZodString>;
+ headers: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;
+ model: z.ZodString;
+ prompt: z.ZodDefault<z.ZodString>;
+ provider: z.ZodEnum<{
+ google: "google";
+ openai: "openai";
+ ollama: "ollama";
+ }>;
+ skipNextRun: z.ZodOptional<z.ZodBoolean>;
+ useConventionalCommits: z.ZodDefault<z.ZodBoolean>;
+ verbose: z.ZodDefault<z.ZodUnion<readonly [z.ZodLiteral<0>, z.ZodLiteral<1>, z.ZodLiteral<2>, z.ZodLiteral<3>]>>;
+ }, z.core.$strip>;
+ type Config = Partial<z.infer<typeof configSchema>>;
+ //#endregion
+ export { Config, Model, Provider, configSchema, defaultPrompt, envSchema, models, providerSchema, providers };
@@ -0,0 +1,39 @@
+ import * as z from "zod";
+
+ //#region src/schema.d.ts
+ declare const defaultPrompt = "generate a conventional commit message based on the following diff. the subject should be all lowercase, and lines should not exceed 100 characters \n\n{{diff}}";
+ declare const models: readonly ["gemini-2.5-flash"];
+ type Model = (typeof models)[number];
+ declare const providers: readonly ["google", "openai", "ollama"];
+ type Provider = (typeof providers)[number];
+ declare const envSchema: {
+ google: z.ZodObject<{
+ GOOGLE_GENERATIVE_AI_API_KEY: z.ZodString;
+ }, z.core.$strip>;
+ ollama: z.ZodObject<{}, z.core.$strip>;
+ openai: z.ZodObject<{
+ OPENAI_API_KEY: z.ZodString;
+ }, z.core.$strip>;
+ };
+ declare const providerSchema: z.ZodEnum<{
+ google: "google";
+ openai: "openai";
+ ollama: "ollama";
+ }>;
+ declare const configSchema: z.ZodObject<{
+ baseURL: z.ZodOptional<z.ZodString>;
+ headers: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;
+ model: z.ZodString;
+ prompt: z.ZodDefault<z.ZodString>;
+ provider: z.ZodEnum<{
+ google: "google";
+ openai: "openai";
+ ollama: "ollama";
+ }>;
+ skipNextRun: z.ZodOptional<z.ZodBoolean>;
+ useConventionalCommits: z.ZodDefault<z.ZodBoolean>;
+ verbose: z.ZodDefault<z.ZodUnion<readonly [z.ZodLiteral<0>, z.ZodLiteral<1>, z.ZodLiteral<2>, z.ZodLiteral<3>]>>;
+ }, z.core.$strip>;
+ type Config = Partial<z.infer<typeof configSchema>>;
+ //#endregion
+ export { Config, Model, Provider, configSchema, defaultPrompt, envSchema, models, providerSchema, providers };
@@ -0,0 +1,34 @@
+ import * as z from "zod";
+
+ //#region src/schema.ts
+ const defaultPrompt = `generate a conventional commit message based on the following diff. the subject should be all lowercase, and lines should not exceed 100 characters \n\n{{diff}}`;
+ const models = ["gemini-2.5-flash"];
+ const providers = [
+ "google",
+ "openai",
+ "ollama"
+ ];
+ const envSchema = {
+ google: z.object({ GOOGLE_GENERATIVE_AI_API_KEY: z.string().min(1) }),
+ ollama: z.object({}),
+ openai: z.object({ OPENAI_API_KEY: z.string().min(1) })
+ };
+ const providerSchema = z.enum(providers);
+ const configSchema = z.object({
+ baseURL: z.string().optional(),
+ headers: z.record(z.string(), z.string()).optional(),
+ model: z.string().meta({ description: "model to use from provider" }),
+ prompt: z.string().default(defaultPrompt).meta({ description: "prompt used to fuel generated commit" }),
+ provider: providerSchema,
+ skipNextRun: z.boolean().optional().meta({ description: "skip the next git hook invocation (usually used when manually running cli)" }),
+ useConventionalCommits: z.boolean().default(true),
+ verbose: z.union([
+ z.literal(0),
+ z.literal(1),
+ z.literal(2),
+ z.literal(3)
+ ]).default(0)
+ });
+
+ //#endregion
+ export { configSchema, defaultPrompt, envSchema, models, providerSchema, providers };
@@ -0,0 +1 @@
+ {"$schema":"https://json-schema.org/draft/2020-12/schema","type":"object","properties":{"baseURL":{"type":"string"},"headers":{"type":"object","propertyNames":{"type":"string"},"additionalProperties":{"type":"string"}},"model":{"description":"model to use from provider","type":"string"},"prompt":{"description":"prompt used to fuel generated commit","default":"generate a conventional commit message based on the following diff. the subject should be all lowercase, and lines should not exceed 100 characters \n\n{{diff}}","type":"string"},"provider":{"type":"string","enum":["google","openai","ollama"]},"skipNextRun":{"description":"skip the next git hook invocation (usually used when manually running cli)","type":"boolean"},"useConventionalCommits":{"default":true,"type":"boolean"},"verbose":{"default":0,"anyOf":[{"type":"number","const":0},{"type":"number","const":1},{"type":"number","const":2},{"type":"number","const":3}]}},"required":["model","prompt","provider","useConventionalCommits","verbose"],"additionalProperties":false}
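The JSON schema above is generated from the same `configSchema` zod object that the CLI uses. As a hedged illustration (not shipped documentation): cosmiconfig's default search places for the derived module name `aicommitmsg` include an rc file such as `.aicommitmsgrc.json`, in addition to the extra `.config/` locations added in `src/config.ts`. Note that the zod schema supplies defaults for `prompt`, `useConventionalCommits`, and `verbose`, so the CLI's parse only strictly needs `provider` and `model`, even though the emitted JSON schema lists the defaulted fields as required. A config file that the CLI would accept could look like:

```json
{
  "provider": "google",
  "model": "gemini-2.5-flash",
  "verbose": 1
}
```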
package/dist/index.cjs ADDED
@@ -0,0 +1,194 @@
+ #!/usr/bin/env node
+ //#region rolldown:runtime
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
+ key = keys[i];
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
+ get: ((k) => from[k]).bind(null, key),
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
+ value: mod,
+ enumerable: true
+ }) : target, mod));
+
+ //#endregion
+ let __dotenvx_dotenvx = require("@dotenvx/dotenvx");
+ __dotenvx_dotenvx = __toESM(__dotenvx_dotenvx);
+ let ai = require("ai");
+ ai = __toESM(ai);
+ let node_child_process = require("node:child_process");
+ node_child_process = __toESM(node_child_process);
+ let node_fs_promises = require("node:fs/promises");
+ node_fs_promises = __toESM(node_fs_promises);
+ let __ai_sdk_google = require("@ai-sdk/google");
+ __ai_sdk_google = __toESM(__ai_sdk_google);
+ let __ai_sdk_openai = require("@ai-sdk/openai");
+ __ai_sdk_openai = __toESM(__ai_sdk_openai);
+ let neverthrow = require("neverthrow");
+ neverthrow = __toESM(neverthrow);
+ let ollama_ai_provider_v2 = require("ollama-ai-provider-v2");
+ ollama_ai_provider_v2 = __toESM(ollama_ai_provider_v2);
+ let zod = require("zod");
+ zod = __toESM(zod);
+ let obug = require("obug");
+ obug = __toESM(obug);
+ let yargs = require("yargs");
+ yargs = __toESM(yargs);
+ let yargs_helpers = require("yargs/helpers");
+ yargs_helpers = __toESM(yargs_helpers);
+ let cosmiconfig = require("cosmiconfig");
+ cosmiconfig = __toESM(cosmiconfig);
+
+ //#region src/schema.ts
+ const defaultPrompt = `generate a conventional commit message based on the following diff. the subject should be all lowercase, and lines should not exceed 100 characters \n\n{{diff}}`;
+ const providers = [
+ "google",
+ "openai",
+ "ollama"
+ ];
+ const envSchema = {
+ google: zod.object({ GOOGLE_GENERATIVE_AI_API_KEY: zod.string().min(1) }),
+ ollama: zod.object({}),
+ openai: zod.object({ OPENAI_API_KEY: zod.string().min(1) })
+ };
+ const providerSchema = zod.enum(providers);
+ const configSchema = zod.object({
+ baseURL: zod.string().optional(),
+ headers: zod.record(zod.string(), zod.string()).optional(),
+ model: zod.string().meta({ description: "model to use from provider" }),
+ prompt: zod.string().default(defaultPrompt).meta({ description: "prompt used to fuel generated commit" }),
+ provider: providerSchema,
+ skipNextRun: zod.boolean().optional().meta({ description: "skip the next git hook invocation (usually used when manually running cli)" }),
+ useConventionalCommits: zod.boolean().default(true),
+ verbose: zod.union([
+ zod.literal(0),
+ zod.literal(1),
+ zod.literal(2),
+ zod.literal(3)
+ ]).default(0)
+ });
+
+ //#endregion
+ //#region src/ai.ts
+ const providerMap = {
+ google: __ai_sdk_google.google,
+ ollama: ollama_ai_provider_v2.ollama,
+ openai: __ai_sdk_openai.openai
+ };
+ function getProvider(provider, model) {
+ const result = envSchema[provider].safeParse(process.env);
+ if (result.error) return (0, neverthrow.err)(new Error(result.error.message));
+ const selected = providerMap[provider];
+ if (selected) return (0, neverthrow.ok)(selected(model));
+ return (0, neverthrow.err)(/* @__PURE__ */ new Error("unable to find message"));
+ }
+
+ //#endregion
+ //#region package.json
+ var name = "@stephansama/ai-commit-msg";
+
+ //#endregion
+ //#region src/util.ts
+ const moduleName = name.split("/").at(-1).replace(/-/g, "");
+ const debug = (0, obug.createDebug)(namespace("debug"));
+ const info = (0, obug.createDebug)(namespace("info"));
+ const warn = (0, obug.createDebug)(namespace("warn"));
+ function namespace(space) {
+ return [moduleName, space].join(":");
+ }
+
+ //#endregion
+ //#region src/args.ts
+ const args = {
+ config: {
+ alias: "c",
+ description: "Path to config file",
+ type: "string"
+ },
+ output: {
+ alias: "o",
+ description: "Output file for commit-msg",
+ type: "string"
+ },
+ verbose: {
+ alias: "v",
+ description: "Enable verbose logging",
+ type: "boolean"
+ }
+ };
+ async function parseArgs() {
+ const yargsInstance = (0, yargs.default)((0, yargs_helpers.hideBin)(process.argv)).options(args).help("h").alias("h", "help").epilogue(`--> @stephansama open-source ${(/* @__PURE__ */ new Date()).getFullYear()}`);
+ const parsed = await yargsInstance.wrap(yargsInstance.terminalWidth()).parse();
+ if (parsed.verbose) (0, obug.enable)(`${moduleName}*`);
+ return parsed;
+ }
+
+ //#endregion
+ //#region src/config.ts
+ const searchPlaces = getSearchPlaces();
+ const defaultConfig = {
+ model: "llama2",
+ provider: "ollama",
+ useConventionalCommits: true
+ };
+ async function loadConfig() {
+ const result = await (0, cosmiconfig.cosmiconfig)(moduleName, { searchPlaces }).search();
+ return configSchema.parse(result?.config || defaultConfig);
+ }
+ function getSearchPlaces() {
+ return [
+ ...(0, cosmiconfig.getDefaultSearchPlaces)(moduleName),
+ `.config/.${moduleName}rc.json`,
+ `.config/.${moduleName}rc.yaml`,
+ `.config/.${moduleName}rc.yml`,
+ `.config/.${moduleName}rc`
+ ];
+ }
+
+ //#endregion
+ //#region src/index.ts
+ async function run() {
+ __dotenvx_dotenvx.default.config();
+ const args$1 = await parseArgs();
+ if (!args$1.output) args$1.output = getCommitEditMsgFile();
+ const config = await loadConfig();
+ if (config.skipNextRun) {
+ console.warn("skipNextRun flag supplied skipping current run");
+ return process.exit(0);
+ }
+ const providerResult = getProvider(config.provider, config.model);
+ if (providerResult.isErr()) {
+ console.error(providerResult.error.message);
+ return process.exit(1);
+ }
+ const model = providerResult.value;
+ const diff = getDiff();
+ if (!diff) throw new Error("unable to get git diff");
+ const { text } = await (0, ai.generateText)({
+ model,
+ prompt: (config.prompt || defaultPrompt).replace("{{diff}}", diff)
+ });
+ await node_fs_promises.writeFile(args$1.output, text);
+ }
+ function getCommitEditMsgFile() {
+ const output = node_child_process.execSync(`git rev-parse --git-path COMMIT_EDITMSG`, { encoding: "utf8" });
+ if (output) return output.trim();
+ throw new Error("unable to find commit edit msg. please use within a git directory or provide the output flag -o");
+ }
+ function getDiff() {
+ const output = node_child_process.execSync(`git --no-pager diff --staged`, { encoding: "utf8" });
+ if (output) return output.substring(0, 8e3).trim();
+ }
+
+ //#endregion
+ exports.run = run;
@@ -0,0 +1,5 @@
+ #!/usr/bin/env node
+ //#region src/index.d.ts
+ declare function run(): Promise<undefined>;
+ //#endregion
+ export { run };
@@ -0,0 +1,5 @@
+ #!/usr/bin/env node
+ //#region src/index.d.ts
+ declare function run(): Promise<undefined>;
+ //#endregion
+ export { run };
package/dist/index.js ADDED
@@ -0,0 +1,158 @@
+ #!/usr/bin/env node
+ import dotenvx from "@dotenvx/dotenvx";
+ import { generateText } from "ai";
+ import * as cp from "node:child_process";
+ import * as fsp from "node:fs/promises";
+ import { google } from "@ai-sdk/google";
+ import { openai } from "@ai-sdk/openai";
+ import { err, ok } from "neverthrow";
+ import { ollama } from "ollama-ai-provider-v2";
+ import * as z from "zod";
+ import { createDebug, enable } from "obug";
+ import yargs from "yargs";
+ import { hideBin } from "yargs/helpers";
+ import { cosmiconfig, getDefaultSearchPlaces } from "cosmiconfig";
+
+ //#region src/schema.ts
+ const defaultPrompt = `generate a conventional commit message based on the following diff. the subject should be all lowercase, and lines should not exceed 100 characters \n\n{{diff}}`;
+ const providers = [
+ "google",
+ "openai",
+ "ollama"
+ ];
+ const envSchema = {
+ google: z.object({ GOOGLE_GENERATIVE_AI_API_KEY: z.string().min(1) }),
+ ollama: z.object({}),
+ openai: z.object({ OPENAI_API_KEY: z.string().min(1) })
+ };
+ const providerSchema = z.enum(providers);
+ const configSchema = z.object({
+ baseURL: z.string().optional(),
+ headers: z.record(z.string(), z.string()).optional(),
+ model: z.string().meta({ description: "model to use from provider" }),
+ prompt: z.string().default(defaultPrompt).meta({ description: "prompt used to fuel generated commit" }),
+ provider: providerSchema,
+ skipNextRun: z.boolean().optional().meta({ description: "skip the next git hook invocation (usually used when manually running cli)" }),
+ useConventionalCommits: z.boolean().default(true),
+ verbose: z.union([
+ z.literal(0),
+ z.literal(1),
+ z.literal(2),
+ z.literal(3)
+ ]).default(0)
+ });
+
+ //#endregion
+ //#region src/ai.ts
+ const providerMap = {
+ google,
+ ollama,
+ openai
+ };
+ function getProvider(provider, model) {
+ const result = envSchema[provider].safeParse(process.env);
+ if (result.error) return err(new Error(result.error.message));
+ const selected = providerMap[provider];
+ if (selected) return ok(selected(model));
+ return err(/* @__PURE__ */ new Error("unable to find message"));
+ }
+
+ //#endregion
+ //#region package.json
+ var name = "@stephansama/ai-commit-msg";
+
+ //#endregion
+ //#region src/util.ts
+ const moduleName = name.split("/").at(-1).replace(/-/g, "");
+ const debug = createDebug(namespace("debug"));
+ const info = createDebug(namespace("info"));
+ const warn = createDebug(namespace("warn"));
+ function namespace(space) {
+ return [moduleName, space].join(":");
+ }
+
+ //#endregion
+ //#region src/args.ts
+ const args = {
+ config: {
+ alias: "c",
+ description: "Path to config file",
+ type: "string"
+ },
+ output: {
+ alias: "o",
+ description: "Output file for commit-msg",
+ type: "string"
+ },
+ verbose: {
+ alias: "v",
+ description: "Enable verbose logging",
+ type: "boolean"
+ }
+ };
+ async function parseArgs() {
+ const yargsInstance = yargs(hideBin(process.argv)).options(args).help("h").alias("h", "help").epilogue(`--> @stephansama open-source ${(/* @__PURE__ */ new Date()).getFullYear()}`);
+ const parsed = await yargsInstance.wrap(yargsInstance.terminalWidth()).parse();
+ if (parsed.verbose) enable(`${moduleName}*`);
+ return parsed;
+ }
+
+ //#endregion
+ //#region src/config.ts
+ const searchPlaces = getSearchPlaces();
+ const defaultConfig = {
+ model: "llama2",
+ provider: "ollama",
+ useConventionalCommits: true
+ };
+ async function loadConfig() {
+ const result = await cosmiconfig(moduleName, { searchPlaces }).search();
+ return configSchema.parse(result?.config || defaultConfig);
+ }
+ function getSearchPlaces() {
+ return [
+ ...getDefaultSearchPlaces(moduleName),
+ `.config/.${moduleName}rc.json`,
+ `.config/.${moduleName}rc.yaml`,
+ `.config/.${moduleName}rc.yml`,
+ `.config/.${moduleName}rc`
+ ];
+ }
+
+ //#endregion
+ //#region src/index.ts
+ async function run() {
+ dotenvx.config();
+ const args$1 = await parseArgs();
+ if (!args$1.output) args$1.output = getCommitEditMsgFile();
+ const config = await loadConfig();
+ if (config.skipNextRun) {
+ console.warn("skipNextRun flag supplied skipping current run");
+ return process.exit(0);
+ }
+ const providerResult = getProvider(config.provider, config.model);
+ if (providerResult.isErr()) {
+ console.error(providerResult.error.message);
+ return process.exit(1);
+ }
+ const model = providerResult.value;
+ const diff = getDiff();
+ if (!diff) throw new Error("unable to get git diff");
+ const { text } = await generateText({
+ model,
+ prompt: (config.prompt || defaultPrompt).replace("{{diff}}", diff)
+ });
+ await fsp.writeFile(args$1.output, text);
+ }
+ function getCommitEditMsgFile() {
+ const output = cp.execSync(`git rev-parse --git-path COMMIT_EDITMSG`, { encoding: "utf8" });
+ if (output) return output.trim();
+ throw new Error("unable to find commit edit msg. please use within a git directory or provide the output flag -o");
+ }
+ function getDiff() {
+ const output = cp.execSync(`git --no-pager diff --staged`, { encoding: "utf8" });
+ if (output) return output.substring(0, 8e3).trim();
+ }
+
+ //#endregion
+ export { run };
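`run()` calls `dotenvx.config()` before validating the provider environment with `envSchema`, so environment keys can come from a `.env` file in the repository. The sketch below is an assumption about a reasonable setup, not packaged documentation; the variable names are taken directly from `src/schema.ts`, and the `ollama` provider has an empty env schema, so it needs no key.

```sh
# .env — loaded by @dotenvx/dotenvx at startup; keep only the line for your provider
GOOGLE_GENERATIVE_AI_API_KEY="<your key>"   # required when provider is "google"
OPENAI_API_KEY="<your key>"                 # required when provider is "openai"
# provider "ollama": no key required
```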
package/package.json ADDED
@@ -0,0 +1,71 @@
+ {
+ "name": "@stephansama/ai-commit-msg",
+ "version": "1.0.0",
+ "description": "generate commit messages using ai",
+ "keywords": [
+ "ai-commit-msg"
+ ],
+ "homepage": "https://packages.stephansama.info/api/@stephansama/ai-commit-msg",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/stephansama/packages",
+ "directory": "core/ai-commit-msg"
+ },
+ "license": "MIT",
+ "author": {
+ "name": "Stephan Randle",
+ "email": "stephanrandle.dev@gmail.com",
+ "url": "https://stephansama.info"
+ },
+ "type": "module",
+ "exports": {
+ ".": {
+ "import": {
+ "types": "./config/schema.d.ts",
+ "default": "./config/schema.js"
+ },
+ "require": {
+ "types": "./config/schema.d.cts",
+ "default": "./config/schema.cjs"
+ }
+ },
+ "./schema.json": "./config/schema.json",
+ "./package.json": "./package.json"
+ },
+ "main": "./config/schema.js",
+ "module": "./config/schema.js",
+ "types": "./config/schema.d.ts",
+ "bin": "./cli.mjs",
+ "files": [
+ "./dist",
+ "./config",
+ "./cli.mjs"
+ ],
+ "dependencies": {
+ "@ai-sdk/google": "^3.0.1",
+ "@ai-sdk/openai": "^3.0.1",
+ "@commitlint/config-conventional": "^19.8.1",
+ "@dotenvx/dotenvx": "^1.51.2",
+ "ai": "^6.0.3",
+ "cosmiconfig": "9.0.0",
+ "es-toolkit": "1.43.0",
+ "neverthrow": "^8.2.0",
+ "obug": "2.1.1",
+ "ollama-ai-provider-v2": "^1.5.5",
+ "yargs": "18.0.0",
+ "zod": "4.2.1"
+ },
+ "devDependencies": {
+ "@types/yargs": "^17.0.33",
+ "tsdown": "0.15.12"
+ },
+ "publishConfig": {
+ "access": "public",
+ "provenance": true
+ },
+ "scripts": {
+ "build": "node build.mjs",
+ "dev": "tsdown --watch",
+ "lint": "eslint ./src/ --pass-on-no-patterns --no-error-on-unmatched-pattern"
+ }
+ }
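Per the `exports` map, the package's root entry resolves to `config/schema.js` (ESM) or `config/schema.cjs` (CJS), so the zod schema can also be consumed as a library, separate from the `cli.mjs` bin. A minimal sketch, assuming the package is installed as a dependency:

```js
// validate-config.mjs — sketch using the exports of @stephansama/ai-commit-msg
import { configSchema, defaultPrompt, providers } from "@stephansama/ai-commit-msg";

// zod fills prompt, useConventionalCommits, and verbose from their defaults.
const config = configSchema.parse({ provider: "ollama", model: "llama2" });

console.log(providers);                        // ["google", "openai", "ollama"]
console.log(config.prompt === defaultPrompt);  // true
```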