@eldrforge/kodrdriv 0.0.3 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.kodrdriv/config.yaml +13 -5
- package/.kodrdriv/context/content.md +1 -0
- package/README.md +421 -4
- package/dist/arguments.js +93 -16
- package/dist/arguments.js.map +1 -1
- package/dist/commands/commit.js +15 -8
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/link.js +183 -0
- package/dist/commands/link.js.map +1 -0
- package/dist/commands/publish.js +216 -0
- package/dist/commands/publish.js.map +1 -0
- package/dist/commands/release.js +4 -1
- package/dist/commands/release.js.map +1 -1
- package/dist/commands/unlink.js +179 -0
- package/dist/commands/unlink.js.map +1 -0
- package/dist/constants.js +24 -3
- package/dist/constants.js.map +1 -1
- package/dist/content/diff.js.map +1 -1
- package/dist/content/log.js.map +1 -1
- package/dist/error/ExitError.js.map +1 -1
- package/dist/logging.js.map +1 -1
- package/dist/main.js +13 -3
- package/dist/main.js.map +1 -1
- package/dist/prompt/instructions/release.md +29 -28
- package/dist/prompt/prompts.js.map +1 -1
- package/dist/types.js +17 -0
- package/dist/types.js.map +1 -1
- package/dist/util/child.js.map +1 -1
- package/dist/util/general.js +13 -1
- package/dist/util/general.js.map +1 -1
- package/dist/util/github.js +144 -0
- package/dist/util/github.js.map +1 -0
- package/dist/util/openai.js.map +1 -1
- package/dist/util/storage.js +4 -0
- package/dist/util/storage.js.map +1 -1
- package/package.json +19 -18
- package/vitest.config.ts +7 -4
- package/.kodrdriv/context/people/context.md +0 -5
- package/.kodrdriv/context/projects/context.md +0 -3
- package/.kodrdriv/instructions/INACTIVE-release-pre.md +0 -1
package/dist/prompt/instructions/release.md
CHANGED

@@ -4,6 +4,24 @@ Task #2: Provide a detailed list of changes involved in this release, and make s
 
 Task #3: Use the content in the <context> section to help you write the release notes and to help make connections with people, projects, issues, features, and other information.
 
+### Output Format
+
+Your response MUST be a valid JSON object with the following structure:
+{
+    "title": "A single-line, concise title for the release.",
+    "body": "The detailed release notes in Markdown format."
+}
+
+**Instructions for the `title` field:**
+- It must be a single line.
+- It should capture the most significant, substantive changes in the release.
+- Focus on what is noticeable to developers using the software.
+- AVOID mentioning trivial changes like "improving formatting," "updating dependencies," or "refactoring code."
+
+**Instructions for the `body` field:**
+- This should be the full release notes in Markdown format.
+- Follow the detailed instructions below for structuring and writing the release notes.
+
 ### Output Restrictions
 
 - Do not mention any people or contributors in the release notes. For example, do not say, "Thanks to John Doe for this feature." Release notes are to be impersonal and not focused on individuals.
@@ -48,12 +66,12 @@ Create release notes that:
 
 3. **Use clear, factual bullet points** under each section. Briefly describe what changed and why it's relevant — **but do not use marketing language**. Avoid vague or exaggerated terms like:
 
-    *
-    *
-    *
-    *
+    * "awesome new feature"
+    * "significant boost"
+    * "exciting changes"
+    * "revolutionary update"
 
-4. **Keep your tone technical, neutral, and useful.** It
+4. **Keep your tone technical, neutral, and useful.** It's okay to include references to:
 
     * Affected files or systems
     * Internal components (if relevant to the audience)
@@ -64,26 +82,9 @@ Create release notes that:
 
 ## 📝 Output Format Example
 
-
-
-
-
-
-
-* Unified `vite.config.ts` and `webpack.config.js` into a single environment-aware module
-* Reduced config nesting depth in `tsconfig.json` to improve readability
-* Updated CI scripts to use `.env.defaults` instead of `.env.local`
-
-**Bug Fixes**
-
-* Fixed crash in config loader when optional fields were undefined
-* Resolved issue with `yarn build` failing on Windows due to missing path escape
-
-**Documentation Updates**
-
-* Rewrote setup instructions in `README.md` to reflect unified config process
-* Removed legacy instructions for `env.local.js`
-
----
-
-Let me know if you'd like a version optimized for a changelog generator plugin or a GitHub Actions pipeline.
+```json
+{
+    "title": "New Generation Types, and Updates to API",
+    "body": "This release focuses on simplifying the configuration system and removing deprecated environment-specific files. Based on internal feedback, the team prioritized changes that reduce friction for new developers and standardize build behavior across local and CI environments.\\n\\n**Improvements**\\n\\n* Unified `vite.config.ts` and `webpack.config.js` into a single environment-aware module\\n* Reduced config nesting depth in `tsconfig.json` to improve readability\\n* Updated CI scripts to use `.env.defaults` instead of `.env.local`\\n\\n**Bug Fixes**\\n\\n* Fixed crash in config loader when optional fields were undefined\\n* Resolved issue with `yarn build` failing on Windows due to missing path escape\\n\\n**Documentation Updates**\\n\\n* Rewrote setup instructions in `README.md` to reflect unified config process\\n* Removed legacy instructions for `env.local.js`"
+}
+```
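The `### Output Format` section added above locks the model into a two-field JSON object. As a rough illustration (not code shipped in this package), a response of that shape could be validated with zod, which kodrdriv already uses for its configuration schema, before the title and body are handed to anything downstream:

```typescript
import { z } from "zod";

// Shape demanded by the prompt above; the schema and function names are illustrative.
const ReleaseSummarySchema = z.object({
    title: z.string(),  // single-line release title
    body: z.string(),   // Markdown release notes
});

// Parse the raw model output. JSON.parse throws on malformed JSON and
// zod rejects a payload that is missing either field.
export const parseReleaseSummary = (raw: string) =>
    ReleaseSummarySchema.parse(JSON.parse(raw));
```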

package/dist/prompt/prompts.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"prompts.js","sources":["../../src/prompt/prompts.ts"],"sourcesContent":["import { Builder, Formatter, Model, Prompt, Request } from '@riotprompt/riotprompt';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\nimport { DEFAULT_INSTRUCTIONS_COMMIT_FILE, DEFAULT_INSTRUCTIONS_RELEASE_FILE, DEFAULT_PERSONA_COMMITTER_FILE, DEFAULT_PERSONA_RELEASER_FILE } from '../constants';\nimport { getLogger } from '../logging';\nimport { Config as RunConfig } from '../types';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = path.dirname(__filename);\n\nexport interface Factory {\n createCommitPrompt: (content: string, logContent: string, context?: string) => Promise<Prompt>;\n createReleasePrompt: (content: string, diffContent: string, context?: string) => Promise<Prompt>;\n format: (prompt: Prompt) => Request;\n}\n\nexport const create = (model: Model, runConfig: RunConfig): Factory => {\n\n const logger = getLogger();\n\n const createCommitPrompt = async (content: string, logContent: string, context?: string): Promise<Prompt> => {\n let builder: Builder.Instance = Builder.create({ logger, basePath: __dirname, overridePath: runConfig?.configDirectory, overrides: runConfig?.overrides || false });\n builder = await builder.addPersonaPath(DEFAULT_PERSONA_COMMITTER_FILE);\n builder = await builder.addInstructionPath(DEFAULT_INSTRUCTIONS_COMMIT_FILE);\n if (context) {\n builder = await builder.addContent(`\\n\\n[User Context]\\n${context}`);\n }\n builder = await builder.addContent(`\\n\\n[Diff]\\n${content}`);\n builder = await builder.addContent(`\\n\\n[Log]\\n${logContent}`);\n\n if (runConfig.contextDirectories) {\n builder = await builder.loadContext(runConfig.contextDirectories);\n }\n\n const prompt = await builder.build();\n return prompt;\n };\n\n const createReleasePrompt = async (content: string, diffContent: string, context?: string): Promise<Prompt> => {\n let builder: Builder.Instance = Builder.create({ logger, basePath: __dirname, overridePath: runConfig?.configDirectory, overrides: runConfig?.overrides || false });\n builder = await builder.addPersonaPath(DEFAULT_PERSONA_RELEASER_FILE);\n builder = await builder.addInstructionPath(DEFAULT_INSTRUCTIONS_RELEASE_FILE);\n if (context) {\n builder = await builder.addContent(`\\n\\n[User Context]\\n${context}`);\n }\n builder = await builder.addContent(`\\n\\n[Log]\\n${content}`);\n builder = await builder.addContent(`\\n\\n[Diff]\\n${diffContent}`);\n if (runConfig.contextDirectories) {\n builder = await builder.loadContext(runConfig.contextDirectories);\n }\n\n const prompt = await builder.build();\n return prompt;\n }\n\n const format = (prompt: Prompt): Request => {\n const formatter = Formatter.create();\n return formatter.formatPrompt(model, prompt);\n };\n\n return {\n createCommitPrompt,\n createReleasePrompt,\n format,\n };\n}\n\n"],"names":["__filename","fileURLToPath","url","__dirname","path","dirname","create","model","runConfig","logger","getLogger","createCommitPrompt","content","logContent","context","builder","Builder","basePath","overridePath","configDirectory","overrides","addPersonaPath","DEFAULT_PERSONA_COMMITTER_FILE","addInstructionPath","DEFAULT_INSTRUCTIONS_COMMIT_FILE","addContent","contextDirectories","loadContext","prompt","build","createReleasePrompt","diffContent","DEFAULT_PERSONA_RELEASER_FILE","DEFAULT_INSTRUCTIONS_RELEASE_FILE","format","formatter","Formatter","formatPrompt"],"mappings":";;;;;;AAOA,MAAMA,UAAAA,GAAaC,
+
{"version":3,"file":"prompts.js","sources":["../../src/prompt/prompts.ts"],"sourcesContent":["import { Builder, Formatter, Model, Prompt, Request } from '@riotprompt/riotprompt';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\nimport { DEFAULT_INSTRUCTIONS_COMMIT_FILE, DEFAULT_INSTRUCTIONS_RELEASE_FILE, DEFAULT_PERSONA_COMMITTER_FILE, DEFAULT_PERSONA_RELEASER_FILE } from '../constants';\nimport { getLogger } from '../logging';\nimport { Config as RunConfig } from '../types';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = path.dirname(__filename);\n\nexport interface Factory {\n createCommitPrompt: (content: string, logContent: string, context?: string) => Promise<Prompt>;\n createReleasePrompt: (content: string, diffContent: string, context?: string) => Promise<Prompt>;\n format: (prompt: Prompt) => Request;\n}\n\nexport const create = (model: Model, runConfig: RunConfig): Factory => {\n\n const logger = getLogger();\n\n const createCommitPrompt = async (content: string, logContent: string, context?: string): Promise<Prompt> => {\n let builder: Builder.Instance = Builder.create({ logger, basePath: __dirname, overridePath: runConfig?.configDirectory, overrides: runConfig?.overrides || false });\n builder = await builder.addPersonaPath(DEFAULT_PERSONA_COMMITTER_FILE);\n builder = await builder.addInstructionPath(DEFAULT_INSTRUCTIONS_COMMIT_FILE);\n if (context) {\n builder = await builder.addContent(`\\n\\n[User Context]\\n${context}`);\n }\n builder = await builder.addContent(`\\n\\n[Diff]\\n${content}`);\n builder = await builder.addContent(`\\n\\n[Log]\\n${logContent}`);\n\n if (runConfig.contextDirectories) {\n builder = await builder.loadContext(runConfig.contextDirectories);\n }\n\n const prompt = await builder.build();\n return prompt;\n };\n\n const createReleasePrompt = async (content: string, diffContent: string, context?: string): Promise<Prompt> => {\n let builder: Builder.Instance = Builder.create({ logger, basePath: __dirname, overridePath: runConfig?.configDirectory, overrides: runConfig?.overrides || false });\n builder = await builder.addPersonaPath(DEFAULT_PERSONA_RELEASER_FILE);\n builder = await builder.addInstructionPath(DEFAULT_INSTRUCTIONS_RELEASE_FILE);\n if (context) {\n builder = await builder.addContent(`\\n\\n[User Context]\\n${context}`);\n }\n builder = await builder.addContent(`\\n\\n[Log]\\n${content}`);\n builder = await builder.addContent(`\\n\\n[Diff]\\n${diffContent}`);\n if (runConfig.contextDirectories) {\n builder = await builder.loadContext(runConfig.contextDirectories);\n }\n\n const prompt = await builder.build();\n return prompt;\n }\n\n const format = (prompt: Prompt): Request => {\n const formatter = Formatter.create();\n return formatter.formatPrompt(model, prompt);\n };\n\n return {\n createCommitPrompt,\n createReleasePrompt,\n format,\n 
};\n}\n\n"],"names":["__filename","fileURLToPath","url","__dirname","path","dirname","create","model","runConfig","logger","getLogger","createCommitPrompt","content","logContent","context","builder","Builder","basePath","overridePath","configDirectory","overrides","addPersonaPath","DEFAULT_PERSONA_COMMITTER_FILE","addInstructionPath","DEFAULT_INSTRUCTIONS_COMMIT_FILE","addContent","contextDirectories","loadContext","prompt","build","createReleasePrompt","diffContent","DEFAULT_PERSONA_RELEASER_FILE","DEFAULT_INSTRUCTIONS_RELEASE_FILE","format","formatter","Formatter","formatPrompt"],"mappings":";;;;;;AAOA,MAAMA,UAAAA,GAAaC,aAAAA,CAAc,MAAA,CAAA,IAAA,CAAYC,GAAG,CAAA;AAChD,MAAMC,SAAAA,GAAYC,IAAAA,CAAKC,OAAO,CAACL,UAAAA,CAAAA;AAQxB,MAAMM,MAAAA,GAAS,CAACC,KAAAA,EAAcC,SAAAA,GAAAA;AAEjC,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;IAEf,MAAMC,kBAAAA,GAAqB,OAAOC,OAAAA,EAAiBC,UAAAA,EAAoBC,OAAAA,GAAAA;QACnE,IAAIC,OAAAA,GAA4BC,OAAAA,CAAQV,MAAM,CAAC;AAAEG,YAAAA,MAAAA;YAAQQ,QAAAA,EAAUd,SAAAA;AAAWe,YAAAA,YAAY,EAAEV,SAAAA,KAAAA,IAAAA,IAAAA,SAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,SAAAA,CAAWW,eAAe;AAAEC,YAAAA,SAAAA,EAAWZ,CAAAA,SAAAA,KAAAA,IAAAA,IAAAA,SAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,SAAAA,CAAWY,SAAS,KAAI;AAAM,SAAA,CAAA;QACjKL,OAAAA,GAAU,MAAMA,OAAAA,CAAQM,cAAc,CAACC,8BAAAA,CAAAA;QACvCP,OAAAA,GAAU,MAAMA,OAAAA,CAAQQ,kBAAkB,CAACC,gCAAAA,CAAAA;AAC3C,QAAA,IAAIV,OAAAA,EAAS;AACTC,YAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQU,UAAU,CAAC,CAAC,oBAAoB,EAAEX,OAAAA,CAAAA,CAAS,CAAA;AACvE;AACAC,QAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQU,UAAU,CAAC,CAAC,YAAY,EAAEb,OAAAA,CAAAA,CAAS,CAAA;AAC3DG,QAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQU,UAAU,CAAC,CAAC,WAAW,EAAEZ,UAAAA,CAAAA,CAAY,CAAA;QAE7D,IAAIL,SAAAA,CAAUkB,kBAAkB,EAAE;AAC9BX,YAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQY,WAAW,CAACnB,UAAUkB,kBAAkB,CAAA;AACpE;QAEA,MAAME,MAAAA,GAAS,MAAMb,OAAAA,CAAQc,KAAK,EAAA;QAClC,OAAOD,MAAAA;AACX,KAAA;IAEA,MAAME,mBAAAA,GAAsB,OAAOlB,OAAAA,EAAiBmB,WAAAA,EAAqBjB,OAAAA,GAAAA;QACrE,IAAIC,OAAAA,GAA4BC,OAAAA,CAAQV,MAAM,CAAC;AAAEG,YAAAA,MAAAA;YAAQQ,QAAAA,EAAUd,SAAAA;AAAWe,YAAAA,YAAY,EAAEV,SAAAA,KAAAA,IAAAA,IAAAA,SAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,SAAAA,CAAWW,eAAe;AAAEC,YAAAA,SAAAA,EAAWZ,CAAAA,SAAAA,KAAAA,IAAAA,IAAAA,SAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,SAAAA,CAAWY,SAAS,KAAI;AAAM,SAAA,CAAA;QACjKL,OAAAA,GAAU,MAAMA,OAAAA,CAAQM,cAAc,CAACW,6BAAAA,CAAAA;QACvCjB,OAAAA,GAAU,MAAMA,OAAAA,CAAQQ,kBAAkB,CAACU,iCAAAA,CAAAA;AAC3C,QAAA,IAAInB,OAAAA,EAAS;AACTC,YAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQU,UAAU,CAAC,CAAC,oBAAoB,EAAEX,OAAAA,CAAAA,CAAS,CAAA;AACvE;AACAC,QAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQU,UAAU,CAAC,CAAC,WAAW,EAAEb,OAAAA,CAAAA,CAAS,CAAA;AAC1DG,QAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQU,UAAU,CAAC,CAAC,YAAY,EAAEM,WAAAA,CAAAA,CAAa,CAAA;QAC/D,IAAIvB,SAAAA,CAAUkB,kBAAkB,EAAE;AAC9BX,YAAAA,OAAAA,GAAU,MAAMA,OAAAA,CAAQY,WAAW,CAACnB,UAAUkB,kBAAkB,CAAA;AACpE;QAEA,MAAME,MAAAA,GAAS,MAAMb,OAAAA,CAAQc,KAAK,EAAA;QAClC,OAAOD,MAAAA;AACX,KAAA;AAEA,IAAA,MAAMM,SAAS,CAACN,MAAAA,GAAAA;QACZ,MAAMO,SAAAA,GAAYC,UAAU9B,MAAM,EAAA;QAClC,OAAO6B,SAAAA,CAAUE,YAAY,CAAC9B,KAAAA,EAAOqB,MAAAA,CAAAA;AACzC,KAAA;IAEA,OAAO;AACHjB,QAAAA,kBAAAA;AACAmB,QAAAA,mBAAAA;AACAI,QAAAA;AACJ,KAAA;AACJ;;;;"}

package/dist/types.js
CHANGED

@@ -9,6 +9,7 @@ const ConfigSchema = z.object({
     model: z.string().optional(),
     contextDirectories: z.array(z.string()).optional(),
     commit: z.object({
+        add: z.boolean().optional(),
         cached: z.boolean().optional(),
         sendit: z.boolean().optional(),
         messageLimit: z.number().optional(),
@@ -20,6 +21,22 @@ const ConfigSchema = z.object({
         messageLimit: z.number().optional(),
         context: z.string().optional()
     }).optional(),
+    publish: z.object({
+        mergeMethod: z.enum([
+            'merge',
+            'squash',
+            'rebase'
+        ]).optional(),
+        dependencyUpdatePatterns: z.array(z.string()).optional(),
+        requiredEnvVars: z.array(z.string()).optional(),
+        linkWorkspacePackages: z.boolean().optional(),
+        unlinkWorkspacePackages: z.boolean().optional()
+    }).optional(),
+    link: z.object({
+        scopeRoots: z.record(z.string(), z.string()).optional(),
+        workspaceFile: z.string().optional(),
+        dryRun: z.boolean().optional()
+    }).optional(),
     excludedPatterns: z.array(z.string()).optional()
 });
 z.object({
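The new `publish` and `link` sections are easiest to read in isolation. The sketch below restates their shapes exactly as added to `ConfigSchema` above; the parsed values are hypothetical and only show what the schema would accept.

```typescript
import { z } from "zod";

// Same shapes as the `publish` and `link` sections added to ConfigSchema above.
const PublishSchema = z.object({
    mergeMethod: z.enum(['merge', 'squash', 'rebase']).optional(),
    dependencyUpdatePatterns: z.array(z.string()).optional(),
    requiredEnvVars: z.array(z.string()).optional(),
    linkWorkspacePackages: z.boolean().optional(),
    unlinkWorkspacePackages: z.boolean().optional(),
});

const LinkSchema = z.object({
    scopeRoots: z.record(z.string(), z.string()).optional(),
    workspaceFile: z.string().optional(),
    dryRun: z.boolean().optional(),
});

// Every field is optional, so partial objects parse; these values are made up.
const publish = PublishSchema.parse({
    mergeMethod: 'squash',
    requiredEnvVars: ['GITHUB_TOKEN', 'OPENAI_API_KEY'],
});
const link = LinkSchema.parse({
    scopeRoots: { '@eldrforge': '../' },  // scope -> directory holding local checkouts
    dryRun: true,
});

console.log(publish.mergeMethod, link.dryRun); // squash true
```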
package/dist/types.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"types.js","sources":["../src/types.ts"],"sourcesContent":["import * as Cardigantime from '@theunwalked/cardigantime';\nimport { z } from \"zod\";\n\nexport const ConfigSchema = z.object({\n dryRun: z.boolean().optional(),\n verbose: z.boolean().optional(),\n debug: z.boolean().optional(),\n overrides: z.boolean().optional(),\n instructions: z.string().optional(),\n model: z.string().optional(),\n contextDirectories: z.array(z.string()).optional(),\n commit: z.object({\n cached: z.boolean().optional(),\n sendit: z.boolean().optional(),\n messageLimit: z.number().optional(),\n context: z.string().optional(),\n }).optional(),\n release: z.object({\n from: z.string().optional(),\n to: z.string().optional(),\n messageLimit: z.number().optional(),\n context: z.string().optional(),\n }).optional(),\n excludedPatterns: z.array(z.string()).optional(),\n});\n\nexport const SecureConfigSchema = z.object({\n openaiApiKey: z.string().optional(),\n});\n\nexport const CommandConfigSchema = z.object({\n commandName: z.string().optional(),\n});\n\nexport type Config = z.infer<typeof ConfigSchema> & Cardigantime.Config;\nexport type SecureConfig = z.infer<typeof SecureConfigSchema>;\nexport type CommandConfig = z.infer<typeof CommandConfigSchema>;\n"],"names":["ConfigSchema","z","object","dryRun","boolean","optional","verbose","debug","overrides","instructions","string","model","contextDirectories","array","commit","cached","sendit","messageLimit","number","context","release","from","to","excludedPatterns","openaiApiKey","commandName"],"mappings":";;
+
{"version":3,"file":"types.js","sources":["../src/types.ts"],"sourcesContent":["import * as Cardigantime from '@theunwalked/cardigantime';\nimport { z } from \"zod\";\n\nexport const ConfigSchema = z.object({\n dryRun: z.boolean().optional(),\n verbose: z.boolean().optional(),\n debug: z.boolean().optional(),\n overrides: z.boolean().optional(),\n instructions: z.string().optional(),\n model: z.string().optional(),\n contextDirectories: z.array(z.string()).optional(),\n commit: z.object({\n add: z.boolean().optional(),\n cached: z.boolean().optional(),\n sendit: z.boolean().optional(),\n messageLimit: z.number().optional(),\n context: z.string().optional(),\n }).optional(),\n release: z.object({\n from: z.string().optional(),\n to: z.string().optional(),\n messageLimit: z.number().optional(),\n context: z.string().optional(),\n }).optional(),\n publish: z.object({\n mergeMethod: z.enum(['merge', 'squash', 'rebase']).optional(),\n dependencyUpdatePatterns: z.array(z.string()).optional(),\n requiredEnvVars: z.array(z.string()).optional(),\n linkWorkspacePackages: z.boolean().optional(),\n unlinkWorkspacePackages: z.boolean().optional(),\n }).optional(),\n link: z.object({\n scopeRoots: z.record(z.string(), z.string()).optional(),\n workspaceFile: z.string().optional(),\n dryRun: z.boolean().optional(),\n }).optional(),\n excludedPatterns: z.array(z.string()).optional(),\n});\n\nexport const SecureConfigSchema = z.object({\n openaiApiKey: z.string().optional(),\n});\n\nexport const CommandConfigSchema = z.object({\n commandName: z.string().optional(),\n});\n\nexport type Config = z.infer<typeof ConfigSchema> & Cardigantime.Config;\nexport type SecureConfig = z.infer<typeof SecureConfigSchema>;\nexport type CommandConfig = z.infer<typeof CommandConfigSchema>;\n\nexport type MergeMethod = 'merge' | 'squash' | 'rebase';\n\nexport interface PullRequest {\n html_url: string;\n number: number;\n labels: {\n name: string;\n }[];\n}\n\nexport type ReleaseSummary = {\n title: string;\n body: string;\n}\n\nexport type ReleaseConfig = {\n from?: string;\n to?: string;\n context?: string;\n}\n\nexport type PublishConfig = {\n from?: string;\n to?: 
string;\n}\n"],"names":["ConfigSchema","z","object","dryRun","boolean","optional","verbose","debug","overrides","instructions","string","model","contextDirectories","array","commit","add","cached","sendit","messageLimit","number","context","release","from","to","publish","mergeMethod","enum","dependencyUpdatePatterns","requiredEnvVars","linkWorkspacePackages","unlinkWorkspacePackages","link","scopeRoots","record","workspaceFile","excludedPatterns","openaiApiKey","commandName"],"mappings":";;AAGO,MAAMA,YAAAA,GAAeC,CAAAA,CAAEC,MAAM,CAAC;IACjCC,MAAAA,EAAQF,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;IAC5BC,OAAAA,EAASL,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;IAC7BE,KAAAA,EAAON,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;IAC3BG,SAAAA,EAAWP,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;IAC/BI,YAAAA,EAAcR,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ,EAAA;IACjCM,KAAAA,EAAOV,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ,EAAA;AAC1BO,IAAAA,kBAAAA,EAAoBX,EAAEY,KAAK,CAACZ,CAAAA,CAAES,MAAM,IAAIL,QAAQ,EAAA;IAChDS,MAAAA,EAAQb,CAAAA,CAAEC,MAAM,CAAC;QACba,GAAAA,EAAKd,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;QACzBW,MAAAA,EAAQf,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;QAC5BY,MAAAA,EAAQhB,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;QAC5Ba,YAAAA,EAAcjB,CAAAA,CAAEkB,MAAM,EAAA,CAAGd,QAAQ,EAAA;QACjCe,OAAAA,EAASnB,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ;AAChC,KAAA,CAAA,CAAGA,QAAQ,EAAA;IACXgB,OAAAA,EAASpB,CAAAA,CAAEC,MAAM,CAAC;QACdoB,IAAAA,EAAMrB,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ,EAAA;QACzBkB,EAAAA,EAAItB,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ,EAAA;QACvBa,YAAAA,EAAcjB,CAAAA,CAAEkB,MAAM,EAAA,CAAGd,QAAQ,EAAA;QACjCe,OAAAA,EAASnB,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ;AAChC,KAAA,CAAA,CAAGA,QAAQ,EAAA;IACXmB,OAAAA,EAASvB,CAAAA,CAAEC,MAAM,CAAC;QACduB,WAAAA,EAAaxB,CAAAA,CAAEyB,IAAI,CAAC;AAAC,YAAA,OAAA;AAAS,YAAA,QAAA;AAAU,YAAA;AAAS,SAAA,CAAA,CAAErB,QAAQ,EAAA;AAC3DsB,QAAAA,wBAAAA,EAA0B1B,EAAEY,KAAK,CAACZ,CAAAA,CAAES,MAAM,IAAIL,QAAQ,EAAA;AACtDuB,QAAAA,eAAAA,EAAiB3B,EAAEY,KAAK,CAACZ,CAAAA,CAAES,MAAM,IAAIL,QAAQ,EAAA;QAC7CwB,qBAAAA,EAAuB5B,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ,EAAA;QAC3CyB,uBAAAA,EAAyB7B,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ;AACjD,KAAA,CAAA,CAAGA,QAAQ,EAAA;IACX0B,IAAAA,EAAM9B,CAAAA,CAAEC,MAAM,CAAC;QACX8B,UAAAA,EAAY/B,CAAAA,CAAEgC,MAAM,CAAChC,CAAAA,CAAES,MAAM,EAAA,EAAIT,CAAAA,CAAES,MAAM,EAAA,CAAA,CAAIL,QAAQ,EAAA;QACrD6B,aAAAA,EAAejC,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ,EAAA;QAClCF,MAAAA,EAAQF,CAAAA,CAAEG,OAAO,EAAA,CAAGC,QAAQ;AAChC,KAAA,CAAA,CAAGA,QAAQ,EAAA;AACX8B,IAAAA,gBAAAA,EAAkBlC,EAAEY,KAAK,CAACZ,CAAAA,CAAES,MAAM,IAAIL,QAAQ;AAClD,CAAA;AAEkCJ,CAAAA,CAAEC,MAAM,CAAC;IACvCkC,YAAAA,EAAcnC,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ;AACrC,CAAA;AAEmCJ,CAAAA,CAAEC,MAAM,CAAC;IACxCmC,WAAAA,EAAapC,CAAAA,CAAES,MAAM,EAAA,CAAGL,QAAQ;AACpC,CAAA;;;;"}

package/dist/util/child.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"child.js","sources":["../../src/util/child.ts"],"sourcesContent":["#!/usr/bin/env node\nimport child_process, { exec } from 'child_process';\nimport util from 'util';\n\nexport async function run(command: string, options: child_process.ExecOptions = {}): Promise<{ stdout: string; stderr: string }> {\n const execPromise = util.promisify(exec);\n return execPromise(command, options);\n}"],"names":["run","command","options","execPromise","util","promisify","exec"],"mappings":";;;;AAIO,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAeA,
+
{"version":3,"file":"child.js","sources":["../../src/util/child.ts"],"sourcesContent":["#!/usr/bin/env node\nimport child_process, { exec } from 'child_process';\nimport util from 'util';\n\nexport async function run(command: string, options: child_process.ExecOptions = {}): Promise<{ stdout: string; stderr: string }> {\n const execPromise = util.promisify(exec);\n return execPromise(command, options);\n}"],"names":["run","command","options","execPromise","util","promisify","exec"],"mappings":";;;;AAIO,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAeA,GAAAA,CAAIC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAe,EAAEC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAqC,EAAE,CAAA,CAAA,CAAA;IAC9E,CAAA,CAAA,CAAA,CAAA,CAAA,CAAMC,WAAAA,CAAAA,CAAAA,CAAcC,CAAAA,CAAAA,CAAAA,CAAAA,CAAKC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAS,CAACC,IAAAA,CAAAA,CAAAA;AACnC,CAAA,CAAA,CAAA,CAAA,OAAOH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,EAAYF,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAASC,OAAAA,CAAAA,CAAAA;AAChC,CAAA;;"}

package/dist/util/general.js
CHANGED

@@ -43,6 +43,18 @@ const stringifyJSON = function(obj, options = {
     }
     return '';
 };
+const incrementPatchVersion = (version)=>{
+    const parts = version.split('.');
+    if (parts.length !== 3) {
+        throw new Error(`Invalid version string: ${version}`);
+    }
+    const patch = parseInt(parts[2], 10);
+    if (isNaN(patch)) {
+        throw new Error(`Invalid patch version: ${parts[2]}`);
+    }
+    parts[2] = (patch + 1).toString();
+    return parts.join('.');
+};
 
-export { stringifyJSON };
+export { incrementPatchVersion, stringifyJSON };
 //# sourceMappingURL=general.js.map
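`incrementPatchVersion` is a deliberately strict patch bump: it accepts only a plain `major.minor.patch` string and increments the final segment. A small usage sketch (import path shown relative to `dist/util`):

```typescript
import { incrementPatchVersion } from './general.js';

console.log(incrementPatchVersion('0.0.6'));  // '0.0.7'
console.log(incrementPatchVersion('1.2.9'));  // '1.2.10'

// Anything that does not split into exactly three dot-separated parts throws,
// e.g. incrementPatchVersion('1.2') or incrementPatchVersion('1.2.3-beta.1').
// Note that '1.2.3-beta' still has three parts; parseInt('3-beta', 10) yields 3,
// so a pre-release suffix in that position would be silently dropped ('1.2.4').
```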
package/dist/util/general.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"general.js","sources":["../../src/util/general.ts"],"sourcesContent":["// Utility function for deep merging two objects.\nexport function deepMerge(target: any, source: any): any {\n for (const key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n if (key === \"__proto__\" || key === \"constructor\") {\n continue; // Skip prototype-polluting keys\n }\n if (source[key] && typeof source[key] === 'object' && !Array.isArray(source[key])) {\n if (!target[key]) {\n target[key] = {};\n }\n deepMerge(target[key], source[key]);\n } else {\n target[key] = source[key];\n }\n }\n }\n return target;\n}\n\n//Recursive implementation of jSON.stringify;\nexport const stringifyJSON = function (obj: any, options: { depth: number } = { depth: 0 }): string {\n\n if (options.depth > 10) {\n return '{\"error\": \"Maximum depth reached\"}';\n }\n\n const arrOfKeyVals: string[] = [];\n const arrVals: string[] = [];\n let objKeys: string[] = [];\n\n /*********CHECK FOR PRIMITIVE TYPES**********/\n if (typeof obj === 'number' || typeof obj === 'boolean' || obj === null)\n return '' + obj;\n else if (typeof obj === 'string')\n return '\"' + obj + '\"';\n\n /*********CHECK FOR ARRAY**********/\n else if (Array.isArray(obj)) {\n //check for empty array\n if (obj[0] === undefined)\n return '[]';\n else {\n obj.forEach(function (el) {\n arrVals.push(stringifyJSON(el, { depth: options.depth + 1 }));\n });\n return '[' + arrVals + ']';\n }\n }\n /*********CHECK FOR OBJECT**********/\n else if (obj instanceof Object) {\n //get object keys\n objKeys = Object.keys(obj);\n //set key output;\n objKeys.forEach(function (key) {\n const keyOut = '\"' + key + '\":';\n const keyValOut = obj[key];\n //skip functions and undefined properties\n if (keyValOut instanceof Function || keyValOut === undefined)\n arrOfKeyVals.push('');\n else if (typeof keyValOut === 'string')\n arrOfKeyVals.push(keyOut + '\"' + keyValOut + '\"');\n else if (typeof keyValOut === 'boolean' || typeof keyValOut === 'number' || keyValOut === null)\n arrOfKeyVals.push(keyOut + keyValOut);\n //check for nested objects, call recursively until no more objects\n else if (keyValOut instanceof Object) {\n arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut, { depth: options.depth + 1 }));\n }\n });\n return '{' + arrOfKeyVals + '}';\n }\n return '';\n};"],"names":["stringifyJSON","obj","options","depth","arrOfKeyVals","arrVals","objKeys","Array","isArray","undefined","forEach","el","push","Object","keys","key","keyOut","keyValOut","Function"],"mappings":"AAAA;AAoBA;
+
{"version":3,"file":"general.js","sources":["../../src/util/general.ts"],"sourcesContent":["// Utility function for deep merging two objects.\nexport function deepMerge(target: any, source: any): any {\n for (const key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n if (key === \"__proto__\" || key === \"constructor\") {\n continue; // Skip prototype-polluting keys\n }\n if (source[key] && typeof source[key] === 'object' && !Array.isArray(source[key])) {\n if (!target[key]) {\n target[key] = {};\n }\n deepMerge(target[key], source[key]);\n } else {\n target[key] = source[key];\n }\n }\n }\n return target;\n}\n\n//Recursive implementation of jSON.stringify;\nexport const stringifyJSON = function (obj: any, options: { depth: number } = { depth: 0 }): string {\n\n if (options.depth > 10) {\n return '{\"error\": \"Maximum depth reached\"}';\n }\n\n const arrOfKeyVals: string[] = [];\n const arrVals: string[] = [];\n let objKeys: string[] = [];\n\n /*********CHECK FOR PRIMITIVE TYPES**********/\n if (typeof obj === 'number' || typeof obj === 'boolean' || obj === null)\n return '' + obj;\n else if (typeof obj === 'string')\n return '\"' + obj + '\"';\n\n /*********CHECK FOR ARRAY**********/\n else if (Array.isArray(obj)) {\n //check for empty array\n if (obj[0] === undefined)\n return '[]';\n else {\n obj.forEach(function (el) {\n arrVals.push(stringifyJSON(el, { depth: options.depth + 1 }));\n });\n return '[' + arrVals + ']';\n }\n }\n /*********CHECK FOR OBJECT**********/\n else if (obj instanceof Object) {\n //get object keys\n objKeys = Object.keys(obj);\n //set key output;\n objKeys.forEach(function (key) {\n const keyOut = '\"' + key + '\":';\n const keyValOut = obj[key];\n //skip functions and undefined properties\n if (keyValOut instanceof Function || keyValOut === undefined)\n arrOfKeyVals.push('');\n else if (typeof keyValOut === 'string')\n arrOfKeyVals.push(keyOut + '\"' + keyValOut + '\"');\n else if (typeof keyValOut === 'boolean' || typeof keyValOut === 'number' || keyValOut === null)\n arrOfKeyVals.push(keyOut + keyValOut);\n //check for nested objects, call recursively until no more objects\n else if (keyValOut instanceof Object) {\n arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut, { depth: options.depth + 1 }));\n }\n });\n return '{' + arrOfKeyVals + '}';\n }\n return '';\n};\n\nexport const incrementPatchVersion = (version: string): string => {\n const parts = version.split('.');\n if (parts.length !== 3) {\n throw new Error(`Invalid version string: ${version}`);\n }\n const patch = parseInt(parts[2], 10);\n if (isNaN(patch)) {\n throw new Error(`Invalid patch version: ${parts[2]}`);\n }\n parts[2] = (patch + 1).toString();\n return 
parts.join('.');\n};"],"names":["stringifyJSON","obj","options","depth","arrOfKeyVals","arrVals","objKeys","Array","isArray","undefined","forEach","el","push","Object","keys","key","keyOut","keyValOut","Function","incrementPatchVersion","version","parts","split","length","Error","patch","parseInt","isNaN","toString","join"],"mappings":"AAAA;AAoBA;AACO,MAAMA,aAAAA,GAAgB,SAAUC,GAAQ,EAAEC,OAAAA,GAA6B;IAAEC,KAAAA,EAAO;AAAE,CAAC,EAAA;IAEtF,IAAID,OAAAA,CAAQC,KAAK,GAAG,EAAA,EAAI;QACpB,OAAO,oCAAA;AACX;AAEA,IAAA,MAAMC,eAAyB,EAAE;AACjC,IAAA,MAAMC,UAAoB,EAAE;AAC5B,IAAA,IAAIC,UAAoB,EAAE;mDAG1B,IAAI,OAAOL,GAAAA,KAAQ,QAAA,IAAY,OAAOA,GAAAA,KAAQ,SAAA,IAAaA,GAAAA,KAAQ,IAAA,EAC/D,OAAO,EAAA,GAAKA,GAAAA;AACX,SAAA,IAAI,OAAOA,GAAAA,KAAQ,QAAA,EACpB,OAAO,MAAMA,GAAAA,GAAM,GAAA;SAGlB,IAAIM,KAAAA,CAAMC,OAAO,CAACP,GAAAA,CAAAA,EAAM;;AAEzB,QAAA,IAAIA,GAAG,CAAC,CAAA,CAAE,KAAKQ,WACX,OAAO,IAAA;AACN,aAAA;YACDR,GAAAA,CAAIS,OAAO,CAAC,SAAUC,EAAE,EAAA;gBACpBN,OAAAA,CAAQO,IAAI,CAACZ,aAAAA,CAAcW,EAAAA,EAAI;oBAAER,KAAAA,EAAOD,OAAAA,CAAQC,KAAK,GAAG;AAAE,iBAAA,CAAA,CAAA;AAC9D,aAAA,CAAA;AACA,YAAA,OAAO,MAAME,OAAAA,GAAU,GAAA;AAC3B;KACJ,MAEK,IAAIJ,eAAeY,MAAAA,EAAQ;;QAE5BP,OAAAA,GAAUO,MAAAA,CAAOC,IAAI,CAACb,GAAAA,CAAAA;;QAEtBK,OAAAA,CAAQI,OAAO,CAAC,SAAUK,GAAG,EAAA;YACzB,MAAMC,MAAAA,GAAS,MAAMD,GAAAA,GAAM,IAAA;YAC3B,MAAME,SAAAA,GAAYhB,GAAG,CAACc,GAAAA,CAAI;;AAE1B,YAAA,IAAIE,qBAAqBC,QAAAA,IAAYD,SAAAA,KAAcR,SAAAA,EAC/CL,YAAAA,CAAaQ,IAAI,CAAC,EAAA,CAAA;iBACjB,IAAI,OAAOK,cAAc,QAAA,EAC1Bb,YAAAA,CAAaQ,IAAI,CAACI,MAAAA,GAAS,MAAMC,SAAAA,GAAY,GAAA,CAAA;iBAC5C,IAAI,OAAOA,SAAAA,KAAc,SAAA,IAAa,OAAOA,SAAAA,KAAc,QAAA,IAAYA,SAAAA,KAAc,IAAA,EACtFb,YAAAA,CAAaQ,IAAI,CAACI,MAAAA,GAASC,SAAAA,CAAAA;AAE1B,iBAAA,IAAIA,qBAAqBJ,MAAAA,EAAQ;AAClCT,gBAAAA,YAAAA,CAAaQ,IAAI,CAACI,MAAAA,GAAShB,aAAAA,CAAciB,SAAAA,EAAW;oBAAEd,KAAAA,EAAOD,OAAAA,CAAQC,KAAK,GAAG;AAAE,iBAAA,CAAA,CAAA;AACnF;AACJ,SAAA,CAAA;AACA,QAAA,OAAO,MAAMC,YAAAA,GAAe,GAAA;AAChC;IACA,OAAO,EAAA;AACX;AAEO,MAAMe,wBAAwB,CAACC,OAAAA,GAAAA;IAClC,MAAMC,KAAAA,GAAQD,OAAAA,CAAQE,KAAK,CAAC,GAAA,CAAA;IAC5B,IAAID,KAAAA,CAAME,MAAM,KAAK,CAAA,EAAG;AACpB,QAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,wBAAwB,EAAEJ,OAAAA,CAAAA,CAAS,CAAA;AACxD;AACA,IAAA,MAAMK,KAAAA,GAAQC,QAAAA,CAASL,KAAK,CAAC,EAAE,EAAE,EAAA,CAAA;AACjC,IAAA,IAAIM,MAAMF,KAAAA,CAAAA,EAAQ;QACd,MAAM,IAAID,MAAM,CAAC,uBAAuB,EAAEH,KAAK,CAAC,EAAE,CAAA,CAAE,CAAA;AACxD;IACAA,KAAK,CAAC,EAAE,GAAII,CAAAA,KAAAA,GAAQ,CAAA,EAAGG,QAAQ,EAAA;IAC/B,OAAOP,KAAAA,CAAMQ,IAAI,CAAC,GAAA,CAAA;AACtB;;;;"}

package/dist/util/github.js
ADDED

@@ -0,0 +1,144 @@
+import { Octokit } from '@octokit/rest';
+import { getLogger } from '../logging.js';
+import { run } from './child.js';
+
+const getOctokit = ()=>{
+    const logger = getLogger();
+    const token = process.env.GITHUB_TOKEN;
+    if (!token) {
+        logger.error('GITHUB_TOKEN environment variable is not set.');
+        throw new Error('GITHUB_TOKEN is not set.');
+    }
+    return new Octokit({
+        auth: token
+    });
+};
+const getCurrentBranchName = async ()=>{
+    const { stdout } = await run('git rev-parse --abbrev-ref HEAD');
+    return stdout.trim();
+};
+const getRepoDetails = async ()=>{
+    const { stdout } = await run('git remote get-url origin');
+    const url = stdout.trim();
+    // git@github.com:owner/repo.git or https://github.com/owner/repo.git
+    const match = url.match(/github\.com[/:]([\w-]+)\/([\w-]+)\.git/);
+    if (!match) {
+        throw new Error('Could not parse repository owner and name from origin URL.');
+    }
+    return {
+        owner: match[1],
+        repo: match[2]
+    };
+};
+const createPullRequest = async (title, body, head, base = 'main')=>{
+    const octokit = getOctokit();
+    const { owner, repo } = await getRepoDetails();
+    const response = await octokit.pulls.create({
+        owner,
+        repo,
+        title,
+        body,
+        head,
+        base
+    });
+    return response.data;
+};
+const findOpenPullRequestByHeadRef = async (head)=>{
+    const octokit = getOctokit();
+    const { owner, repo } = await getRepoDetails();
+    const response = await octokit.pulls.list({
+        owner,
+        repo,
+        state: 'open',
+        head: `${owner}:${head}`
+    });
+    var _response_data_;
+    return (_response_data_ = response.data[0]) !== null && _response_data_ !== void 0 ? _response_data_ : null;
+};
+const delay = (ms)=>new Promise((resolve)=>setTimeout(resolve, ms));
+const waitForPullRequestChecks = async (prNumber)=>{
+    const octokit = getOctokit();
+    const { owner, repo } = await getRepoDetails();
+    const logger = getLogger();
+    while(true){
+        const pr = await octokit.pulls.get({
+            owner,
+            repo,
+            pull_number: prNumber
+        });
+        const checkRunsResponse = await octokit.checks.listForRef({
+            owner,
+            repo,
+            ref: pr.data.head.sha
+        });
+        const checkRuns = checkRunsResponse.data.check_runs;
+        if (checkRuns.length === 0) {
+            logger.info(`PR #${prNumber}: No checks found. Waiting...`);
+            await delay(10000);
+            continue;
+        }
+        const failingChecks = checkRuns.filter((cr)=>cr.conclusion && [
+            'failure',
+            'timed_out',
+            'cancelled'
+        ].includes(cr.conclusion));
+        if (failingChecks.length > 0) {
+            logger.error(`PR #${prNumber} has failing checks:`);
+            for (const check of failingChecks){
+                logger.error(`- ${check.name}: ${check.conclusion}`);
+            }
+            throw new Error(`PR #${prNumber} checks failed.`);
+        }
+        const allChecksCompleted = checkRuns.every((cr)=>cr.status === 'completed');
+        if (allChecksCompleted) {
+            logger.info(`All checks for PR #${prNumber} have completed successfully.`);
+            return;
+        }
+        const completedCount = checkRuns.filter((cr)=>cr.status === 'completed').length;
+        logger.info(`PR #${prNumber} checks: ${completedCount}/${checkRuns.length} completed. Waiting...`);
+        await delay(10000); // wait 10 seconds
+    }
+};
+const mergePullRequest = async (prNumber, mergeMethod = 'squash')=>{
+    const octokit = getOctokit();
+    const { owner, repo } = await getRepoDetails();
+    const logger = getLogger();
+    logger.info(`Merging PR #${prNumber} using ${mergeMethod} method...`);
+    const pr = await octokit.pulls.get({
+        owner,
+        repo,
+        pull_number: prNumber
+    });
+    const headBranch = pr.data.head.ref;
+    await octokit.pulls.merge({
+        owner,
+        repo,
+        pull_number: prNumber,
+        merge_method: mergeMethod
+    });
+    logger.info(`PR #${prNumber} merged using ${mergeMethod} method.`);
+    logger.info(`Deleting branch ${headBranch}...`);
+    await octokit.git.deleteRef({
+        owner,
+        repo,
+        ref: `heads/${headBranch}`
+    });
+    logger.info(`Branch ${headBranch} deleted.`);
+};
+const createRelease = async (tagName, title, notes)=>{
+    const octokit = getOctokit();
+    const { owner, repo } = await getRepoDetails();
+    const logger = getLogger();
+    logger.info(`Creating release for tag ${tagName}...`);
+    await octokit.repos.createRelease({
+        owner,
+        repo,
+        tag_name: tagName,
+        name: title,
+        body: notes
+    });
+    logger.info(`Release ${tagName} created.`);
+};
+
+export { createPullRequest, createRelease, findOpenPullRequestByHeadRef, getCurrentBranchName, getOctokit, getRepoDetails, mergePullRequest, waitForPullRequestChecks };
+//# sourceMappingURL=github.js.map
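Taken together, these helpers cover the GitHub side of an automated release. The sketch below is not the package's `publish` command, whose source is not part of this excerpt; it is a hypothetical flow showing how the exported functions compose. It assumes an ES module context (top-level await), `GITHUB_TOKEN` in the environment, an `origin` remote on github.com, and placeholder titles and version strings.

```typescript
import {
    getCurrentBranchName,
    findOpenPullRequestByHeadRef,
    createPullRequest,
    waitForPullRequestChecks,
    mergePullRequest,
    createRelease,
} from './github.js';

const head = await getCurrentBranchName();

// Reuse an open PR for this branch if one exists, otherwise open a new one
// against the default 'main' base.
const pr = await findOpenPullRequestByHeadRef(head)
    ?? await createPullRequest('Release 0.0.7', 'Automated release PR.', head);

await waitForPullRequestChecks(pr.number);    // polls check runs every 10 seconds
await mergePullRequest(pr.number, 'squash');  // merges, then deletes the head branch
await createRelease('v0.0.7', 'Release 0.0.7', 'Release notes go here.');
```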

package/dist/util/github.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"github.js","sources":["../../src/util/github.ts"],"sourcesContent":["import { Octokit } from '@octokit/rest';\nimport { getLogger } from '../logging';\nimport { PullRequest, MergeMethod } from '../types';\nimport { run } from './child';\n\nexport const getOctokit = (): Octokit => {\n const logger = getLogger();\n const token = process.env.GITHUB_TOKEN;\n\n if (!token) {\n logger.error('GITHUB_TOKEN environment variable is not set.');\n throw new Error('GITHUB_TOKEN is not set.');\n }\n\n return new Octokit({\n auth: token,\n });\n};\n\nexport const getCurrentBranchName = async (): Promise<string> => {\n const { stdout } = await run('git rev-parse --abbrev-ref HEAD');\n return stdout.trim();\n};\n\nexport const getRepoDetails = async (): Promise<{ owner: string; repo: string }> => {\n const { stdout } = await run('git remote get-url origin');\n const url = stdout.trim();\n // git@github.com:owner/repo.git or https://github.com/owner/repo.git\n const match = url.match(/github\\.com[/:]([\\w-]+)\\/([\\w-]+)\\.git/);\n if (!match) {\n throw new Error('Could not parse repository owner and name from origin URL.');\n }\n return { owner: match[1], repo: match[2] };\n};\n\nexport const createPullRequest = async (\n title: string,\n body: string,\n head: string,\n base: string = 'main'\n): Promise<PullRequest> => {\n const octokit = getOctokit();\n const { owner, repo } = await getRepoDetails();\n\n const response = await octokit.pulls.create({\n owner,\n repo,\n title,\n body,\n head,\n base,\n });\n\n return response.data;\n};\n\nexport const findOpenPullRequestByHeadRef = async (head: string): Promise<PullRequest | null> => {\n const octokit = getOctokit();\n const { owner, repo } = await getRepoDetails();\n\n const response = await octokit.pulls.list({\n owner,\n repo,\n state: 'open',\n head: `${owner}:${head}`,\n });\n\n return response.data[0] ?? null;\n};\n\nconst delay = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));\n\nexport const waitForPullRequestChecks = async (prNumber: number): Promise<void> => {\n const octokit = getOctokit();\n const { owner, repo } = await getRepoDetails();\n const logger = getLogger();\n\n while (true) {\n const pr = await octokit.pulls.get({\n owner,\n repo,\n pull_number: prNumber,\n });\n\n const checkRunsResponse = await octokit.checks.listForRef({\n owner,\n repo,\n ref: pr.data.head.sha,\n });\n\n const checkRuns = checkRunsResponse.data.check_runs;\n\n if (checkRuns.length === 0) {\n logger.info(`PR #${prNumber}: No checks found. Waiting...`);\n await delay(10000);\n continue;\n }\n\n const failingChecks = checkRuns.filter(\n (cr) => cr.conclusion && ['failure', 'timed_out', 'cancelled'].includes(cr.conclusion)\n );\n\n if (failingChecks.length > 0) {\n logger.error(`PR #${prNumber} has failing checks:`);\n for (const check of failingChecks) {\n logger.error(`- ${check.name}: ${check.conclusion}`);\n }\n throw new Error(`PR #${prNumber} checks failed.`);\n }\n\n const allChecksCompleted = checkRuns.every((cr) => cr.status === 'completed');\n\n if (allChecksCompleted) {\n logger.info(`All checks for PR #${prNumber} have completed successfully.`);\n return;\n }\n\n const completedCount = checkRuns.filter(cr => cr.status === 'completed').length;\n logger.info(`PR #${prNumber} checks: ${completedCount}/${checkRuns.length} completed. 
Waiting...`);\n\n await delay(10000); // wait 10 seconds\n }\n};\n\nexport const mergePullRequest = async (prNumber: number, mergeMethod: MergeMethod = 'squash'): Promise<void> => {\n const octokit = getOctokit();\n const { owner, repo } = await getRepoDetails();\n const logger = getLogger();\n\n logger.info(`Merging PR #${prNumber} using ${mergeMethod} method...`);\n const pr = await octokit.pulls.get({\n owner,\n repo,\n pull_number: prNumber,\n });\n const headBranch = pr.data.head.ref;\n\n await octokit.pulls.merge({\n owner,\n repo,\n pull_number: prNumber,\n merge_method: mergeMethod,\n });\n logger.info(`PR #${prNumber} merged using ${mergeMethod} method.`);\n\n logger.info(`Deleting branch ${headBranch}...`);\n await octokit.git.deleteRef({\n owner,\n repo,\n ref: `heads/${headBranch}`,\n });\n logger.info(`Branch ${headBranch} deleted.`);\n};\n\nexport const createRelease = async (tagName: string, title: string, notes: string): Promise<void> => {\n const octokit = getOctokit();\n const { owner, repo } = await getRepoDetails();\n const logger = getLogger();\n\n logger.info(`Creating release for tag ${tagName}...`);\n await octokit.repos.createRelease({\n owner,\n repo,\n tag_name: tagName,\n name: title,\n body: notes,\n });\n logger.info(`Release ${tagName} created.`);\n}; "],"names":["getOctokit","logger","getLogger","token","process","env","GITHUB_TOKEN","error","Error","Octokit","auth","getCurrentBranchName","stdout","run","trim","getRepoDetails","url","match","owner","repo","createPullRequest","title","body","head","base","octokit","response","pulls","create","data","findOpenPullRequestByHeadRef","list","state","delay","ms","Promise","resolve","setTimeout","waitForPullRequestChecks","prNumber","pr","get","pull_number","checkRunsResponse","checks","listForRef","ref","sha","checkRuns","check_runs","length","info","failingChecks","filter","cr","conclusion","includes","check","name","allChecksCompleted","every","status","completedCount","mergePullRequest","mergeMethod","headBranch","merge","merge_method","git","deleteRef","createRelease","tagName","notes","repos","tag_name"],"mappings":";;;;MAKaA,UAAAA,GAAa,IAAA;AACtB,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQC,OAAAA,CAAQC,GAAG,CAACC,YAAY;AAEtC,IAAA,IAAI,CAACH,KAAAA,EAAO;AACRF,QAAAA,MAAAA,CAAOM,KAAK,CAAC,+CAAA,CAAA;AACb,QAAA,MAAM,IAAIC,KAAAA,CAAM,0BAAA,CAAA;AACpB;AAEA,IAAA,OAAO,IAAIC,OAAAA,CAAQ;QACfC,IAAAA,EAAMP;AACV,KAAA,CAAA;AACJ;MAEaQ,oBAAAA,GAAuB,UAAA;AAChC,IAAA,MAAM,EAAEC,MAAM,EAAE,GAAG,MAAMC,GAAAA,CAAI,iCAAA,CAAA;AAC7B,IAAA,OAAOD,OAAOE,IAAI,EAAA;AACtB;MAEaC,cAAAA,GAAiB,UAAA;AAC1B,IAAA,MAAM,EAAEH,MAAM,EAAE,GAAG,MAAMC,GAAAA,CAAI,2BAAA,CAAA;IAC7B,MAAMG,GAAAA,GAAMJ,OAAOE,IAAI,EAAA;;IAEvB,MAAMG,KAAAA,GAAQD,GAAAA,CAAIC,KAAK,CAAC,wCAAA,CAAA;AACxB,IAAA,IAAI,CAACA,KAAAA,EAAO;AACR,QAAA,MAAM,IAAIT,KAAAA,CAAM,4DAAA,CAAA;AACpB;IACA,OAAO;QAAEU,KAAAA,EAAOD,KAAK,CAAC,CAAA,CAAE;QAAEE,IAAAA,EAAMF,KAAK,CAAC,CAAA;AAAG,KAAA;AAC7C;MAEaG,iBAAAA,GAAoB,OAC7BC,OACAC,IAAAA,EACAC,IAAAA,EACAC,OAAe,MAAM,GAAA;AAErB,IAAA,MAAMC,OAAAA,GAAUzB,UAAAA,EAAAA;AAChB,IAAA,MAAM,EAAEkB,KAAK,EAAEC,IAAI,EAAE,GAAG,MAAMJ,cAAAA,EAAAA;AAE9B,IAAA,MAAMW,WAAW,MAAMD,OAAAA,CAAQE,KAAK,CAACC,MAAM,CAAC;AACxCV,QAAAA,KAAAA;AACAC,QAAAA,IAAAA;AACAE,QAAAA,KAAAA;AACAC,QAAAA,IAAAA;AACAC,QAAAA,IAAAA;AACAC,QAAAA;AACJ,KAAA,CAAA;AAEA,IAAA,OAAOE,SAASG,IAAI;AACxB;AAEO,MAAMC,+BAA+B,OAAOP,IAAAA,GAAAA;AAC/C,IAAA,MAAME,OAAAA,GAAUzB,UAAAA,EAAAA;AAChB,IAAA,MAAM,EAAEkB,KAAK,EAAEC,IAAI,EAAE,GAAG,MAAMJ,cAAAA,EAAAA;AAE9B,IAAA,MAAMW,WAAW,MAAMD,OAAAA,CAAQE,KAAK,CAACI,IAAI,CAAC;AACtCb,QAAA
A,KAAAA;AACAC,QAAAA,IAAAA;QACAa,KAAAA,EAAO,MAAA;AACPT,QAAAA,IAAAA,EAAM,CAAA,EAAGL,KAAAA,CAAM,CAAC,EAAEK,IAAAA,CAAAA;AACtB,KAAA,CAAA;AAEOG,IAAAA,IAAAA,eAAAA;IAAP,OAAOA,CAAAA,kBAAAA,QAAAA,CAASG,IAAI,CAAC,CAAA,CAAE,MAAA,IAAA,IAAhBH,6BAAAA,eAAAA,GAAoB,IAAA;AAC/B;AAEA,MAAMO,KAAAA,GAAQ,CAACC,EAAAA,GAAe,IAAIC,QAAQC,CAAAA,OAAAA,GAAWC,WAAWD,OAAAA,EAASF,EAAAA,CAAAA,CAAAA;AAElE,MAAMI,2BAA2B,OAAOC,QAAAA,GAAAA;AAC3C,IAAA,MAAMd,OAAAA,GAAUzB,UAAAA,EAAAA;AAChB,IAAA,MAAM,EAAEkB,KAAK,EAAEC,IAAI,EAAE,GAAG,MAAMJ,cAAAA,EAAAA;AAC9B,IAAA,MAAMd,MAAAA,GAASC,SAAAA,EAAAA;AAEf,IAAA,MAAO,IAAA,CAAM;AACT,QAAA,MAAMsC,KAAK,MAAMf,OAAAA,CAAQE,KAAK,CAACc,GAAG,CAAC;AAC/BvB,YAAAA,KAAAA;AACAC,YAAAA,IAAAA;YACAuB,WAAAA,EAAaH;AACjB,SAAA,CAAA;AAEA,QAAA,MAAMI,oBAAoB,MAAMlB,OAAAA,CAAQmB,MAAM,CAACC,UAAU,CAAC;AACtD3B,YAAAA,KAAAA;AACAC,YAAAA,IAAAA;AACA2B,YAAAA,GAAAA,EAAKN,EAAAA,CAAGX,IAAI,CAACN,IAAI,CAACwB;AACtB,SAAA,CAAA;AAEA,QAAA,MAAMC,SAAAA,GAAYL,iBAAAA,CAAkBd,IAAI,CAACoB,UAAU;QAEnD,IAAID,SAAAA,CAAUE,MAAM,KAAK,CAAA,EAAG;AACxBjD,YAAAA,MAAAA,CAAOkD,IAAI,CAAC,CAAC,IAAI,EAAEZ,QAAAA,CAAS,6BAA6B,CAAC,CAAA;AAC1D,YAAA,MAAMN,KAAAA,CAAM,KAAA,CAAA;AACZ,YAAA;AACJ;QAEA,MAAMmB,aAAAA,GAAgBJ,UAAUK,MAAM,CAClC,CAACC,EAAAA,GAAOA,EAAAA,CAAGC,UAAU,IAAI;AAAC,gBAAA,SAAA;AAAW,gBAAA,WAAA;AAAa,gBAAA;aAAY,CAACC,QAAQ,CAACF,EAAAA,CAAGC,UAAU,CAAA,CAAA;QAGzF,IAAIH,aAAAA,CAAcF,MAAM,GAAG,CAAA,EAAG;AAC1BjD,YAAAA,MAAAA,CAAOM,KAAK,CAAC,CAAC,IAAI,EAAEgC,QAAAA,CAAS,oBAAoB,CAAC,CAAA;YAClD,KAAK,MAAMkB,SAASL,aAAAA,CAAe;AAC/BnD,gBAAAA,MAAAA,CAAOM,KAAK,CAAC,CAAC,EAAE,EAAEkD,KAAAA,CAAMC,IAAI,CAAC,EAAE,EAAED,KAAAA,CAAMF,UAAU,CAAA,CAAE,CAAA;AACvD;AACA,YAAA,MAAM,IAAI/C,KAAAA,CAAM,CAAC,IAAI,EAAE+B,QAAAA,CAAS,eAAe,CAAC,CAAA;AACpD;QAEA,MAAMoB,kBAAAA,GAAqBX,UAAUY,KAAK,CAAC,CAACN,EAAAA,GAAOA,EAAAA,CAAGO,MAAM,KAAK,WAAA,CAAA;AAEjE,QAAA,IAAIF,kBAAAA,EAAoB;AACpB1D,YAAAA,MAAAA,CAAOkD,IAAI,CAAC,CAAC,mBAAmB,EAAEZ,QAAAA,CAAS,6BAA6B,CAAC,CAAA;AACzE,YAAA;AACJ;QAEA,MAAMuB,cAAAA,GAAiBd,SAAAA,CAAUK,MAAM,CAACC,CAAAA,KAAMA,EAAAA,CAAGO,MAAM,KAAK,WAAA,CAAA,CAAaX,MAAM;AAC/EjD,QAAAA,MAAAA,CAAOkD,IAAI,CAAC,CAAC,IAAI,EAAEZ,QAAAA,CAAS,SAAS,EAAEuB,cAAAA,CAAe,CAAC,EAAEd,SAAAA,CAAUE,MAAM,CAAC,sBAAsB,CAAC,CAAA;QAEjG,MAAMjB,KAAAA,CAAM;AAChB;AACJ;AAEO,MAAM8B,gBAAAA,GAAmB,OAAOxB,QAAAA,EAAkByB,cAA2B,QAAQ,GAAA;AACxF,IAAA,MAAMvC,OAAAA,GAAUzB,UAAAA,EAAAA;AAChB,IAAA,MAAM,EAAEkB,KAAK,EAAEC,IAAI,EAAE,GAAG,MAAMJ,cAAAA,EAAAA;AAC9B,IAAA,MAAMd,MAAAA,GAASC,SAAAA,EAAAA;IAEfD,MAAAA,CAAOkD,IAAI,CAAC,CAAC,YAAY,EAAEZ,SAAS,OAAO,EAAEyB,WAAAA,CAAY,UAAU,CAAC,CAAA;AACpE,IAAA,MAAMxB,KAAK,MAAMf,OAAAA,CAAQE,KAAK,CAACc,GAAG,CAAC;AAC/BvB,QAAAA,KAAAA;AACAC,QAAAA,IAAAA;QACAuB,WAAAA,EAAaH;AACjB,KAAA,CAAA;AACA,IAAA,MAAM0B,aAAazB,EAAAA,CAAGX,IAAI,CAACN,IAAI,CAACuB,GAAG;AAEnC,IAAA,MAAMrB,OAAAA,CAAQE,KAAK,CAACuC,KAAK,CAAC;AACtBhD,QAAAA,KAAAA;AACAC,QAAAA,IAAAA;QACAuB,WAAAA,EAAaH,QAAAA;QACb4B,YAAAA,EAAcH;AAClB,KAAA,CAAA;IACA/D,MAAAA,CAAOkD,IAAI,CAAC,CAAC,IAAI,EAAEZ,SAAS,cAAc,EAAEyB,WAAAA,CAAY,QAAQ,CAAC,CAAA;AAEjE/D,IAAAA,MAAAA,CAAOkD,IAAI,CAAC,CAAC,gBAAgB,EAAEc,UAAAA,CAAW,GAAG,CAAC,CAAA;AAC9C,IAAA,MAAMxC,OAAAA,CAAQ2C,GAAG,CAACC,SAAS,CAAC;AACxBnD,QAAAA,KAAAA;AACAC,QAAAA,IAAAA;QACA2B,GAAAA,EAAK,CAAC,MAAM,EAAEmB,UAAAA,CAAAA;AAClB,KAAA,CAAA;AACAhE,IAAAA,MAAAA,CAAOkD,IAAI,CAAC,CAAC,OAAO,EAAEc,UAAAA,CAAW,SAAS,CAAC,CAAA;AAC/C;AAEO,MAAMK,aAAAA,GAAgB,OAAOC,OAAAA,EAAiBlD,KAAAA,EAAemD,KAAAA,GAAAA;AAChE,IAAA,MAAM/C,OAAAA,GAAUzB,UAAAA,EAAAA;AAChB,IAAA,MAAM,EAAEkB,KAAK,EAAEC,IAAI,EAAE,GAAG,MAAMJ,cAAAA,EAAAA;AAC9B,IAAA,MAAMd,MAAAA,GAASC,SAAAA,EAAAA;AAEfD,IAAAA,MAAAA,CAAOkD,IAAI,CAAC,CAAC,yBAAyB,EAAEoB,OAAAA,CAAQ,GAAG,CAAC,CAAA;AACpD,IAAA,MAAM9C,OAAAA,CAAQgD,KAAK,C
AACH,aAAa,CAAC;AAC9BpD,QAAAA,KAAAA;AACAC,QAAAA,IAAAA;QACAuD,QAAAA,EAAUH,OAAAA;QACVb,IAAAA,EAAMrC,KAAAA;QACNC,IAAAA,EAAMkD;AACV,KAAA,CAAA;AACAvE,IAAAA,MAAAA,CAAOkD,IAAI,CAAC,CAAC,QAAQ,EAAEoB,OAAAA,CAAQ,SAAS,CAAC,CAAA;AAC7C;;;;"}

package/dist/util/openai.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"openai.js","sources":["../../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionMessageParam } from 'openai/resources';\nimport * as Storage from './storage';\nimport { getLogger } from '../logging';\nexport interface Transcription {\n text: string;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string } = { model: \"gpt-4o-mini\" }): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n const completion = await openai.chat.completions.create({\n model: options.model || \"gpt-4o-mini\",\n messages,\n max_completion_tokens: 10000,\n response_format: options.responseFormat,\n });\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n throw new OpenAIError('No response received from OpenAI');\n }\n\n logger.debug('Received response from OpenAI: %s...', response.substring(0, 30));\n if (options.responseFormat) {\n return JSON.parse(response);\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to create completion: ${error.message}`);\n }\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string } = { model: \"whisper-1\" }): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Transcribing audio file: %s', filePath);\n\n const audioStream = await storage.readStream(filePath);\n const transcription = await openai.audio.transcriptions.create({\n model: options.model || \"whisper-1\",\n file: audioStream,\n response_format: \"json\",\n });\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n const response = transcription;\n if (!response) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n\n logger.debug('Received transcription from OpenAI: %s', response);\n return response;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n }\n}\n"],"names":["OpenAIError","Error","
+
{"version":3,"file":"openai.js","sources":["../../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionMessageParam } from 'openai/resources';\nimport * as Storage from './storage';\nimport { getLogger } from '../logging';\nexport interface Transcription {\n text: string;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string } = { model: \"gpt-4o-mini\" }): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n const completion = await openai.chat.completions.create({\n model: options.model || \"gpt-4o-mini\",\n messages,\n max_completion_tokens: 10000,\n response_format: options.responseFormat,\n });\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n throw new OpenAIError('No response received from OpenAI');\n }\n\n logger.debug('Received response from OpenAI: %s...', response.substring(0, 30));\n if (options.responseFormat) {\n return JSON.parse(response);\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to create completion: ${error.message}`);\n }\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string } = { model: \"whisper-1\" }): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Transcribing audio file: %s', filePath);\n\n const audioStream = await storage.readStream(filePath);\n const transcription = await openai.audio.transcriptions.create({\n model: options.model || \"whisper-1\",\n file: audioStream,\n response_format: \"json\",\n });\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n const response = transcription;\n if (!response) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n\n logger.debug('Received transcription from OpenAI: %s', response);\n return response;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n 
}\n}\n"],"names":["OpenAIError","Error","message","name","createCompletion","messages","options","model","logger","getLogger","storage","Storage","log","debug","completion","apiKey","process","env","OPENAI_API_KEY","openai","OpenAI","chat","completions","create","max_completion_tokens","response_format","responseFormat","debugFile","writeFile","JSON","stringify","response","choices","content","trim","substring","parse","error","stack"],"mappings":";;;;AAQO,MAAMA,WAAAA,SAAoBC,KAAAA,CAAAA;AAC7B,IAAA,WAAA,CAAYC,OAAe,CAAE;AACzB,QAAA,KAAK,CAACA,OAAAA,CAAAA;QACN,IAAI,CAACC,IAAI,GAAG,aAAA;AAChB;AACJ;AAEO,eAAeC,gBAAAA,CAAiBC,QAAsC,EAAEC,OAAAA,GAAyF;IAAEC,KAAAA,EAAO;AAAc,CAAC,EAAA;AAC5L,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;IACnD,IAAI;AAwBiBC,QAAAA,IAAAA,oCAAAA,EAAAA,4BAAAA,EAAAA,oBAAAA;AAvBjB,QAAA,MAAMC,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAIf,WAAAA,CAAY,gDAAA,CAAA;AAC1B;QAEA,MAAMmB,MAAAA,GAAS,IAAIC,MAAAA,CAAO;YACtBL,MAAAA,EAAQA;AACZ,SAAA,CAAA;QAEAP,MAAAA,CAAOK,KAAK,CAAC,8BAAA,EAAgCR,QAAAA,CAAAA;QAE7C,MAAMS,UAAAA,GAAa,MAAMK,MAAAA,CAAOE,IAAI,CAACC,WAAW,CAACC,MAAM,CAAC;YACpDhB,KAAAA,EAAOD,OAAAA,CAAQC,KAAK,IAAI,aAAA;AACxBF,YAAAA,QAAAA;YACAmB,qBAAAA,EAAuB,KAAA;AACvBC,YAAAA,eAAAA,EAAiBnB,QAAQoB;AAC7B,SAAA,CAAA;AAEA,QAAA,IAAIpB,OAAAA,CAAQO,KAAK,IAAIP,OAAAA,CAAQqB,SAAS,EAAE;YACpC,MAAMjB,OAAAA,CAAQkB,SAAS,CAACtB,OAAAA,CAAQqB,SAAS,EAAEE,IAAAA,CAAKC,SAAS,CAAChB,UAAAA,EAAY,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;AAChFN,YAAAA,MAAAA,CAAOK,KAAK,CAAC,wBAAA,EAA0BP,OAAAA,CAAQqB,SAAS,CAAA;AAC5D;AAEA,QAAA,MAAMI,YAAWjB,oBAAAA,GAAAA,UAAAA,CAAWkB,OAAO,CAAC,CAAA,CAAE,cAArBlB,oBAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,CAAAA,4BAAAA,GAAAA,qBAAuBZ,OAAO,MAAA,IAAA,IAA9BY,oDAAAA,oCAAAA,GAAAA,4BAAAA,CAAgCmB,OAAO,MAAA,IAAA,IAAvCnB,oCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,qCAAyCoB,IAAI,EAAA;AAC9D,QAAA,IAAI,CAACH,QAAAA,EAAU;AACX,YAAA,MAAM,IAAI/B,WAAAA,CAAY,kCAAA,CAAA;AAC1B;AAEAQ,QAAAA,MAAAA,CAAOK,KAAK,CAAC,sCAAA,EAAwCkB,QAAAA,CAASI,SAAS,CAAC,CAAA,EAAG,EAAA,CAAA,CAAA;QAC3E,IAAI7B,OAAAA,CAAQoB,cAAc,EAAE;YACxB,OAAOG,IAAAA,CAAKO,KAAK,CAACL,QAAAA,CAAAA;SACtB,MAAO;YACH,OAAOA,QAAAA;AACX;AAEJ,KAAA,CAAE,OAAOM,KAAAA,EAAY;AACjB7B,QAAAA,MAAAA,CAAO6B,KAAK,CAAC,iCAAA,EAAmCA,MAAMnC,OAAO,EAAEmC,MAAMC,KAAK,CAAA;AAC1E,QAAA,MAAM,IAAItC,WAAAA,CAAY,CAAC,6BAA6B,EAAEqC,KAAAA,CAAMnC,OAAO,CAAA,CAAE,CAAA;AACzE;AACJ;;;;"}
package/dist/util/storage.js
CHANGED
@@ -78,6 +78,9 @@ const create = (params)=>{
             encoding: encoding
         });
     };
+    const rename = async (oldPath, newPath)=>{
+        await fs.promises.rename(oldPath, newPath);
+    };
     const forEachFileIn = async (directory, callback, options = {
         pattern: '*.*'
     })=>{
@@ -116,6 +119,7 @@ const create = (params)=>{
         readFile,
         readStream,
         writeFile,
+        rename,
         forEachFileIn,
         hashFile,
         listFiles
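The storage utility now exposes a `rename` method alongside `readFile`/`writeFile`, delegating to `fs.promises.rename`. A minimal sketch of how a caller could use it through the same `Storage.create` factory — the logger wiring and file names below are illustrative, not taken from the package:

```typescript
// Sketch only: assumes this file sits next to the package's util/ directory.
import * as Storage from './util/storage';

async function finalizeNotes(): Promise<void> {
    const storage = Storage.create({ log: console.log });

    // Write to a temporary path first, then move it into place with the new
    // rename helper. The file names here are hypothetical.
    await storage.writeFile('RELEASE_NOTES.tmp.md', '# Release Notes\n', 'utf8');
    await storage.rename('RELEASE_NOTES.tmp.md', 'RELEASE_NOTES.md');
}
```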
package/dist/util/storage.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"storage.js","sources":["../../src/util/storage.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport { glob } from 'glob';\nimport path from 'path';\nimport crypto from 'crypto';\n/**\n * This module exists to isolate filesystem operations from the rest of the codebase.\n * This makes testing easier by avoiding direct fs mocking in jest configuration.\n * \n * Additionally, abstracting storage operations allows for future flexibility - \n * this export utility may need to work with storage systems other than the local filesystem\n * (e.g. S3, Google Cloud Storage, etc).\n */\n\nexport interface Utility {\n exists: (path: string) => Promise<boolean>;\n isDirectory: (path: string) => Promise<boolean>;\n isFile: (path: string) => Promise<boolean>;\n isReadable: (path: string) => Promise<boolean>;\n isWritable: (path: string) => Promise<boolean>;\n isFileReadable: (path: string) => Promise<boolean>;\n isDirectoryWritable: (path: string) => Promise<boolean>;\n isDirectoryReadable: (path: string) => Promise<boolean>;\n createDirectory: (path: string) => Promise<void>;\n readFile: (path: string, encoding: string) => Promise<string>;\n readStream: (path: string) => Promise<fs.ReadStream>;\n writeFile: (path: string, data: string | Buffer, encoding: string) => Promise<void>;\n forEachFileIn: (directory: string, callback: (path: string) => Promise<void>, options?: { pattern: string }) => Promise<void>;\n hashFile: (path: string, length: number) => Promise<string>;\n listFiles: (directory: string) => Promise<string[]>;\n}\n\nexport const create = (params: { log?: (message: string, ...args: any[]) => void }): Utility => {\n\n // eslint-disable-next-line no-console\n const log = params.log || console.log;\n\n const exists = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.stat(path);\n return true;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n return false;\n }\n }\n\n const isDirectory = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isDirectory()) {\n log(`${path} is not a directory`);\n return false;\n }\n return true;\n }\n\n const isFile = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isFile()) {\n log(`${path} is not a file`);\n return false;\n }\n return true;\n }\n\n const isReadable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.R_OK);\n } catch (error: any) {\n log(`${path} is not readable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isWritable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.W_OK);\n } catch (error: any) {\n log(`${path} is not writable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isFileReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isFile(path) && await isReadable(path);\n }\n\n const isDirectoryWritable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isWritable(path);\n }\n\n const isDirectoryReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isReadable(path);\n }\n\n const createDirectory = async (path: string): Promise<void> => {\n try {\n await 
fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);\n }\n }\n\n const readFile = async (path: string, encoding: string): Promise<string> => {\n return await fs.promises.readFile(path, { encoding: encoding as BufferEncoding });\n }\n\n const writeFile = async (path: string, data: string | Buffer, encoding: string): Promise<void> => {\n await fs.promises.writeFile(path, data, { encoding: encoding as BufferEncoding });\n }\n\n const forEachFileIn = async (directory: string, callback: (file: string) => Promise<void>, options: { pattern: string | string[] } = { pattern: '*.*' }): Promise<void> => {\n try {\n const files = await glob(options.pattern, { cwd: directory, nodir: true });\n for (const file of files) {\n await callback(path.join(directory, file));\n }\n } catch (err: any) {\n throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);\n }\n }\n\n const readStream = async (path: string): Promise<fs.ReadStream> => {\n return fs.createReadStream(path);\n }\n\n const hashFile = async (path: string, length: number): Promise<string> => {\n const file = await readFile(path, 'utf8');\n return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);\n }\n\n const listFiles = async (directory: string): Promise<string[]> => {\n return await fs.promises.readdir(directory);\n }\n\n return {\n exists,\n isDirectory,\n isFile,\n isReadable,\n isWritable,\n isFileReadable,\n isDirectoryWritable,\n isDirectoryReadable,\n createDirectory,\n readFile,\n readStream,\n writeFile,\n forEachFileIn,\n hashFile,\n listFiles,\n };\n}"],"names":["create","params","log","console","exists","path","fs","promises","stat","error","isDirectory","stats","isFile","isReadable","access","constants","R_OK","message","stack","isWritable","W_OK","isFileReadable","isDirectoryWritable","isDirectoryReadable","createDirectory","mkdir","recursive","mkdirError","Error","readFile","encoding","writeFile","data","forEachFileIn","directory","callback","options","pattern","files","glob","cwd","nodir","file","join","err","readStream","createReadStream","hashFile","length","crypto","createHash","update","digest","slice","listFiles","readdir"],"mappings":";;;;;AAAA;AAgCO,MAAMA,SAAS,CAACC,MAAAA,GAAAA;;AAGnB,IAAA,MAAMC,GAAMD,GAAAA,MAAAA,CAAOC,GAAG,IAAIC,QAAQD,GAAG;AAErC,IAAA,MAAME,SAAS,OAAOC,IAAAA,GAAAA;QAClB,IAAI;AACA,YAAA,MAAMC,EAAGC,CAAAA,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;YACvB,OAAO,IAAA;;AAEX,SAAA,CAAE,OAAOI,KAAY,EAAA;YACjB,OAAO,KAAA;AACX;AACJ,KAAA;AAEA,IAAA,MAAMC,cAAc,OAAOL,IAAAA,GAAAA;AACvB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAMD,CAAAA,WAAW,EAAI,EAAA;YACtBR,GAAI,CAAA,CAAA,EAAGG,IAAK,CAAA,mBAAmB,CAAC,CAAA;YAChC,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMO,SAAS,OAAOP,IAAAA,GAAAA;AAClB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAMC,CAAAA,MAAM,EAAI,EAAA;YACjBV,GAAI,CAAA,CAAA,EAAGG,IAAK,CAAA,cAAc,CAAC,CAAA;YAC3B,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMQ,aAAa,OAAOR,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAMC,EAAAA,EAAAA,CAAGS,SAAS,CAACC,IAAI,CAAA;AACpD,SAAA,CAAE,OAAOP,KAAY,EAAA;YACjBP,GAAI,CAAA,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAMQ,CAAAA,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMC,aAAa,OAAOd,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAMC
,EAAAA,EAAAA,CAAGS,SAAS,CAACK,IAAI,CAAA;AACpD,SAAA,CAAE,OAAOX,KAAY,EAAA;YACjBP,GAAI,CAAA,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAMQ,CAAAA,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMG,iBAAiB,OAAOhB,IAAAA,GAAAA;AAC1B,QAAA,OAAO,MAAMD,MAAOC,CAAAA,IAAAA,CAAAA,IAAS,MAAMO,MAAOP,CAAAA,IAAAA,CAAAA,IAAS,MAAMQ,UAAWR,CAAAA,IAAAA,CAAAA;AACxE,KAAA;AAEA,IAAA,MAAMiB,sBAAsB,OAAOjB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAOC,CAAAA,IAAAA,CAAAA,IAAS,MAAMK,WAAYL,CAAAA,IAAAA,CAAAA,IAAS,MAAMc,UAAWd,CAAAA,IAAAA,CAAAA;AAC7E,KAAA;AAEA,IAAA,MAAMkB,sBAAsB,OAAOlB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAOC,CAAAA,IAAAA,CAAAA,IAAS,MAAMK,WAAYL,CAAAA,IAAAA,CAAAA,IAAS,MAAMQ,UAAWR,CAAAA,IAAAA,CAAAA;AAC7E,KAAA;AAEA,IAAA,MAAMmB,kBAAkB,OAAOnB,IAAAA,GAAAA;QAC3B,IAAI;AACA,YAAA,MAAMC,EAAGC,CAAAA,QAAQ,CAACkB,KAAK,CAACpB,IAAM,EAAA;gBAAEqB,SAAW,EAAA;AAAK,aAAA,CAAA;AACpD,SAAA,CAAE,OAAOC,UAAiB,EAAA;AACtB,YAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,kCAAkC,EAAEvB,IAAK,CAAA,EAAE,EAAEsB,UAAAA,CAAWV,OAAO,CAAC,CAAC,EAAEU,UAAAA,CAAWT,KAAK,CAAE,CAAA,CAAA;AAC1G;AACJ,KAAA;IAEA,MAAMW,QAAAA,GAAW,OAAOxB,IAAcyB,EAAAA,QAAAA,GAAAA;AAClC,QAAA,OAAO,MAAMxB,EAAGC,CAAAA,QAAQ,CAACsB,QAAQ,CAACxB,IAAM,EAAA;YAAEyB,QAAUA,EAAAA;AAA2B,SAAA,CAAA;AACnF,KAAA;IAEA,MAAMC,SAAAA,GAAY,OAAO1B,IAAAA,EAAc2B,IAAuBF,EAAAA,QAAAA,GAAAA;AAC1D,QAAA,MAAMxB,GAAGC,QAAQ,CAACwB,SAAS,CAAC1B,MAAM2B,IAAM,EAAA;YAAEF,QAAUA,EAAAA;AAA2B,SAAA,CAAA;AACnF,KAAA;AAEA,IAAA,MAAMG,aAAgB,GAAA,OAAOC,SAAmBC,EAAAA,QAAAA,EAA2CC,OAA0C,GAAA;QAAEC,OAAS,EAAA;KAAO,GAAA;QACnJ,IAAI;AACA,YAAA,MAAMC,KAAQ,GAAA,MAAMC,IAAKH,CAAAA,OAAAA,CAAQC,OAAO,EAAE;gBAAEG,GAAKN,EAAAA,SAAAA;gBAAWO,KAAO,EAAA;AAAK,aAAA,CAAA;YACxE,KAAK,MAAMC,QAAQJ,KAAO,CAAA;AACtB,gBAAA,MAAMH,QAAS9B,CAAAA,IAAAA,CAAKsC,IAAI,CAACT,SAAWQ,EAAAA,IAAAA,CAAAA,CAAAA;AACxC;AACJ,SAAA,CAAE,OAAOE,GAAU,EAAA;AACf,YAAA,MAAM,IAAIhB,KAAAA,CAAM,CAAC,uBAAuB,EAAEQ,OAAQC,CAAAA,OAAO,CAAC,IAAI,EAAEH,SAAU,CAAA,EAAE,EAAEU,GAAAA,CAAI3B,OAAO,CAAE,CAAA,CAAA;AAC/F;AACJ,KAAA;AAEA,IAAA,MAAM4B,aAAa,OAAOxC,IAAAA,GAAAA;QACtB,OAAOC,EAAAA,CAAGwC,gBAAgB,CAACzC,IAAAA,CAAAA;AAC/B,KAAA;IAEA,MAAM0C,QAAAA,GAAW,OAAO1C,IAAc2C,EAAAA,MAAAA,GAAAA;QAClC,MAAMN,IAAAA,GAAO,MAAMb,QAAAA,CAASxB,IAAM,EAAA,MAAA,CAAA;AAClC,QAAA,OAAO4C,MAAOC,CAAAA,UAAU,CAAC,QAAA,CAAA,CAAUC,MAAM,CAACT,IAAMU,CAAAA,CAAAA,MAAM,CAAC,KAAA,CAAA,CAAOC,KAAK,CAAC,CAAGL,EAAAA,MAAAA,CAAAA;AAC3E,KAAA;AAEA,IAAA,MAAMM,YAAY,OAAOpB,SAAAA,GAAAA;AACrB,QAAA,OAAO,MAAM5B,EAAAA,CAAGC,QAAQ,CAACgD,OAAO,CAACrB,SAAAA,CAAAA;AACrC,KAAA;IAEA,OAAO;AACH9B,QAAAA,MAAAA;AACAM,QAAAA,WAAAA;AACAE,QAAAA,MAAAA;AACAC,QAAAA,UAAAA;AACAM,QAAAA,UAAAA;AACAE,QAAAA,cAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,eAAAA;AACAK,QAAAA,QAAAA;AACAgB,QAAAA,UAAAA;AACAd,QAAAA,SAAAA;AACAE,QAAAA,aAAAA;AACAc,QAAAA,QAAAA;AACAO,QAAAA;AACJ,KAAA;AACJ;;;;"}
+
{"version":3,"file":"storage.js","sources":["../../src/util/storage.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport { glob } from 'glob';\nimport path from 'path';\nimport crypto from 'crypto';\n/**\n * This module exists to isolate filesystem operations from the rest of the codebase.\n * This makes testing easier by avoiding direct fs mocking in jest configuration.\n * \n * Additionally, abstracting storage operations allows for future flexibility - \n * this export utility may need to work with storage systems other than the local filesystem\n * (e.g. S3, Google Cloud Storage, etc).\n */\n\nexport interface Utility {\n exists: (path: string) => Promise<boolean>;\n isDirectory: (path: string) => Promise<boolean>;\n isFile: (path: string) => Promise<boolean>;\n isReadable: (path: string) => Promise<boolean>;\n isWritable: (path: string) => Promise<boolean>;\n isFileReadable: (path: string) => Promise<boolean>;\n isDirectoryWritable: (path: string) => Promise<boolean>;\n isDirectoryReadable: (path: string) => Promise<boolean>;\n createDirectory: (path: string) => Promise<void>;\n readFile: (path: string, encoding: string) => Promise<string>;\n readStream: (path: string) => Promise<fs.ReadStream>;\n writeFile: (path: string, data: string | Buffer, encoding: string) => Promise<void>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n forEachFileIn: (directory: string, callback: (path: string) => Promise<void>, options?: { pattern: string }) => Promise<void>;\n hashFile: (path: string, length: number) => Promise<string>;\n listFiles: (directory: string) => Promise<string[]>;\n}\n\nexport const create = (params: { log?: (message: string, ...args: any[]) => void }): Utility => {\n\n // eslint-disable-next-line no-console\n const log = params.log || console.log;\n\n const exists = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.stat(path);\n return true;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n return false;\n }\n }\n\n const isDirectory = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isDirectory()) {\n log(`${path} is not a directory`);\n return false;\n }\n return true;\n }\n\n const isFile = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isFile()) {\n log(`${path} is not a file`);\n return false;\n }\n return true;\n }\n\n const isReadable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.R_OK);\n } catch (error: any) {\n log(`${path} is not readable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isWritable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.W_OK);\n } catch (error: any) {\n log(`${path} is not writable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isFileReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isFile(path) && await isReadable(path);\n }\n\n const isDirectoryWritable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isWritable(path);\n }\n\n const isDirectoryReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isReadable(path);\n }\n\n const createDirectory = async 
(path: string): Promise<void> => {\n try {\n await fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);\n }\n }\n\n const readFile = async (path: string, encoding: string): Promise<string> => {\n return await fs.promises.readFile(path, { encoding: encoding as BufferEncoding });\n }\n\n const writeFile = async (path: string, data: string | Buffer, encoding: string): Promise<void> => {\n await fs.promises.writeFile(path, data, { encoding: encoding as BufferEncoding });\n }\n\n const rename = async (oldPath: string, newPath: string): Promise<void> => {\n await fs.promises.rename(oldPath, newPath);\n }\n\n const forEachFileIn = async (directory: string, callback: (file: string) => Promise<void>, options: { pattern: string | string[] } = { pattern: '*.*' }): Promise<void> => {\n try {\n const files = await glob(options.pattern, { cwd: directory, nodir: true });\n for (const file of files) {\n await callback(path.join(directory, file));\n }\n } catch (err: any) {\n throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);\n }\n }\n\n const readStream = async (path: string): Promise<fs.ReadStream> => {\n return fs.createReadStream(path);\n }\n\n const hashFile = async (path: string, length: number): Promise<string> => {\n const file = await readFile(path, 'utf8');\n return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);\n }\n\n const listFiles = async (directory: string): Promise<string[]> => {\n return await fs.promises.readdir(directory);\n }\n\n return {\n exists,\n isDirectory,\n isFile,\n isReadable,\n isWritable,\n isFileReadable,\n isDirectoryWritable,\n isDirectoryReadable,\n createDirectory,\n readFile,\n readStream,\n writeFile,\n rename,\n forEachFileIn,\n hashFile,\n listFiles,\n 
};\n}"],"names":["create","params","log","console","exists","path","fs","promises","stat","error","isDirectory","stats","isFile","isReadable","access","constants","R_OK","message","stack","isWritable","W_OK","isFileReadable","isDirectoryWritable","isDirectoryReadable","createDirectory","mkdir","recursive","mkdirError","Error","readFile","encoding","writeFile","data","rename","oldPath","newPath","forEachFileIn","directory","callback","options","pattern","files","glob","cwd","nodir","file","join","err","readStream","createReadStream","hashFile","length","crypto","createHash","update","digest","slice","listFiles","readdir"],"mappings":";;;;;AAAA;AAiCO,MAAMA,SAAS,CAACC,MAAAA,GAAAA;;AAGnB,IAAA,MAAMC,GAAAA,GAAMD,MAAAA,CAAOC,GAAG,IAAIC,QAAQD,GAAG;AAErC,IAAA,MAAME,SAAS,OAAOC,IAAAA,GAAAA;QAClB,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;YACvB,OAAO,IAAA;;AAEX,SAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB,OAAO,KAAA;AACX;AACJ,KAAA;AAEA,IAAA,MAAMC,cAAc,OAAOL,IAAAA,GAAAA;AACvB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMD,WAAW,EAAA,EAAI;YACtBR,GAAAA,CAAI,CAAA,EAAGG,IAAAA,CAAK,mBAAmB,CAAC,CAAA;YAChC,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMO,SAAS,OAAOP,IAAAA,GAAAA;AAClB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMC,MAAM,EAAA,EAAI;YACjBV,GAAAA,CAAI,CAAA,EAAGG,IAAAA,CAAK,cAAc,CAAC,CAAA;YAC3B,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMQ,aAAa,OAAOR,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACC,IAAI,CAAA;AACpD,SAAA,CAAE,OAAOP,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMC,aAAa,OAAOd,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACK,IAAI,CAAA;AACpD,SAAA,CAAE,OAAOX,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX;QACA,OAAO,IAAA;AACX,KAAA;AAEA,IAAA,MAAMG,iBAAiB,OAAOhB,IAAAA,GAAAA;AAC1B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMO,MAAAA,CAAOP,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AACxE,KAAA;AAEA,IAAA,MAAMiB,sBAAsB,OAAOjB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMc,UAAAA,CAAWd,IAAAA,CAAAA;AAC7E,KAAA;AAEA,IAAA,MAAMkB,sBAAsB,OAAOlB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AAC7E,KAAA;AAEA,IAAA,MAAMmB,kBAAkB,OAAOnB,IAAAA,GAAAA;QAC3B,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACkB,KAAK,CAACpB,IAAAA,EAAM;gBAAEqB,SAAAA,EAAW;AAAK,aAAA,CAAA;AACpD,SAAA,CAAE,OAAOC,UAAAA,EAAiB;AACtB,YAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,kCAAkC,EAAEvB,IAAAA,CAAK,EAAE,EAAEsB,UAAAA,CAAWV,OAAO,CAAC,CAAC,EAAEU,UAAAA,CAAWT,KAAK,CAAA,CAAE,CAAA;AAC1G;AACJ,KAAA;IAEA,MAAMW,QAAAA,GAAW,OAAOxB,IAAAA,EAAcyB,QAAAA,GAAAA;AAClC,QAAA,OAAO,MAAMxB,EAAAA,CAAGC,QAAQ,CAACsB,QAAQ,CAACxB,IAAAA,EAAM;YAAEyB,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,KAAA;IAEA,MAAMC,SAAAA,GAAY,OAAO1B,IAAAA,EAAc2B,IAAAA,EAAuBF,QAAAA,GAAAA;AAC1D,QAAA,MAAMxB,GAAGC,QAAQ,CAACwB,SAAS,CAAC1B,MAAM2B,IAAAA,EAAM;YAAEF,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,KAAA;IAEA,MAAMG,MAAAA,GAAS,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACnC,QAAA,MAAM7B,EAAAA,CAAGC,QAAQ,CAAC0B,MAAM,CAACC,OAAAA,EAASC,OAAAA,CAAAA;AACtC,KAAA;AAEA,IAAA,MAAMC,aAAAA,GAAgB,OAAOC,SAAAA,EAAmBC,QAAAA,EAA2CC,OAAAA,GAA0C;QAAEC,OAAAA,EAAS;KAAO,GAAA;QACnJ,IAAI;AACA,YAAA,MAA
MC,KAAAA,GAAQ,MAAMC,IAAAA,CAAKH,OAAAA,CAAQC,OAAO,EAAE;gBAAEG,GAAAA,EAAKN,SAAAA;gBAAWO,KAAAA,EAAO;AAAK,aAAA,CAAA;YACxE,KAAK,MAAMC,QAAQJ,KAAAA,CAAO;AACtB,gBAAA,MAAMH,QAAAA,CAASjC,IAAAA,CAAKyC,IAAI,CAACT,SAAAA,EAAWQ,IAAAA,CAAAA,CAAAA;AACxC;AACJ,SAAA,CAAE,OAAOE,GAAAA,EAAU;AACf,YAAA,MAAM,IAAInB,KAAAA,CAAM,CAAC,uBAAuB,EAAEW,OAAAA,CAAQC,OAAO,CAAC,IAAI,EAAEH,SAAAA,CAAU,EAAE,EAAEU,GAAAA,CAAI9B,OAAO,CAAA,CAAE,CAAA;AAC/F;AACJ,KAAA;AAEA,IAAA,MAAM+B,aAAa,OAAO3C,IAAAA,GAAAA;QACtB,OAAOC,EAAAA,CAAG2C,gBAAgB,CAAC5C,IAAAA,CAAAA;AAC/B,KAAA;IAEA,MAAM6C,QAAAA,GAAW,OAAO7C,IAAAA,EAAc8C,MAAAA,GAAAA;QAClC,MAAMN,IAAAA,GAAO,MAAMhB,QAAAA,CAASxB,IAAAA,EAAM,MAAA,CAAA;AAClC,QAAA,OAAO+C,MAAAA,CAAOC,UAAU,CAAC,QAAA,CAAA,CAAUC,MAAM,CAACT,IAAAA,CAAAA,CAAMU,MAAM,CAAC,KAAA,CAAA,CAAOC,KAAK,CAAC,CAAA,EAAGL,MAAAA,CAAAA;AAC3E,KAAA;AAEA,IAAA,MAAMM,YAAY,OAAOpB,SAAAA,GAAAA;AACrB,QAAA,OAAO,MAAM/B,EAAAA,CAAGC,QAAQ,CAACmD,OAAO,CAACrB,SAAAA,CAAAA;AACrC,KAAA;IAEA,OAAO;AACHjC,QAAAA,MAAAA;AACAM,QAAAA,WAAAA;AACAE,QAAAA,MAAAA;AACAC,QAAAA,UAAAA;AACAM,QAAAA,UAAAA;AACAE,QAAAA,cAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,eAAAA;AACAK,QAAAA,QAAAA;AACAmB,QAAAA,UAAAA;AACAjB,QAAAA,SAAAA;AACAE,QAAAA,MAAAA;AACAG,QAAAA,aAAAA;AACAc,QAAAA,QAAAA;AACAO,QAAAA;AACJ,KAAA;AACJ;;;;"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@eldrforge/kodrdriv",
-  "version": "0.0.3",
+  "version": "0.0.7",
   "description": "Create Intelligent Release Notes or Change Logs from Git",
   "main": "dist/main.js",
   "type": "module",
@@ -21,45 +21,46 @@
   "author": "Calen Varek <calenvarek@gmail.com>",
   "license": "Apache-2.0",
   "dependencies": {
+    "@octokit/rest": "^22.0.0",
     "@riotprompt/riotprompt": "^0.0.2",
-    "@theunwalked/cardigantime": "^0.0.
+    "@theunwalked/cardigantime": "^0.0.5",
     "commander": "^14.0.0",
     "dayjs": "^1.11.13",
-    "dotenv": "^
-    "glob": "^11.0.
+    "dotenv": "^17.0.0",
+    "glob": "^11.0.3",
     "js-yaml": "^4.1.0",
     "luxon": "^3.6.1",
     "moment-timezone": "^0.6.0",
-    "openai": "^
+    "openai": "^5.8.2",
     "shell-escape": "^0.2.0",
     "winston": "^3.17.0",
-    "zod": "^3.25.
+    "zod": "^3.25.67"
   },
   "devDependencies": {
     "@eslint/eslintrc": "^3.3.1",
-    "@eslint/js": "^9.
+    "@eslint/js": "^9.30.0",
     "@rollup/plugin-replace": "^6.0.2",
-    "@swc/core": "^1.
+    "@swc/core": "^1.12.7",
     "@types/js-yaml": "^4.0.9",
     "@types/luxon": "^3.6.2",
-    "@types/node": "^
+    "@types/node": "^24.0.8",
     "@types/shell-escape": "^0.2.3",
     "@types/winston": "^2.4.4",
-    "@typescript-eslint/eslint-plugin": "^8.
-    "@typescript-eslint/parser": "^8.
-    "@vitest/coverage-v8": "^3.
+    "@typescript-eslint/eslint-plugin": "^8.35.1",
+    "@typescript-eslint/parser": "^8.35.1",
+    "@vitest/coverage-v8": "^3.2.4",
     "copyfiles": "^2.4.1",
-    "esbuild": "0.25.
-    "eslint": "^9.
-    "eslint-plugin-import": "^2.
+    "esbuild": "0.25.5",
+    "eslint": "^9.30.0",
+    "eslint-plugin-import": "^2.32.0",
     "globals": "^16.2.0",
     "mockdate": "^3.0.5",
     "rollup-plugin-preserve-shebang": "^1.0.1",
-    "rollup-plugin-visualizer": "^
+    "rollup-plugin-visualizer": "^6.0.3",
     "typescript": "^5.8.3",
-    "vite": "^
+    "vite": "^7.0.0",
     "vite-plugin-node": "^5.0.1",
-    "vitest": "^3.
+    "vitest": "^3.2.4"
   },
   "scripts": {
     "build": "tsc --noEmit && vite build && copyfiles -u 1 \"src/**/*.md\" dist",
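The newly added `@octokit/rest` dependency is a GitHub API client. As a rough, generic sketch of what a release-creation call with that client looks like (this is not the package's own GitHub utility; the token, owner, and repo values are placeholders):

```typescript
// Generic @octokit/rest sketch; not taken from the package's source.
import { Octokit } from '@octokit/rest';

async function createGitHubRelease(tag: string, title: string, body: string): Promise<void> {
    // GITHUB_TOKEN, owner, and repo are placeholders for illustration.
    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

    await octokit.rest.repos.createRelease({
        owner: 'example-owner',
        repo: 'example-repo',
        tag_name: tag,
        name: title,
        body,
    });
}
```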
package/vitest.config.ts
CHANGED
@@ -5,16 +5,19 @@ export default defineConfig({
         globals: false,
         environment: 'node',
         include: ['tests/**/*.test.ts'],
+        env: {
+            TZ: 'America/New_York'
+        },
         coverage: {
             provider: 'v8',
             reporter: ['text', 'lcov', 'html'],
             all: true,
             include: ['src/**/*.ts'],
             thresholds: {
-
-
-
-
+                statements: 91,
+                branches: 96,
+                functions: 98,
+                lines: 91,
             }
         },
     },
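Pinning `TZ` for the whole test run makes local-time formatting deterministic regardless of the machine running the suite. A small illustrative Vitest test of the kind of assertion this stabilizes — not taken from the package's test suite, and with the helpers imported explicitly since `globals` is `false`:

```typescript
// Illustrative test only; relies on TZ=America/New_York from vitest.config.ts.
import { describe, expect, it } from 'vitest';

describe('timezone-sensitive formatting', () => {
    it('formats a UTC instant in the configured zone', () => {
        // With TZ=America/New_York, 2024-01-15T12:00:00Z is 07:00 local (EST, UTC-5).
        const localHour = new Date('2024-01-15T12:00:00Z').getHours();
        expect(localHour).toBe(7);
    });
});
```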