opencode-pilot 0.15.1 → 0.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/opencode-pilot +7 -4
- package/package.json +1 -1
- package/service/actions.js +14 -2
- package/service/repo-config.js +105 -6
- package/test/unit/actions.test.js +108 -0
- package/test/unit/repo-config.test.js +99 -0
package/bin/opencode-pilot
CHANGED
@@ -295,16 +295,19 @@ async function configCommand() {
     const name = source.name || "<unnamed>";
     const tool = source.tool;

-    if (!tool || !tool.mcp || !tool.name) {
-      console.log(`  ✗ ${name}: missing tool.
+    if (!tool || (!tool.command && (!tool.mcp || !tool.name))) {
+      console.log(`  ✗ ${name}: missing tool.command or tool.mcp/tool.name`);
       continue;
     }

+    const toolDesc = tool.command
+      ? `cli: ${Array.isArray(tool.command) ? tool.command[0] : tool.command.split(' ')[0]}`
+      : `${tool.mcp}/${tool.name}`;
     const itemId = source.item?.id;
     if (!itemId) {
-      console.log(`  ⚠ ${name}: ${
+      console.log(`  ⚠ ${name}: ${toolDesc} (no item.id template)`);
     } else {
-      console.log(`  ✓ ${name}: ${
+      console.log(`  ✓ ${name}: ${toolDesc}`);
     }

     // Show prompt and agent
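In practice, the relaxed check above means a source may declare either a CLI command or an MCP tool. A minimal sketch of both shapes follows; the source names and commands are hypothetical, and only the field names tool.command, tool.mcp, tool.name, and item.id come from the diff.

// Hypothetical source entries, for illustration only.
const cliSource = {
  name: "issue-triage-cli",
  tool: { command: ["gh", "issue", "list"] },    // passes: tool.command is set; listed as "cli: gh"
  item: { id: "{number}" }
};

const mcpSource = {
  name: "issue-triage-mcp",
  tool: { mcp: "github", name: "list_issues" },  // passes: tool.mcp + tool.name; listed as "github/list_issues"
  item: { id: "{number}" }
};

const invalidSource = {
  name: "broken",
  tool: {}                                       // rejected: neither command nor mcp/name
};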
package/package.json
CHANGED
package/service/actions.js
CHANGED
@@ -10,7 +10,7 @@ import { readFileSync, existsSync } from "fs";
 import { debug } from "./logger.js";
 import { getNestedValue } from "./utils.js";
 import { getServerPort } from "./repo-config.js";
-import { resolveWorktreeDirectory } from "./worktree.js";
+import { resolveWorktreeDirectory, getProjectInfo } from "./worktree.js";
 import path from "path";
 import os from "os";

@@ -573,8 +573,20 @@ export async function executeAction(item, config, options = {}) {

   // Resolve worktree directory if configured
   // This allows creating sessions in isolated worktrees instead of the main project
+  let worktreeMode = config.worktree;
+
+  // Auto-detect worktree support: if not explicitly configured and server is running,
+  // check if the project has sandboxes (indicating worktree workflow is set up)
+  if (!worktreeMode && serverUrl) {
+    const projectInfo = await getProjectInfo(serverUrl, { fetch: options.fetch });
+    if (projectInfo?.sandboxes?.length > 0) {
+      debug(`executeAction: auto-detected worktree support (${projectInfo.sandboxes.length} sandboxes)`);
+      worktreeMode = 'new';
+    }
+  }
+
   const worktreeConfig = {
-    worktree:
+    worktree: worktreeMode,
     // Expand worktree_name template with item fields (e.g., "issue-{number}")
     worktreeName: config.worktree_name ? expandTemplate(config.worktree_name, item) : undefined,
   };
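The decision the new block makes can be summarized in a short sketch. This is not the shipped implementation: the /project/current response shape is taken from the mocked responses in the new tests, and getProjectInfo is assumed to return that parsed JSON (or null on failure).

// Sketch of the auto-detection rule under the assumptions above.
async function resolveWorktreeMode(config, serverUrl, getProjectInfo) {
  let worktreeMode = config.worktree;            // explicit config always wins
  if (!worktreeMode && serverUrl) {
    // e.g. { id: 'proj-123', worktree: '/repo', sandboxes: ['/data/worktree/proj-123/sandbox-1'] }
    const projectInfo = await getProjectInfo(serverUrl, {});
    if (projectInfo?.sandboxes?.length > 0) {
      worktreeMode = 'new';                      // existing sandboxes imply a worktree workflow
    }
  }
  return worktreeMode;                           // undefined means: run in the base directory
}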
package/service/repo-config.js
CHANGED
@@ -14,6 +14,7 @@ import fs from "fs";
 import path from "path";
 import os from "os";
 import YAML from "yaml";
+import { execSync } from "child_process";
 import { getNestedValue } from "./utils.js";
 import { expandPreset, expandGitHubShorthand, getProviderConfig } from "./presets/index.js";

@@ -32,6 +33,82 @@ const DEFAULT_TEMPLATES_DIR = path.join(
 // In-memory config cache (for testing and runtime)
 let configCache = null;

+// Cache for discovered repos from repos_dir
+let discoveredReposCache = null;
+
+/**
+ * Parse GitHub owner/repo from a git remote URL
+ * Supports HTTPS and SSH formats
+ * @param {string} url - Git remote URL
+ * @returns {string|null} "owner/repo" or null if not a GitHub URL
+ */
+function parseGitHubRepo(url) {
+  if (!url) return null;
+
+  // HTTPS: https://github.com/owner/repo.git
+  const httpsMatch = url.match(/github\.com\/([^/]+)\/([^/.]+)/);
+  if (httpsMatch) {
+    return `${httpsMatch[1]}/${httpsMatch[2]}`;
+  }
+
+  // SSH: git@github.com:owner/repo.git
+  const sshMatch = url.match(/github\.com:([^/]+)\/([^/.]+)/);
+  if (sshMatch) {
+    return `${sshMatch[1]}/${sshMatch[2]}`;
+  }
+
+  return null;
+}
+
+/**
+ * Discover repos from a repos_dir by scanning git remotes
+ * @param {string} reposDir - Directory containing git repositories
+ * @returns {Map<string, object>} Map of "owner/repo" -> { path }
+ */
+function discoverRepos(reposDir) {
+  const discovered = new Map();
+
+  if (!reposDir || !fs.existsSync(reposDir)) {
+    return discovered;
+  }
+
+  const normalizedDir = reposDir.replace(/^~/, os.homedir());
+
+  try {
+    const entries = fs.readdirSync(normalizedDir, { withFileTypes: true });
+
+    for (const entry of entries) {
+      if (!entry.isDirectory()) continue;
+
+      const repoPath = path.join(normalizedDir, entry.name);
+      const gitDir = path.join(repoPath, '.git');
+
+      // Skip if not a git repo
+      if (!fs.existsSync(gitDir)) continue;
+
+      // Get remote origin URL via git
+      try {
+        const remoteUrl = execSync('git remote get-url origin', {
+          cwd: repoPath,
+          encoding: 'utf-8',
+          stdio: ['pipe', 'pipe', 'pipe']
+        }).trim();
+
+        const repoKey = parseGitHubRepo(remoteUrl);
+        if (repoKey) {
+          discovered.set(repoKey, { path: repoPath });
+        }
+      } catch {
+        // Skip repos without origin or git errors
+      }
+    }
+  } catch {
+    // Directory read error
+  }
+
+  return discovered;
+}
+
 /**
  * Expand template string with item fields
  * Supports {field} and {field.nested} syntax
@@ -53,6 +130,10 @@ export function loadRepoConfig(configOrPath) {
   if (typeof configOrPath === "object") {
     // Direct config object (for testing)
     configCache = configOrPath;
+    // Discover repos if repos_dir is set
+    discoveredReposCache = configCache.repos_dir
+      ? discoverRepos(configCache.repos_dir)
+      : null;
     return configCache;
   }

@@ -60,16 +141,22 @@ export function loadRepoConfig(configOrPath) {

   if (!fs.existsSync(configPath)) {
     configCache = emptyConfig;
+    discoveredReposCache = null;
     return configCache;
   }

   try {
     const content = fs.readFileSync(configPath, "utf-8");
     configCache = YAML.parse(content, { merge: true }) || emptyConfig;
+    // Discover repos if repos_dir is set
+    discoveredReposCache = configCache.repos_dir
+      ? discoverRepos(configCache.repos_dir)
+      : null;
   } catch (err) {
     // Log error but continue with empty config to allow graceful degradation
     console.error(`Warning: Failed to parse config at ${configPath}: ${err.message}`);
     configCache = emptyConfig;
+    discoveredReposCache = null;
   }
   return configCache;
 }

@@ -86,20 +173,31 @@ function getRawConfig() {

 /**
  * Get configuration for a specific repo
+ * Checks explicit repos config first, then falls back to auto-discovered repos
  * @param {string} repoKey - Repository identifier (e.g., "myorg/backend")
  * @returns {object} Repository configuration or empty object
  */
 export function getRepoConfig(repoKey) {
   const config = getRawConfig();
   const repos = config.repos || {};
-
-
-
-
-
+
+  // Check explicit repos config first
+  if (repos[repoKey]) {
+    const repoConfig = repos[repoKey];
+    // Normalize: support both 'path' and 'repo_path' keys
+    if (repoConfig.path && !repoConfig.repo_path) {
+      return { ...repoConfig, repo_path: repoConfig.path };
+    }
+    return repoConfig;
+  }
+
+  // Fall back to auto-discovered repos from repos_dir
+  if (discoveredReposCache && discoveredReposCache.has(repoKey)) {
+    const discovered = discoveredReposCache.get(repoKey);
+    return { ...discovered, repo_path: discovered.path };
   }

-  return
+  return {};
 }

 /**
@@ -324,4 +422,5 @@ export function getServerPort() {
  */
 export function clearConfigCache() {
   configCache = null;
+  discoveredReposCache = null;
 }
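A hedged usage sketch of the new discovery path, built on the object form of loadRepoConfig that the module already accepts for testing; the directory layout, remote URL, and import path below are hypothetical.

// Assumes /home/user/code/backend is a git clone whose origin is
// git@github.com:myorg/backend.git (hypothetical layout).
import { loadRepoConfig, getRepoConfig, clearConfigCache } from "./service/repo-config.js";

loadRepoConfig({ repos_dir: "/home/user/code" });  // scans each subdirectory's git remote

const cfg = getRepoConfig("myorg/backend");
// -> { path: "/home/user/code/backend", repo_path: "/home/user/code/backend" }
// An explicit entry under `repos:` for the same key would take precedence.

clearConfigCache();  // also clears the discovered-repos cache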
package/test/unit/actions.test.js
CHANGED
@@ -693,6 +693,114 @@ describe('actions.js', () => {
     assert.ok(result.command.includes(tempDir),
       'Should fall back to base directory when worktree creation fails');
   });
+
+  test('auto-detects worktree support when project has sandboxes (dry run)', async () => {
+    const { executeAction } = await import('../../service/actions.js');
+
+    const item = { number: 456, title: 'New feature' };
+    const config = {
+      path: tempDir,
+      prompt: 'default'
+      // Note: no worktree config - should be auto-detected
+    };
+
+    // Mock server discovery
+    const mockDiscoverServer = async () => 'http://localhost:4096';
+
+    // Track API calls
+    let projectInfoCalled = false;
+    let worktreeCreateCalled = false;
+
+    const mockFetch = async (url, opts) => {
+      // Project info endpoint - returns sandboxes indicating worktree workflow
+      if (url === 'http://localhost:4096/project/current') {
+        projectInfoCalled = true;
+        return {
+          ok: true,
+          json: async () => ({
+            id: 'proj-123',
+            worktree: tempDir,
+            sandboxes: ['/data/worktree/proj-123/sandbox-1'],
+            time: { created: 1 }
+          })
+        };
+      }
+      // Worktree creation endpoint
+      if (url === 'http://localhost:4096/experimental/worktree' && opts?.method === 'POST') {
+        worktreeCreateCalled = true;
+        return {
+          ok: true,
+          json: async () => ({
+            name: 'new-sandbox',
+            branch: 'opencode/new-sandbox',
+            directory: '/data/worktree/proj-123/new-sandbox'
+          })
+        };
+      }
+      return { ok: false, text: async () => 'Not found' };
+    };
+
+    const result = await executeAction(item, config, {
+      dryRun: true,
+      discoverServer: mockDiscoverServer,
+      fetch: mockFetch
+    });
+
+    assert.ok(result.dryRun);
+    assert.ok(projectInfoCalled, 'Should call project/current to check for sandboxes');
+    assert.ok(worktreeCreateCalled, 'Should auto-create worktree when sandboxes detected');
+    assert.ok(result.command.includes('/data/worktree/proj-123/new-sandbox'),
+      'Should use newly created worktree directory');
+  });
+
+  test('does not auto-create worktree when project has no sandboxes (dry run)', async () => {
+    const { executeAction } = await import('../../service/actions.js');
+
+    const item = { number: 789, title: 'Simple fix' };
+    const config = {
+      path: tempDir,
+      prompt: 'default'
+      // Note: no worktree config
+    };
+
+    // Mock server discovery
+    const mockDiscoverServer = async () => 'http://localhost:4096';
+
+    let projectInfoCalled = false;
+    let worktreeCreateCalled = false;
+
+    const mockFetch = async (url, opts) => {
+      // Project info endpoint - returns empty sandboxes (no worktree workflow)
+      if (url === 'http://localhost:4096/project/current') {
+        projectInfoCalled = true;
+        return {
+          ok: true,
+          json: async () => ({
+            id: 'proj-456',
+            worktree: tempDir,
+            sandboxes: [],
+            time: { created: 1 }
+          })
+        };
+      }
+      if (url === 'http://localhost:4096/experimental/worktree' && opts?.method === 'POST') {
+        worktreeCreateCalled = true;
+      }
+      return { ok: false, text: async () => 'Not found' };
+    };
+
+    const result = await executeAction(item, config, {
+      dryRun: true,
+      discoverServer: mockDiscoverServer,
+      fetch: mockFetch
+    });
+
+    assert.ok(result.dryRun);
+    assert.ok(projectInfoCalled, 'Should call project/current to check for sandboxes');
+    assert.ok(!worktreeCreateCalled, 'Should NOT create worktree when no sandboxes');
+    assert.ok(result.command.includes(tempDir),
+      'Should use base directory when no worktree workflow detected');
+  });
 });

 describe('createSessionViaApi', () => {
package/test/unit/repo-config.test.js
CHANGED
@@ -7,6 +7,7 @@ import assert from 'node:assert';
 import { mkdtempSync, writeFileSync, mkdirSync, rmSync } from 'fs';
 import { join } from 'path';
 import { tmpdir } from 'os';
+import { execSync } from 'child_process';

 describe('repo-config.js', () => {
   let tempDir;
@@ -119,6 +120,104 @@ repos:
     });
   });

+  describe('repos_dir auto-discovery', () => {
+    let reposDir;
+
+    beforeEach(() => {
+      reposDir = join(tempDir, 'code');
+      mkdirSync(reposDir);
+    });
+
+    function createGitRepo(name, remoteUrl) {
+      const repoPath = join(reposDir, name);
+      mkdirSync(repoPath);
+      execSync('git init', { cwd: repoPath, stdio: 'ignore' });
+      execSync(`git remote add origin ${remoteUrl}`, { cwd: repoPath, stdio: 'ignore' });
+      return repoPath;
+    }
+
+    test('discovers repos from git remote origin', async () => {
+      const repoPath = createGitRepo('my-project', 'https://github.com/myorg/my-project.git');
+
+      writeFileSync(configPath, `
+repos_dir: ${reposDir}
+`);
+
+      const { loadRepoConfig, getRepoConfig } = await import('../../service/repo-config.js');
+      loadRepoConfig(configPath);
+
+      const config = getRepoConfig('myorg/my-project');
+      assert.strictEqual(config.path, repoPath);
+      assert.strictEqual(config.repo_path, repoPath);
+    });
+
+    test('handles SSH git URLs', async () => {
+      const repoPath = createGitRepo('backend', 'git@github.com:myorg/backend.git');
+
+      writeFileSync(configPath, `
+repos_dir: ${reposDir}
+`);
+
+      const { loadRepoConfig, getRepoConfig } = await import('../../service/repo-config.js');
+      loadRepoConfig(configPath);
+
+      const config = getRepoConfig('myorg/backend');
+      assert.strictEqual(config.path, repoPath);
+    });
+
+    test('explicit repos override auto-discovered', async () => {
+      createGitRepo('my-project', 'https://github.com/myorg/my-project.git');
+
+      writeFileSync(configPath, `
+repos_dir: ${reposDir}
+repos:
+  myorg/my-project:
+    path: /custom/path
+    prompt: custom
+`);
+
+      const { loadRepoConfig, getRepoConfig } = await import('../../service/repo-config.js');
+      loadRepoConfig(configPath);
+
+      const config = getRepoConfig('myorg/my-project');
+      // Explicit config should win
+      assert.strictEqual(config.path, '/custom/path');
+      assert.strictEqual(config.prompt, 'custom');
+    });
+
+    test('skips directories without .git', async () => {
+      const notARepo = join(reposDir, 'not-a-repo');
+      mkdirSync(notARepo);
+      writeFileSync(join(notARepo, 'file.txt'), 'hello');
+
+      writeFileSync(configPath, `
+repos_dir: ${reposDir}
+`);
+
+      const { loadRepoConfig, getRepoConfig } = await import('../../service/repo-config.js');
+      loadRepoConfig(configPath);
+
+      // Should not find this as a repo
+      const config = getRepoConfig('not-a-repo');
+      assert.deepStrictEqual(config, {});
+    });
+
+    test('returns empty for unknown repo even with repos_dir', async () => {
+      createGitRepo('my-project', 'https://github.com/myorg/my-project.git');
+
+      writeFileSync(configPath, `
+repos_dir: ${reposDir}
+`);
+
+      const { loadRepoConfig, getRepoConfig } = await import('../../service/repo-config.js');
+      loadRepoConfig(configPath);
+
+      const config = getRepoConfig('unknown/repo');
+      assert.deepStrictEqual(config, {});
+    });
+
+  });
+
   describe('sources', () => {
     test('returns sources array from top-level', async () => {
       writeFileSync(configPath, `