fss-link 1.5.9 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/bundle/fss-link.js +3824 -2660
  2. package/package.json +3 -1
  3. package/scripts/analyze-session-logs.sh +279 -0
  4. package/scripts/build.js +55 -0
  5. package/scripts/build_package.js +37 -0
  6. package/scripts/build_sandbox.js +195 -0
  7. package/scripts/build_vscode_companion.js +30 -0
  8. package/scripts/check-build-status.js +148 -0
  9. package/scripts/check-publish.js +101 -0
  10. package/scripts/clean.js +55 -0
  11. package/scripts/copy_bundle_assets.js +40 -0
  12. package/scripts/copy_files.js +56 -0
  13. package/scripts/create_alias.sh +39 -0
  14. package/scripts/emergency-kill-all-tests.sh +95 -0
  15. package/scripts/emergency-kill-vitest.sh +95 -0
  16. package/scripts/extract-session-logs.sh +202 -0
  17. package/scripts/generate-git-commit-info.js +71 -0
  18. package/scripts/get-previous-tag.js +213 -0
  19. package/scripts/get-release-version.js +119 -0
  20. package/scripts/index-session-logs.sh +173 -0
  21. package/scripts/install-linux.sh +294 -0
  22. package/scripts/install-macos.sh +343 -0
  23. package/scripts/install-windows.ps1 +427 -0
  24. package/scripts/local_telemetry.js +219 -0
  25. package/scripts/memory-monitor.sh +165 -0
  26. package/scripts/postinstall-message.js +31 -0
  27. package/scripts/prepare-package.js +51 -0
  28. package/scripts/process-session-log.py +302 -0
  29. package/scripts/quick-install.sh +195 -0
  30. package/scripts/sandbox_command.js +126 -0
  31. package/scripts/start.js +76 -0
  32. package/scripts/telemetry.js +85 -0
  33. package/scripts/telemetry_gcp.js +188 -0
  34. package/scripts/telemetry_utils.js +421 -0
  35. package/scripts/test-windows-paths.js +51 -0
  36. package/scripts/tests/get-release-version.test.js +110 -0
  37. package/scripts/tests/test-setup.ts +12 -0
  38. package/scripts/tests/vitest.config.ts +20 -0
  39. package/scripts/version.js +83 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "fss-link",
3
- "version": "1.5.9",
3
+ "version": "1.6.0",
4
4
  "engines": {
5
5
  "node": ">=20.0.0"
6
6
  },
@@ -56,6 +56,7 @@
56
56
  },
57
57
  "files": [
58
58
  "bundle/",
59
+ "scripts/",
59
60
  "README.md",
60
61
  "LICENSE"
61
62
  ],
@@ -147,6 +148,7 @@
147
148
  "react-dom": "19.1.1",
148
149
  "read-package-up": "^11.0.0",
149
150
  "shell-quote": "^1.8.3",
151
+ "sharp": "^0.34.4",
150
152
  "signal-exit": "^4.1.0",
151
153
  "simple-git": "^3.28.0",
152
154
  "sql.js": "^1.11.0",
@@ -0,0 +1,279 @@
1
+ #!/bin/bash
2
+ # FSS Link Session Log Analyzer
3
+ # Quick analysis and searching of processed session logs
4
+
5
+ set -euo pipefail
6
+
7
+ # Configuration
8
+ PROCESSED_DIR="/MASTERFOLDER/Projects/fss-link/fss-live-testing/session-logs/processed"
9
+ ANALYSIS_DIR="/MASTERFOLDER/Projects/fss-link/fss-live-testing/session-logs/analysis"
10
+
11
+ # Colors
12
+ RED='\033[0;31m'
13
+ GREEN='\033[0;32m'
14
+ YELLOW='\033[1;33m'
15
+ BLUE='\033[0;34m'
16
+ NC='\033[0m'
17
+
18
+ usage() {
19
+ cat << EOF
20
+ Usage: $0 [COMMAND] [OPTIONS]
21
+
22
+ Analyze FSS Link session logs for testing validation.
23
+
24
+ COMMANDS:
25
+ stats Show statistics for all sessions
26
+ search TERM Search for specific term in sessions
27
+ tools TOOL Find sessions using specific tool
28
+ errors Find sessions with errors
29
+ summary FILE Generate summary report for session
30
+
31
+ OPTIONS:
32
+ -h, --help Show this help message
33
+
34
+ EXAMPLES:
35
+ $0 stats # Show all session statistics
36
+ $0 search "read_file" # Find sessions mentioning read_file
37
+ $0 tools TodoWrite # Find sessions using TodoWrite
38
+ $0 errors # Find sessions with errors
39
+ $0 summary SESSION_*.md # Summarize specific session
40
+
41
+ EOF
42
+ exit 1
43
+ }
44
+
45
+ # Show statistics for all processed sessions
46
+ show_stats() {
47
+ echo -e "${BLUE}=== Session Log Statistics ===${NC}\n"
48
+
49
+ if [[ ! -d "${PROCESSED_DIR}" ]] || [[ -z "$(ls -A ${PROCESSED_DIR} 2>/dev/null)" ]]; then
50
+ echo -e "${RED}No processed sessions found in ${PROCESSED_DIR}${NC}"
51
+ exit 1
52
+ fi
53
+
54
+ local total_sessions=0
55
+ local total_messages=0
56
+ local total_tool_calls=0
57
+ local total_errors=0
58
+
59
+ for session in "${PROCESSED_DIR}"/*.md; do
60
+ if [[ ! -f "${session}" ]]; then
61
+ continue
62
+ fi
63
+
64
+ ((total_sessions++))
65
+
66
+ # Extract metadata from session file
67
+ messages=$(grep "^\*\*Total Messages\*\*:" "${session}" | sed 's/.*: //')
68
+ tool_calls=$(grep "^\*\*Tool Calls\*\*:" "${session}" | sed 's/.*: //' || echo "0")
69
+ errors=$(grep "^\*\*Errors\*\*:" "${session}" | sed 's/.*: //' || echo "0")
70
+
71
+ # Extract from metadata section if not in header
72
+ if [[ -z "${messages}" ]]; then
73
+ messages=$(grep '"total_messages":' "${session}" | sed 's/.*: //' | tr -d ',' || echo "0")
74
+ fi
75
+ if [[ -z "${tool_calls}" ]] || [[ "${tool_calls}" == "0" ]]; then
76
+ tool_calls=$(grep '"total_tool_calls":' "${session}" | sed 's/.*: //' | tr -d ',' || echo "0")
77
+ fi
78
+ if [[ -z "${errors}" ]] || [[ "${errors}" == "0" ]]; then
79
+ errors=$(grep '"total_errors":' "${session}" | sed 's/.*: //' | tr -d ',' || echo "0")
80
+ fi
81
+
82
+ total_messages=$((total_messages + messages))
83
+ total_tool_calls=$((total_tool_calls + tool_calls))
84
+ total_errors=$((total_errors + errors))
85
+
86
+ session_name=$(basename "${session}")
87
+ echo -e "${GREEN}${session_name}${NC}"
88
+ echo " Messages: ${messages}"
89
+ echo " Tool calls: ${tool_calls}"
90
+ echo " Errors: ${errors}"
91
+ echo ""
92
+ done
93
+
94
+ echo -e "${YELLOW}=== Totals ===${NC}"
95
+ echo "Sessions analyzed: ${total_sessions}"
96
+ echo "Total messages: ${total_messages}"
97
+ echo "Total tool calls: ${total_tool_calls}"
98
+ echo "Total errors: ${total_errors}"
99
+ }
100
+
101
+ # Search for term in sessions
102
+ search_term() {
103
+ local term="$1"
104
+ echo -e "${BLUE}=== Searching for: '${term}' ===${NC}\n"
105
+
106
+ for session in "${PROCESSED_DIR}"/*.md; do
107
+ if [[ ! -f "${session}" ]]; then
108
+ continue
109
+ fi
110
+
111
+ if grep -qi "${term}" "${session}"; then
112
+ session_name=$(basename "${session}")
113
+ count=$(grep -ci "${term}" "${session}")
114
+ echo -e "${GREEN}${session_name}${NC} - ${count} matches"
115
+
116
+ # Show first few matches with context
117
+ grep -ni "${term}" "${session}" | head -3 | while read -r line; do
118
+ echo " ${line}"
119
+ done
120
+ echo ""
121
+ fi
122
+ done
123
+ }
124
+
125
+ # Find sessions using specific tool
126
+ find_tool_usage() {
127
+ local tool="$1"
128
+ echo -e "${BLUE}=== Sessions using tool: '${tool}' ===${NC}\n"
129
+
130
+ for session in "${PROCESSED_DIR}"/*.md; do
131
+ if [[ ! -f "${session}" ]]; then
132
+ continue
133
+ fi
134
+
135
+ # Check in tools_used metadata or tool call sections
136
+ if grep -qi "\"${tool}\"" "${session}" || grep -qi "\[FUNCTION CALL: ${tool}\]" "${session}"; then
137
+ session_name=$(basename "${session}")
138
+ call_count=$(grep -ci "\[FUNCTION CALL: ${tool}\]" "${session}" || echo "0")
139
+
140
+ echo -e "${GREEN}${session_name}${NC}"
141
+ echo " Tool calls: ${call_count}"
142
+
143
+ # Extract tool calls section summary if exists
144
+ if grep -qi "### ${tool}" "${session}"; then
145
+ summary=$(grep -A 1 "### ${tool}" "${session}" | tail -1)
146
+ echo " ${summary}"
147
+ fi
148
+ echo ""
149
+ fi
150
+ done
151
+ }
152
+
153
+ # Find sessions with errors
154
+ find_errors() {
155
+ echo -e "${BLUE}=== Sessions with Errors ===${NC}\n"
156
+
157
+ local found=0
158
+
159
+ for session in "${PROCESSED_DIR}"/*.md; do
160
+ if [[ ! -f "${session}" ]]; then
161
+ continue
162
+ fi
163
+
164
+ errors=$(grep '"total_errors":' "${session}" | sed 's/.*: //' | tr -d ',' || echo "0")
165
+
166
+ if [[ "${errors}" != "0" ]] && [[ -n "${errors}" ]]; then
167
+ ((found++))
168
+ session_name=$(basename "${session}")
169
+ echo -e "${RED}${session_name}${NC} - ${errors} errors"
170
+
171
+ # Show error section if exists
172
+ if grep -q "## Errors and Issues" "${session}"; then
173
+ grep -A 5 "## Errors and Issues" "${session}" | tail -4
174
+ fi
175
+ echo ""
176
+ fi
177
+ done
178
+
179
+ if [[ ${found} -eq 0 ]]; then
180
+ echo -e "${GREEN}No errors found in any session!${NC}"
181
+ fi
182
+ }
183
+
184
+ # Generate summary for specific session
185
+ generate_summary() {
186
+ local session_file="$1"
187
+
188
+ if [[ ! -f "${session_file}" ]]; then
189
+ echo -e "${RED}Session file not found: ${session_file}${NC}"
190
+ exit 1
191
+ fi
192
+
193
+ session_name=$(basename "${session_file}")
194
+ echo -e "${BLUE}=== Summary: ${session_name} ===${NC}\n"
195
+
196
+ # Extract key metadata
197
+ messages=$(grep '"total_messages":' "${session_file}" | sed 's/.*: //' | tr -d ',')
198
+ tool_calls=$(grep '"total_tool_calls":' "${session_file}" | sed 's/.*: //' | tr -d ',')
199
+ errors=$(grep '"total_errors":' "${session_file}" | sed 's/.*: //' | tr -d ',')
200
+ session_date=$(grep '"session_date":' "${session_file}" | sed 's/.*: "//' | tr -d '",')
201
+
202
+ echo -e "${YELLOW}Basic Info:${NC}"
203
+ echo " Date: ${session_date}"
204
+ echo " Messages: ${messages}"
205
+ echo " Tool calls: ${tool_calls}"
206
+ echo " Errors: ${errors}"
207
+ echo ""
208
+
209
+ # Extract tools used
210
+ echo -e "${YELLOW}Tools Used:${NC}"
211
+ grep '"tools_used":' -A 12 "${session_file}" | grep '"' | sed 's/.*"//;s/".*//' | sed 's/^/ - /'
212
+ echo ""
213
+
214
+ # Show tool call breakdown
215
+ echo -e "${YELLOW}Tool Call Breakdown:${NC}"
216
+ grep -E "^### .* \([0-9]+ calls\)" "${session_file}" | sed 's/^###/ -/'
217
+ echo ""
218
+
219
+ # Check for key events
220
+ echo -e "${YELLOW}Key Events:${NC}"
221
+ if grep -q "NO NO NO" "${session_file}"; then
222
+ echo " ⚠️ User correction detected"
223
+ fi
224
+ if grep -qi "TodoWrite" "${session_file}"; then
225
+ todo_count=$(grep -ci "todo_write" "${session_file}")
226
+ echo " ✓ TodoWrite used: ${todo_count} times"
227
+ fi
228
+ if grep -qi "Production Ready" "${session_file}"; then
229
+ echo " ✓ Testing completed - marked as Production Ready"
230
+ fi
231
+ }
232
+
233
+ # Main execution
234
+ main() {
235
+ if [[ $# -eq 0 ]]; then
236
+ usage
237
+ fi
238
+
239
+ mkdir -p "${ANALYSIS_DIR}"
240
+
241
+ case "$1" in
242
+ stats)
243
+ show_stats
244
+ ;;
245
+ search)
246
+ if [[ $# -lt 2 ]]; then
247
+ echo -e "${RED}Error: search requires a term${NC}"
248
+ usage
249
+ fi
250
+ search_term "$2"
251
+ ;;
252
+ tools)
253
+ if [[ $# -lt 2 ]]; then
254
+ echo -e "${RED}Error: tools requires a tool name${NC}"
255
+ usage
256
+ fi
257
+ find_tool_usage "$2"
258
+ ;;
259
+ errors)
260
+ find_errors
261
+ ;;
262
+ summary)
263
+ if [[ $# -lt 2 ]]; then
264
+ echo -e "${RED}Error: summary requires a session file${NC}"
265
+ usage
266
+ fi
267
+ generate_summary "$2"
268
+ ;;
269
+ -h|--help)
270
+ usage
271
+ ;;
272
+ *)
273
+ echo -e "${RED}Unknown command: $1${NC}"
274
+ usage
275
+ ;;
276
+ esac
277
+ }
278
+
279
+ main "$@"
@@ -0,0 +1,55 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 Google LLC
4
+ * SPDX-License-Identifier: Apache-2.0
5
+ */
6
+
7
+ //
8
+ // Licensed under the Apache License, Version 2.0 (the "License");
9
+ // you may not use this file except in compliance with the License.
10
+ // You may obtain a copy of the License at
11
+ //
12
+ // http://www.apache.org/licenses/LICENSE-2.0
13
+ //
14
+ // Unless required by applicable law or agreed to in writing, software
15
+ // distributed under the License is distributed on an "AS IS" BASIS,
16
+ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17
+ // See the License for the specific language governing permissions and
18
+ // limitations under the License.
19
+
20
+ import { execSync } from 'child_process';
21
+ import { existsSync } from 'fs';
22
+ import { dirname, join } from 'path';
23
+ import { fileURLToPath } from 'url';
24
+
25
+ const __dirname = dirname(fileURLToPath(import.meta.url));
26
+ const root = join(__dirname, '..');
27
+
28
+ // npm install if node_modules was removed (e.g. via npm run clean or scripts/clean.js)
29
+ if (!existsSync(join(root, 'node_modules'))) {
30
+ execSync('npm install', { stdio: 'inherit', cwd: root });
31
+ }
32
+
33
+ // build all workspaces/packages
34
+ execSync('npm run generate', { stdio: 'inherit', cwd: root });
35
+ execSync('npm run build --workspaces', { stdio: 'inherit', cwd: root });
36
+
37
+ // also build container image if sandboxing is enabled
38
+ // skip (-s) npm install + build since we did that above
39
+ try {
40
+ execSync('node scripts/sandbox_command.js -q', {
41
+ stdio: 'inherit',
42
+ cwd: root,
43
+ });
44
+ if (
45
+ process.env.BUILD_SANDBOX === '1' ||
46
+ process.env.BUILD_SANDBOX === 'true'
47
+ ) {
48
+ execSync('node scripts/build_sandbox.js -s', {
49
+ stdio: 'inherit',
50
+ cwd: root,
51
+ });
52
+ }
53
+ } catch {
54
+ // ignore
55
+ }
@@ -0,0 +1,37 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 Google LLC
4
+ * SPDX-License-Identifier: Apache-2.0
5
+ */
6
+
7
+ //
8
+ // Licensed under the Apache License, Version 2.0 (the "License");
9
+ // you may not use this file except in compliance with the License.
10
+ // You may obtain a copy of the License at
11
+ //
12
+ // http://www.apache.org/licenses/LICENSE-2.0
13
+ //
14
+ // Unless required by applicable law or agreed to in writing, software
15
+ // distributed under the License is distributed on an "AS IS" BASIS,
16
+ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17
+ // See the License for the specific language governing permissions and
18
+ // limitations under the License.
19
+
20
+ import { execSync } from 'child_process';
21
+ import { writeFileSync } from 'fs';
22
+ import { join } from 'path';
23
+
24
+ if (!process.cwd().includes('packages')) {
25
+ console.error('must be invoked from a package directory');
26
+ process.exit(1);
27
+ }
28
+
29
+ // build typescript files
30
+ execSync('tsc --build', { stdio: 'inherit' });
31
+
32
+ // copy .{md,json} files
33
+ execSync('node ../../scripts/copy_files.js', { stdio: 'inherit' });
34
+
35
+ // touch dist/.last_build
36
+ writeFileSync(join(process.cwd(), 'dist', '.last_build'), '');
37
+ process.exit(0);
@@ -0,0 +1,195 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 Google LLC
4
+ * SPDX-License-Identifier: Apache-2.0
5
+ */
6
+
7
+ //
8
+ // Licensed under the Apache License, Version 2.0 (the "License");
9
+ // you may not use this file except in compliance with the License.
10
+ // You may obtain a copy of the License at
11
+ //
12
+ // http://www.apache.org/licenses/LICENSE-2.0
13
+ //
14
+ // Unless required by applicable law or agreed to in writing, software
15
+ // distributed under the License is distributed on an "AS IS" BASIS,
16
+ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17
+ // See the License for the specific language governing permissions and
18
+ // limitations under the License.
19
+
20
+ import { execSync } from 'child_process';
21
+ import { chmodSync, existsSync, readFileSync, rmSync, writeFileSync } from 'fs';
22
+ import { join } from 'path';
23
+ import os from 'os';
24
+ import yargs from 'yargs';
25
+ import { hideBin } from 'yargs/helpers';
26
+ import cliPkgJson from '../packages/cli/package.json' with { type: 'json' };
27
+
28
+ const argv = yargs(hideBin(process.argv))
29
+ .option('s', {
30
+ alias: 'skip-npm-install-build',
31
+ type: 'boolean',
32
+ default: false,
33
+ description: 'skip npm install + npm run build',
34
+ })
35
+ .option('f', {
36
+ alias: 'dockerfile',
37
+ type: 'string',
38
+ description: 'use <dockerfile> for custom image',
39
+ })
40
+ .option('i', {
41
+ alias: 'image',
42
+ type: 'string',
43
+ description: 'use <image> name for custom image',
44
+ })
45
+ .option('output-file', {
46
+ type: 'string',
47
+ description:
48
+ 'Path to write the final image URI. Used for CI/CD pipeline integration.',
49
+ }).argv;
50
+
51
+ let sandboxCommand;
52
+ try {
53
+ sandboxCommand = execSync('node scripts/sandbox_command.js')
54
+ .toString()
55
+ .trim();
56
+ } catch {
57
+ console.warn('ERROR: could not detect sandbox container command');
58
+ process.exit(0);
59
+ }
60
+
61
+ if (sandboxCommand === 'sandbox-exec') {
62
+ console.warn(
63
+ 'WARNING: container-based sandboxing is disabled (see README.md#sandboxing)',
64
+ );
65
+ process.exit(0);
66
+ }
67
+
68
+ console.log(`using ${sandboxCommand} for sandboxing`);
69
+
70
+ const baseImage = cliPkgJson.config.sandboxImageUri;
71
+ const customImage = argv.i;
72
+ const baseDockerfile = 'Dockerfile';
73
+ const customDockerfile = argv.f;
74
+
75
+ if (!baseImage?.length) {
76
+ console.warn(
77
+ 'No default image tag specified in gemini-cli/packages/cli/package.json',
78
+ );
79
+ }
80
+
81
+ if (!argv.s) {
82
+ execSync('npm install', { stdio: 'inherit' });
83
+ execSync('npm run build --workspaces', { stdio: 'inherit' });
84
+ }
85
+
86
+ console.log('packing fss-link ...');
87
+ const cliPackageDir = join('packages', 'cli');
88
+ rmSync(join(cliPackageDir, 'dist', 'fss-link-*.tgz'), { force: true });
89
+ execSync(
90
+ `npm pack -w fss-link --pack-destination ./packages/cli/dist`,
91
+ {
92
+ stdio: 'ignore',
93
+ },
94
+ );
95
+
96
+ console.log('packing fss-link-core ...');
97
+ const corePackageDir = join('packages', 'core');
98
+ rmSync(join(corePackageDir, 'dist', 'fss-link-core-*.tgz'), {
99
+ force: true,
100
+ });
101
+ execSync(
102
+ `npm pack -w fss-link-core --pack-destination ./packages/core/dist`,
103
+ { stdio: 'ignore' },
104
+ );
105
+
106
+ const packageVersion = JSON.parse(
107
+ readFileSync(join(process.cwd(), 'package.json'), 'utf-8'),
108
+ ).version;
109
+
110
+ chmodSync(
111
+ join(cliPackageDir, 'dist', `qwen-code-qwen-code-${packageVersion}.tgz`),
112
+ 0o755,
113
+ );
114
+ chmodSync(
115
+ join(
116
+ corePackageDir,
117
+ 'dist',
118
+ `qwen-code-qwen-code-core-${packageVersion}.tgz`,
119
+ ),
120
+ 0o755,
121
+ );
122
+
123
+ const buildStdout = process.env.VERBOSE ? 'inherit' : 'ignore';
124
+
125
+ // Determine the appropriate shell based on OS
126
+ const isWindows = os.platform() === 'win32';
127
+ const shellToUse = isWindows ? 'powershell.exe' : '/bin/bash';
128
+
129
+ function buildImage(imageName, dockerfile) {
130
+ console.log(`building ${imageName} ... (can be slow first time)`);
131
+
132
+ let buildCommandArgs = '';
133
+ let tempAuthFile = '';
134
+
135
+ if (sandboxCommand === 'podman') {
136
+ if (isWindows) {
137
+ // PowerShell doesn't support <() process substitution.
138
+ // Create a temporary auth file that we will clean up after.
139
+ tempAuthFile = join(os.tmpdir(), `qwen-auth-${Date.now()}.json`);
140
+ writeFileSync(tempAuthFile, '{}');
141
+ buildCommandArgs = `--authfile="${tempAuthFile}"`;
142
+ } else {
143
+ // Use bash-specific syntax for Linux/macOS
144
+ buildCommandArgs = `--authfile=<(echo '{}')`;
145
+ }
146
+ }
147
+
148
+ const npmPackageVersion = JSON.parse(
149
+ readFileSync(join(process.cwd(), 'package.json'), 'utf-8'),
150
+ ).version;
151
+
152
+ const imageTag =
153
+ process.env.GEMINI_SANDBOX_IMAGE_TAG || imageName.split(':')[1];
154
+ const finalImageName = `${imageName.split(':')[0]}:${imageTag}`;
155
+
156
+ try {
157
+ execSync(
158
+ `${sandboxCommand} build ${buildCommandArgs} ${
159
+ process.env.BUILD_SANDBOX_FLAGS || ''
160
+ } --build-arg CLI_VERSION_ARG=${npmPackageVersion} -f "${dockerfile}" -t "${imageName}" .`,
161
+ { stdio: buildStdout, shell: shellToUse },
162
+ );
163
+ console.log(`built ${finalImageName}`);
164
+
165
+ // If an output file path was provided via command-line, write the final image URI to it.
166
+ if (argv.outputFile) {
167
+ console.log(
168
+ `Writing final image URI for CI artifact to: ${argv.outputFile}`,
169
+ );
170
+ // The publish step only supports one image. If we build multiple, only the last one
171
+ // will be published. Throw an error to make this failure explicit if the file already exists.
172
+ if (existsSync(argv.outputFile)) {
173
+ throw new Error(
174
+ `CI artifact file ${argv.outputFile} already exists. Refusing to overwrite.`,
175
+ );
176
+ }
177
+ writeFileSync(argv.outputFile, finalImageName);
178
+ }
179
+ } finally {
180
+ // If we created a temp file, delete it now.
181
+ if (tempAuthFile) {
182
+ rmSync(tempAuthFile, { force: true });
183
+ }
184
+ }
185
+ }
186
+
187
+ if (baseImage && baseDockerfile) {
188
+ buildImage(baseImage, baseDockerfile);
189
+ }
190
+
191
+ if (customDockerfile && customImage) {
192
+ buildImage(customImage, customDockerfile);
193
+ }
194
+
195
+ execSync(`${sandboxCommand} image prune -f`, { stdio: 'ignore' });
@@ -0,0 +1,30 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 Google LLC
4
+ * SPDX-License-Identifier: Apache-2.0
5
+ */
6
+
7
+ //
8
+ // Licensed under the Apache License, Version 2.0 (the "License");
9
+ // you may not use this file except in compliance with the License.
10
+ // You may obtain a copy of the License at
11
+ //
12
+ // http://www.apache.org/licenses/LICENSE-2.0
13
+ //
14
+ // Unless required by applicable law or agreed to in writing, software
15
+ // distributed under the License is distributed on an "AS IS" BASIS,
16
+ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17
+ // See the License for the specific language governing permissions and
18
+ // limitations under the License.
19
+
20
+ import { execSync } from 'child_process';
21
+ import { dirname, join } from 'path';
22
+ import { fileURLToPath } from 'url';
23
+
24
+ const __dirname = dirname(fileURLToPath(import.meta.url));
25
+ const root = join(__dirname, '..');
26
+
27
+ execSync('npx --yes @vscode/vsce package --no-dependencies', {
28
+ stdio: 'inherit',
29
+ cwd: join(root, 'packages', 'vscode-ide-companion'),
30
+ });