skrypt-ai 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +7 -1
- package/dist/commands/cron.d.ts +2 -0
- package/dist/commands/cron.js +106 -0
- package/dist/commands/deploy.d.ts +2 -0
- package/dist/commands/deploy.js +317 -0
- package/dist/commands/test.d.ts +2 -0
- package/dist/commands/test.js +335 -0
- package/dist/template/.github/workflows/skrypt-cron.yml +60 -0
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -15,7 +15,10 @@ import { sdkCommand } from './commands/sdk.js';
|
|
|
15
15
|
import { ghActionCommand } from './commands/gh-action.js';
|
|
16
16
|
import { llmsTxtCommand } from './commands/llms-txt.js';
|
|
17
17
|
import { loginCommand, logoutCommand, whoamiCommand } from './commands/login.js';
|
|
18
|
-
|
|
18
|
+
import { cronCommand } from './commands/cron.js';
|
|
19
|
+
import { deployCommand } from './commands/deploy.js';
|
|
20
|
+
import { testCommand } from './commands/test.js';
|
|
21
|
+
const VERSION = '0.3.0';
|
|
19
22
|
async function checkForUpdates() {
|
|
20
23
|
try {
|
|
21
24
|
const res = await fetch('https://registry.npmjs.org/skrypt-ai/latest', {
|
|
@@ -59,4 +62,7 @@ program.addCommand(llmsTxtCommand);
|
|
|
59
62
|
program.addCommand(loginCommand);
|
|
60
63
|
program.addCommand(logoutCommand);
|
|
61
64
|
program.addCommand(whoamiCommand);
|
|
65
|
+
program.addCommand(cronCommand);
|
|
66
|
+
program.addCommand(deployCommand);
|
|
67
|
+
program.addCommand(testCommand);
|
|
62
68
|
program.parse();
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import { Command } from 'commander';
|
|
2
|
+
import { existsSync, writeFileSync, mkdirSync } from 'fs';
|
|
3
|
+
import { resolve, join } from 'path';
|
|
4
|
+
const CRON_WORKFLOW = `name: Skrypt Auto-Update Docs
|
|
5
|
+
|
|
6
|
+
on:
|
|
7
|
+
# Run daily at 2am UTC - adjust as needed
|
|
8
|
+
schedule:
|
|
9
|
+
- cron: '0 2 * * *'
|
|
10
|
+
|
|
11
|
+
# Run on push to main
|
|
12
|
+
push:
|
|
13
|
+
branches: [main]
|
|
14
|
+
paths:
|
|
15
|
+
- 'src/**'
|
|
16
|
+
- 'lib/**'
|
|
17
|
+
- 'app/**'
|
|
18
|
+
|
|
19
|
+
# Allow manual trigger
|
|
20
|
+
workflow_dispatch:
|
|
21
|
+
|
|
22
|
+
jobs:
|
|
23
|
+
update-docs:
|
|
24
|
+
runs-on: ubuntu-latest
|
|
25
|
+
|
|
26
|
+
steps:
|
|
27
|
+
- uses: actions/checkout@v4
|
|
28
|
+
with:
|
|
29
|
+
fetch-depth: 0
|
|
30
|
+
|
|
31
|
+
- uses: actions/setup-node@v4
|
|
32
|
+
with:
|
|
33
|
+
node-version: '20'
|
|
34
|
+
|
|
35
|
+
- name: Install Skrypt
|
|
36
|
+
run: npm install -g skrypt-ai
|
|
37
|
+
|
|
38
|
+
- name: Generate docs
|
|
39
|
+
env:
|
|
40
|
+
OPENAI_API_KEY: \${{ secrets.OPENAI_API_KEY }}
|
|
41
|
+
ANTHROPIC_API_KEY: \${{ secrets.ANTHROPIC_API_KEY }}
|
|
42
|
+
run: |
|
|
43
|
+
skrypt generate ./src --output ./docs
|
|
44
|
+
|
|
45
|
+
- name: Deploy to Skrypt
|
|
46
|
+
env:
|
|
47
|
+
SKRYPT_API_KEY: \${{ secrets.SKRYPT_API_KEY }}
|
|
48
|
+
run: |
|
|
49
|
+
skrypt deploy ./docs --project \${{ github.event.repository.name }}
|
|
50
|
+
`;
|
|
51
|
+
/**
 * `skrypt cron` — scaffold a GitHub Actions workflow that regenerates and
 * redeploys documentation on a schedule, on pushes to main, or manually.
 * Writes .github/workflows/skrypt-docs.yml and prints setup instructions.
 */
export const cronCommand = new Command('cron')
    .description('Set up automated documentation updates with GitHub Actions')
    .argument('[repo-path]', 'Repository path', '.')
    .option('-s, --schedule <cron>', 'Cron schedule (default: daily at 2am UTC)', '0 2 * * *')
    .option('-f, --force', 'Overwrite existing workflow')
    .action(async (repoPath, options) => {
    const repoRoot = resolve(repoPath);
    const workflowDir = join(repoRoot, '.github', 'workflows');
    const workflowPath = join(workflowDir, 'skrypt-docs.yml');
    console.log('skrypt cron');
    console.log(` repo: ${repoRoot}`);
    console.log(` schedule: ${options.schedule}`);
    console.log('');
    // Refuse to clobber an existing workflow unless --force was given.
    if (existsSync(workflowPath) && !options.force) {
        console.log('Workflow already exists. Use --force to overwrite.');
        console.log(` ${workflowPath}`);
        return;
    }
    mkdirSync(workflowDir, { recursive: true });
    // Swap in a custom schedule only when it differs from the template default.
    const keepDefaultSchedule = !options.schedule || options.schedule === '0 2 * * *';
    const workflow = keepDefaultSchedule
        ? CRON_WORKFLOW
        : CRON_WORKFLOW.replace("cron: '0 2 * * *'", `cron: '${options.schedule}'`);
    writeFileSync(workflowPath, workflow);
    console.log(`✓ Created: ${workflowPath}`);
    console.log('');
    // Post-install guidance, emitted one console line per entry.
    const guidance = [
        '=== Setup Instructions ===',
        '',
        '1. Add secrets to your GitHub repository:',
        ' Settings > Secrets and variables > Actions',
        '',
        ' Required:',
        ' - SKRYPT_API_KEY: Get from https://skrypt.sh/dashboard/settings',
        '',
        ' For AI generation (one of):',
        ' - OPENAI_API_KEY',
        ' - ANTHROPIC_API_KEY',
        '',
        '2. Commit and push:',
        ' git add .github/',
        ' git commit -m "Add Skrypt auto-update workflow"',
        ' git push',
        '',
        '3. Trigger manually (optional):',
        ' Go to Actions > Skrypt Auto-Update Docs > Run workflow',
        '',
        'Common schedules:',
        ' "0 2 * * *" - Daily at 2am UTC',
        ' "0 2 * * 1" - Weekly on Monday',
        ' "0 2 1 * *" - Monthly on the 1st',
        ' "0 */6 * * *" - Every 6 hours',
    ];
    for (const line of guidance) {
        console.log(line);
    }
});
|
|
@@ -0,0 +1,317 @@
|
|
|
1
|
+
import { Command } from 'commander';
import { execFileSync, execSync, spawn } from 'child_process';
import { createHash } from 'crypto';
import { existsSync, readFileSync, statSync } from 'fs';
import { homedir } from 'os';
import { resolve, join } from 'path';
|
|
7
|
+
// Skrypt API base URL; override with SKRYPT_API_URL for staging/self-hosted use.
const API_BASE = process.env.SKRYPT_API_URL || 'https://api.skrypt.sh';
|
|
8
|
+
/**
 * Resolve the Skrypt API token, checking sources in priority order:
 * explicit --token flag, then the SKRYPT_API_KEY environment variable,
 * then the persisted login credentials in ~/.skrypt/auth.json.
 * @param {{ token?: string }} options - Parsed CLI options.
 * @returns {string|null} The token, or null when none is configured.
 */
function getApiToken(options) {
    // Highest priority: explicit CLI flag.
    if (options.token) {
        return options.token;
    }
    // Next: environment variable.
    const envKey = process.env.SKRYPT_API_KEY;
    if (envKey) {
        return envKey;
    }
    // Lowest priority: credentials saved by `skrypt login`.
    try {
        const credentialsPath = join(homedir(), '.skrypt', 'auth.json');
        if (!existsSync(credentialsPath)) {
            return null;
        }
        const credentials = JSON.parse(readFileSync(credentialsPath, 'utf-8'));
        return credentials.apiKey ? credentials.apiKey : null;
    }
    catch {
        // An unreadable/corrupt auth file is treated the same as "not logged in".
        return null;
    }
}
|
|
35
|
+
/**
 * Resolve the project slug, checking sources in priority order:
 * explicit --project flag, "slug"/"name" from docs.json, then the
 * package.json "name" (npm scope prefix stripped).
 * @param {{ project?: string }} options - Parsed CLI options.
 * @param {string} docsPath - Absolute path to the docs directory.
 * @returns {string|null} A URL-safe slug, or null when none can be derived.
 */
function getProjectSlug(options, docsPath) {
    // 1. Explicit CLI flag wins.
    if (options.project) {
        return options.project;
    }
    // Shared slugifier so the docs.json and package.json fallbacks behave the
    // same: lowercase, map non [a-z0-9-] to '-', collapse runs, and strip
    // leading/trailing '-' (previously the package.json branch never collapsed
    // runs and both branches could emit dangling hyphens, e.g. "my-docs-").
    const slugify = (value) => value
        .toLowerCase()
        .replace(/[^a-z0-9-]/g, '-')
        .replace(/-+/g, '-')
        .replace(/^-|-$/g, '');
    // 2. docs.json: explicit "slug", else slugified "name".
    const docsJsonPath = join(docsPath, 'docs.json');
    if (existsSync(docsJsonPath)) {
        try {
            const docsJson = JSON.parse(readFileSync(docsJsonPath, 'utf-8'));
            if (docsJson.slug) {
                return docsJson.slug;
            }
            if (docsJson.name) {
                return slugify(docsJson.name);
            }
        }
        catch {
            // Malformed docs.json: fall through to package.json.
        }
    }
    // 3. package.json "name" with any npm scope prefix (@org/) removed.
    const packageJsonPath = join(docsPath, 'package.json');
    if (existsSync(packageJsonPath)) {
        try {
            const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
            if (packageJson.name) {
                return slugify(packageJson.name.replace(/^@[^/]+\//, ''));
            }
        }
        catch {
            // Malformed package.json: no slug available.
        }
    }
    return null;
}
|
|
75
|
+
/**
 * Run the project's `npm run build` and resolve with the static export dir.
 * Streams build output to the console, filtering Node ExperimentalWarning noise.
 * @param {string} docsPath - Directory containing the docs site.
 * @returns {Promise<string>} Path to the `out` directory on success.
 * @throws {Error} When the build exits non-zero or cannot be spawned.
 */
async function buildSite(docsPath) {
    const outDir = join(docsPath, 'out');
    console.log(' Running next build...');
    return new Promise((resolve, reject) => {
        const build = spawn('npm', ['run', 'build'], {
            cwd: docsPath,
            stdio: ['inherit', 'pipe', 'pipe'],
            shell: true
        });
        // Echo build stdout, indented under the status line.
        build.stdout?.on('data', (chunk) => {
            process.stdout.write(` ${chunk.toString().trim()}\n`);
        });
        // Echo stderr, but drop ExperimentalWarning noise.
        build.stderr?.on('data', (chunk) => {
            const text = chunk.toString().trim();
            if (text && !text.includes('ExperimentalWarning')) {
                process.stderr.write(` ${text}\n`);
            }
        });
        build.on('close', (exitCode) => {
            if (exitCode === 0) {
                resolve(outDir);
            }
            else {
                reject(new Error(`Build failed with exit code ${exitCode}`));
            }
        });
        build.on('error', reject);
    });
}
|
|
109
|
+
/**
 * Create a gzipped tarball of the build output directory, placed next to it
 * as deploy-bundle.tar.gz, and report its size.
 * @param {string} outDir - Directory containing the static export.
 * @returns {Promise<string>} Path to the created deploy-bundle.tar.gz.
 * @throws {Error} When `tar` fails or the bundle cannot be stat'ed.
 */
async function bundleOutput(outDir) {
    const bundlePath = join(outDir, '..', 'deploy-bundle.tar.gz');
    console.log(' Creating deployment bundle...');
    // Invoke tar without a shell so paths containing quotes, spaces, or shell
    // metacharacters are passed through safely (the previous execSync call
    // interpolated paths into a shell string, which was injectable).
    execFileSync('tar', ['-czf', bundlePath, '-C', outDir, '.'], {
        stdio: 'pipe'
    });
    const stats = statSync(bundlePath);
    const sizeMB = (stats.size / (1024 * 1024)).toFixed(2);
    console.log(` Bundle size: ${sizeMB} MB`);
    return bundlePath;
}
|
|
124
|
+
/**
 * Upload the bundle to the API with progress.
 * Hand-assembles a multipart/form-data body (project, sha256 checksum, and the
 * gzipped bundle) and POSTs it to `${API_BASE}/v1/deploy`.
 * Never throws: failures are reported via the returned object.
 * @param {string} bundlePath - Path to the deploy-bundle.tar.gz to upload.
 * @param {string} projectSlug - Target project slug on Skrypt.
 * @param {string} apiToken - Bearer token for the API.
 * @returns {Promise<{success:boolean,url?:string,customDomain?:string,message?:string,error?:string}>}
 */
async function uploadBundle(bundlePath, projectSlug, apiToken) {
    const fileBuffer = readFileSync(bundlePath);
    console.log(' Uploading to Skrypt...');
    // Calculate checksum so the server can verify upload integrity.
    const checksum = createHash('sha256').update(fileBuffer).digest('hex');
    // Create form data manually for fetch. Part order (project, checksum,
    // bundle, closing boundary) and the \r\n framing are significant —
    // multipart parsers require exact CRLF delimiters.
    const boundary = `----WebKitFormBoundary${Date.now()}`;
    const formDataParts = [];
    // Add project slug field
    formDataParts.push(Buffer.from(`--${boundary}\r\n` +
        `Content-Disposition: form-data; name="project"\r\n\r\n` +
        `${projectSlug}\r\n`));
    // Add checksum field
    formDataParts.push(Buffer.from(`--${boundary}\r\n` +
        `Content-Disposition: form-data; name="checksum"\r\n\r\n` +
        `${checksum}\r\n`));
    // Add file field (headers only; the raw bytes follow as their own part)
    formDataParts.push(Buffer.from(`--${boundary}\r\n` +
        `Content-Disposition: form-data; name="bundle"; filename="deploy-bundle.tar.gz"\r\n` +
        `Content-Type: application/gzip\r\n\r\n`));
    formDataParts.push(fileBuffer);
    formDataParts.push(Buffer.from(`\r\n--${boundary}--\r\n`));
    const body = Buffer.concat(formDataParts);
    // Show upload progress
    let uploadedBytes = 0;
    const totalBytes = body.length;
    const progressInterval = setInterval(() => {
        // Simulate progress since fetch doesn't provide upload progress
        // (the percentage advances ~10% per tick, not from real network state).
        uploadedBytes = Math.min(uploadedBytes + Math.floor(totalBytes / 10), totalBytes);
        const percent = Math.round((uploadedBytes / totalBytes) * 100);
        process.stdout.write(`\r Uploading: ${percent}%`);
    }, 200);
    try {
        const response = await fetch(`${API_BASE}/v1/deploy`, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${apiToken}`,
                'Content-Type': `multipart/form-data; boundary=${boundary}`,
                'Content-Length': body.length.toString()
            },
            body
        });
        clearInterval(progressInterval);
        process.stdout.write(`\r Uploading: 100%\n`);
        if (!response.ok) {
            // Prefer a structured error from the API body; fall back to the
            // HTTP status line when the body is not JSON.
            const errorText = await response.text();
            let errorJson = {};
            try {
                errorJson = JSON.parse(errorText);
            }
            catch {
                // Not JSON
            }
            return {
                success: false,
                error: errorJson.error || errorJson.message || `HTTP ${response.status}: ${response.statusText}`
            };
        }
        const result = await response.json();
        return {
            success: true,
            url: result.url,
            customDomain: result.customDomain,
            message: result.message
        };
    }
    catch (err) {
        // Network-level failure: stop the progress ticker, terminate the
        // in-place progress line, and surface the error to the caller.
        clearInterval(progressInterval);
        console.log('');
        return {
            success: false,
            error: err instanceof Error ? err.message : 'Upload failed'
        };
    }
}
|
|
202
|
+
/**
 * `skrypt deploy` — build the docs site (`npm run build`), bundle the static
 * export as a tarball, and upload it to Skrypt hosting.
 * Exits with code 1 on any validation, build, bundle, or upload failure.
 */
export const deployCommand = new Command('deploy')
    .description('Deploy documentation site to Skrypt hosting')
    .argument('[directory]', 'Documentation site directory', '.')
    .option('--project <slug>', 'Project slug (e.g., my-docs)')
    .option('--token <key>', 'API token (or set SKRYPT_API_KEY env var)')
    .action(async (directory, options) => {
    const startTime = Date.now();
    const docsPath = resolve(directory);
    console.log('skrypt deploy');
    console.log(` directory: ${docsPath}`);
    console.log('');
    // Validate directory
    if (!existsSync(docsPath)) {
        console.error(`Error: Directory not found: ${docsPath}`);
        process.exit(1);
    }
    // Check for package.json (Next.js project) — presence is the only signal
    // used here that this is a buildable Skrypt docs site.
    const packageJsonPath = join(docsPath, 'package.json');
    if (!existsSync(packageJsonPath)) {
        console.error('Error: No package.json found. Is this a Skrypt docs site?');
        console.error(' Run: skrypt init <directory>');
        process.exit(1);
    }
    // Get API token (flag > env var > saved login)
    const apiToken = getApiToken(options);
    if (!apiToken) {
        console.error('Error: No API token found.');
        console.error('');
        console.error(' Provide a token using one of:');
        console.error(' --token <key>');
        console.error(' SKRYPT_API_KEY environment variable');
        console.error(' skrypt login');
        console.error('');
        process.exit(1);
    }
    // Get project slug (flag > docs.json > package.json name)
    const projectSlug = getProjectSlug(options, docsPath);
    if (!projectSlug) {
        console.error('Error: No project slug found.');
        console.error('');
        console.error(' Provide a project slug using one of:');
        console.error(' --project <slug>');
        console.error(' "slug" field in docs.json');
        console.error('');
        process.exit(1);
    }
    console.log(` project: ${projectSlug}`);
    console.log('');
    // Step 1: Build the site
    console.log('Step 1: Building documentation site...');
    let outDir;
    try {
        outDir = await buildSite(docsPath);
    }
    catch (err) {
        console.error('');
        console.error(`Error: Build failed`);
        if (err instanceof Error) {
            console.error(` ${err.message}`);
        }
        process.exit(1);
    }
    // Check build output exists — `out/` only exists for static exports.
    if (!existsSync(outDir)) {
        // Try .next/static for non-static export: a `.next/` without `out/`
        // means the build ran but was not configured as a static export.
        const nextDir = join(docsPath, '.next');
        if (existsSync(nextDir)) {
            console.error('');
            console.error('Error: Static export not found.');
            console.error(' Add to next.config.mjs: output: "export"');
            process.exit(1);
        }
        console.error('');
        console.error('Error: Build output not found.');
        process.exit(1);
    }
    console.log(' Build completed successfully');
    console.log('');
    // Step 2: Bundle the output
    console.log('Step 2: Creating deployment bundle...');
    let bundlePath;
    try {
        bundlePath = await bundleOutput(outDir);
    }
    catch (err) {
        console.error('');
        console.error('Error: Failed to create bundle');
        if (err instanceof Error) {
            console.error(` ${err.message}`);
        }
        process.exit(1);
    }
    console.log('');
    // Step 3: Upload (uploadBundle reports failure via result, never throws)
    console.log('Step 3: Deploying to Skrypt...');
    const result = await uploadBundle(bundlePath, projectSlug, apiToken);
    if (!result.success) {
        console.error('');
        console.error(`Error: Deployment failed`);
        console.error(` ${result.error}`);
        process.exit(1);
    }
    // Success
    const duration = Math.round((Date.now() - startTime) / 1000);
    console.log('');
    console.log('=== Deployment Successful ===');
    console.log('');
    console.log(` URL: ${result.url}`);
    if (result.customDomain) {
        console.log(` Custom domain: ${result.customDomain}`);
    }
    console.log(` Duration: ${duration}s`);
    console.log('');
    console.log('Your documentation is now live!');
    console.log('');
});
|
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
import { Command } from 'commander';
|
|
2
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
|
3
|
+
import { resolve, join, extname, relative } from 'path';
|
|
4
|
+
import { spawn } from 'child_process';
|
|
5
|
+
import { writeFileSync, unlinkSync, mkdirSync } from 'fs';
|
|
6
|
+
import { tmpdir } from 'os';
|
|
7
|
+
import { randomUUID } from 'crypto';
|
|
8
|
+
import { requirePro } from '../auth/index.js';
|
|
9
|
+
// Languages `skrypt test` can execute: TS via `npx tsx`, JS via node, Python via python3.
const SUPPORTED_LANGUAGES = ['typescript', 'ts', 'javascript', 'js', 'python', 'py'];
|
|
10
|
+
/**
 * Recursively collect all .md/.mdx files under a directory, skipping
 * dot-directories and node_modules. Depth-first, directory-listing order.
 * @param {string} dir - Root directory to scan.
 * @returns {string[]} Paths of every markdown doc found.
 */
function findDocFiles(dir) {
    const found = [];
    const visit = (folder) => {
        for (const name of readdirSync(folder)) {
            const entryPath = join(folder, name);
            const info = statSync(entryPath);
            if (info.isDirectory() && !name.startsWith('.') && name !== 'node_modules') {
                visit(entryPath);
            }
            else if (info.isFile() && ['.mdx', '.md'].includes(extname(name))) {
                found.push(entryPath);
            }
        }
    };
    visit(dir);
    return found;
}
|
|
31
|
+
/**
 * Pull fenced code blocks out of a markdown/MDX file.
 * Only blocks tagged with a supported language (optionally narrowed by a
 * case-insensitive `languageFilter` prefix match) are returned.
 * @param {string} filePath - File to scan.
 * @param {string} [languageFilter] - Language prefix filter, e.g. "ts".
 * @returns {{code:string,language:string,file:string,line:number,index:number}[]}
 */
function extractCodeBlocks(filePath, languageFilter) {
    const source = readFileSync(filePath, 'utf-8');
    // Matches ```lang\n ... ``` fences; the language tag is optional.
    const fencePattern = /```(\w+)?\n([\s\S]*?)```/g;
    const collected = [];
    let found;
    while ((found = fencePattern.exec(source)) !== null) {
        const lang = (found[1] || '').toLowerCase();
        const body = found[2] || '';
        // Honor an explicit language filter (prefix match).
        if (languageFilter && !lang.startsWith(languageFilter.toLowerCase())) {
            continue;
        }
        // Only keep languages the runner knows how to execute.
        if (!SUPPORTED_LANGUAGES.includes(lang)) {
            continue;
        }
        // 1-based line of the opening fence, used in test-result locations.
        const line = source.substring(0, found.index).split('\n').length;
        collected.push({
            code: body.trim(),
            language: lang,
            file: filePath,
            line,
            index: collected.length,
        });
    }
    return collected;
}
|
|
63
|
+
/**
 * Execute one documentation code block in a throwaway temp directory.
 * TypeScript runs via `npx tsx`, JavaScript via `node`, Python via `python3`.
 * Never throws: failures are reported via the returned result object.
 * @param {{code:string,language:string}} block - Extracted code block.
 * @param {number} timeoutMs - Per-block execution timeout.
 * @returns {Promise<{block:object,passed:boolean,output?:string,error?:string,duration:number}>}
 */
async function runCodeBlock(block, timeoutMs) {
    const startTime = Date.now();
    const tempDir = join(tmpdir(), `skrypt-test-${randomUUID()}`);
    mkdirSync(tempDir, { recursive: true });
    try {
        const isTypeScript = ['typescript', 'ts'].includes(block.language);
        const isPython = ['python', 'py'].includes(block.language);
        const isJavaScript = ['javascript', 'js'].includes(block.language);
        let tempFile;
        let command;
        let args;
        if (isTypeScript) {
            tempFile = join(tempDir, 'test.ts');
            writeFileSync(tempFile, block.code);
            // tsx executes TypeScript directly without a compile step.
            command = 'npx';
            args = ['tsx', tempFile];
        }
        else if (isJavaScript) {
            tempFile = join(tempDir, 'test.js');
            writeFileSync(tempFile, block.code);
            command = 'node';
            args = [tempFile];
        }
        else if (isPython) {
            tempFile = join(tempDir, 'test.py');
            writeFileSync(tempFile, block.code);
            command = 'python3';
            args = [tempFile];
        }
        else {
            return {
                block,
                passed: false,
                error: `Unsupported language: ${block.language}`,
                duration: Date.now() - startTime,
            };
        }
        const result = await executeWithTimeout(command, args, timeoutMs, tempDir);
        const duration = Date.now() - startTime;
        return {
            block,
            passed: result.exitCode === 0,
            output: result.stdout,
            error: result.stderr || (result.exitCode !== 0 ? `Exit code: ${result.exitCode}` : undefined),
            duration,
        };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        return {
            block,
            passed: false,
            error: message,
            duration: Date.now() - startTime,
        };
    }
    finally {
        // Remove the temp directory. The previous per-file unlink loop could
        // not delete subdirectories created by the tested code (rmdirSync on a
        // non-empty dir throws, silently leaking temp dirs via the catch);
        // rmSync with recursive+force handles nested content and absence.
        try {
            const { rmSync } = await import('fs');
            rmSync(tempDir, { recursive: true, force: true });
        }
        catch {
            // Best-effort cleanup; never mask the test result.
        }
    }
}
|
|
140
|
+
/**
 * Spawn a command and capture stdout/stderr, hard-killing it after `timeoutMs`.
 * Never rejects: spawn errors and timeouts are reported via the result object
 * (exitCode 1 plus an explanatory stderr).
 * @param {string} command - Executable to run.
 * @param {string[]} args - Arguments for the executable.
 * @param {number} timeoutMs - Kill the process after this many milliseconds.
 * @param {string} cwd - Working directory for the child process.
 * @returns {Promise<{exitCode:number,stdout:string,stderr:string}>}
 */
function executeWithTimeout(command, args, timeoutMs, cwd) {
    return new Promise((resolve) => {
        const child = spawn(command, args, {
            cwd,
            stdio: ['pipe', 'pipe', 'pipe'],
            env: { ...process.env, NODE_NO_WARNINGS: '1' },
        });
        let capturedOut = '';
        let capturedErr = '';
        let timedOut = false;
        // Hard-kill the process once it overruns its budget.
        const timer = setTimeout(() => {
            timedOut = true;
            child.kill('SIGKILL');
        }, timeoutMs);
        child.stdout?.on('data', (chunk) => {
            capturedOut += chunk.toString();
        });
        child.stderr?.on('data', (chunk) => {
            capturedErr += chunk.toString();
        });
        child.on('close', (code) => {
            clearTimeout(timer);
            if (timedOut) {
                // Report the timeout instead of whatever stderr the child produced.
                resolve({
                    exitCode: 1,
                    stdout: capturedOut,
                    stderr: `Timeout: code execution exceeded ${timeoutMs}ms`,
                });
                return;
            }
            resolve({
                exitCode: code ?? 1,
                stdout: capturedOut,
                stderr: capturedErr,
            });
        });
        child.on('error', (err) => {
            // Spawn failure (e.g. command not found): surface as a failed run.
            clearTimeout(timer);
            resolve({
                exitCode: 1,
                stdout: capturedOut,
                stderr: err.message,
            });
        });
    });
}
|
|
190
|
+
/**
 * Render a millisecond duration as "123ms" below one second, else "1.23s".
 * @param {number} ms - Duration in milliseconds.
 * @returns {string} Human-readable duration.
 */
function formatDuration(ms) {
    return ms < 1000 ? `${ms}ms` : `${(ms / 1000).toFixed(2)}s`;
}
|
|
198
|
+
/**
 * Print one block's colored pass/fail line; on failure also echo up to three
 * lines of the error, and on pass with --verbose a trimmed output preview.
 * @param {{passed:boolean,block:object,duration:number,output?:string,error?:string}} result
 * @param {string} basePath - Base used to relativize file paths.
 * @param {boolean} verbose - Whether to echo successful output.
 */
function printResult(result, basePath, verbose) {
    const { block } = result;
    const location = `${relative(basePath, block.file)}:${block.line}`;
    const lang = block.language;
    const timing = formatDuration(result.duration);
    if (!result.passed) {
        console.log(` \x1b[31m✗\x1b[0m ${location} [${lang}] (${timing})`);
        if (result.error) {
            // Keep output compact: only the first three error lines.
            for (const line of result.error.trim().split('\n').slice(0, 3)) {
                console.log(` \x1b[31m${line}\x1b[0m`);
            }
        }
        return;
    }
    console.log(` \x1b[32m✓\x1b[0m ${location} [${lang}] (${timing})`);
    if (verbose && result.output) {
        console.log(` \x1b[90mOutput: ${result.output.trim().slice(0, 100)}${result.output.length > 100 ? '...' : ''}\x1b[0m`);
    }
}
|
|
221
|
+
/**
 * Print the aggregate pass/fail/skip counts and total runtime, with ANSI
 * colors for the counters.
 * @param {{total:number,passed:number,failed:number,skipped:number,duration:number}} summary
 */
function printSummary(summary) {
    console.log('');
    console.log('=== Test Summary ===');
    console.log(` Total: ${summary.total}`);
    console.log(` \x1b[32mPassed:\x1b[0m ${summary.passed}`);
    console.log(` \x1b[31mFailed:\x1b[0m ${summary.failed}`);
    // Only mention skipped blocks when there were any.
    if (summary.skipped > 0) {
        console.log(` \x1b[33mSkipped:\x1b[0m ${summary.skipped}`);
    }
    console.log(` Duration: ${formatDuration(summary.duration)}`);
}
|
|
235
|
+
/**
 * `skrypt test` — extract fenced code blocks from markdown docs and execute
 * each one, reporting per-block results and an aggregate summary.
 * Exits 1 when any block fails (or the Pro check fails), 0 otherwise.
 */
export const testCommand = new Command('test')
    .description('Test code examples in documentation files')
    .argument('[path]', 'Directory or file to test', '.')
    .option('-f, --file <file>', 'Test a specific file or directory')
    .option('-l, --language <lang>', 'Filter by language (typescript, javascript, python)')
    .option('--fix', 'Auto-fix failing examples using autofix command')
    .option('-t, --timeout <ms>', 'Timeout per code block in milliseconds', '10000')
    .option('-v, --verbose', 'Show detailed output')
    .action(async (path, options) => {
    // Pro feature - requires subscription
    if (!await requirePro('test')) {
        process.exit(1);
    }
    // --file, when given, overrides the positional path argument.
    const targetPath = resolve(options.file || path);
    if (!existsSync(targetPath)) {
        console.error(`Error: Path not found: ${targetPath}`);
        process.exit(1);
    }
    // Validate the timeout up front so bad input fails before any work.
    const timeoutMs = parseInt(options.timeout);
    if (isNaN(timeoutMs) || timeoutMs <= 0) {
        console.error(`Error: Invalid timeout value: ${options.timeout}`);
        process.exit(1);
    }
    console.log('skrypt test');
    console.log(` path: ${targetPath}`);
    if (options.language) {
        console.log(` language: ${options.language}`);
    }
    console.log(` timeout: ${timeoutMs}ms`);
    console.log('');
    // Find all doc files (a single file target is tested as-is).
    const files = statSync(targetPath).isDirectory()
        ? findDocFiles(targetPath)
        : [targetPath];
    if (files.length === 0) {
        console.log('No .md or .mdx files found.');
        process.exit(0);
    }
    // Extract all code blocks
    const allBlocks = [];
    for (const file of files) {
        const blocks = extractCodeBlocks(file, options.language);
        allBlocks.push(...blocks);
    }
    if (allBlocks.length === 0) {
        console.log('No testable code blocks found.');
        if (options.language) {
            console.log(` (filtered by language: ${options.language})`);
        }
        console.log(` Supported languages: ${SUPPORTED_LANGUAGES.join(', ')}`);
        process.exit(0);
    }
    console.log(`Found ${allBlocks.length} code block(s) in ${files.length} file(s)`);
    console.log('');
    console.log('Running tests...\n');
    const results = [];
    const failedBlocks = [];
    const startTime = Date.now();
    // Blocks run sequentially so their console output stays interleaved
    // correctly with the per-block result lines.
    for (let i = 0; i < allBlocks.length; i++) {
        const block = allBlocks[i];
        if (!block)
            continue;
        const result = await runCodeBlock(block, timeoutMs);
        results.push(result);
        printResult(result, targetPath, options.verbose || false);
        if (!result.passed) {
            failedBlocks.push(block);
        }
    }
    const totalDuration = Date.now() - startTime;
    const summary = {
        total: allBlocks.length,
        passed: results.filter(r => r.passed).length,
        failed: results.filter(r => !r.passed).length,
        skipped: 0,
        duration: totalDuration,
    };
    printSummary(summary);
    // Handle --fix flag: currently advisory only — it prints the `skrypt
    // autofix` commands to run rather than invoking them itself.
    if (options.fix && failedBlocks.length > 0) {
        console.log('');
        console.log('Attempting to auto-fix failing examples...');
        console.log(' Run: skrypt autofix <file> for each failing file');
        console.log('');
        // Get unique files with failures
        const failedFiles = [...new Set(failedBlocks.map(b => b.file))];
        for (const file of failedFiles) {
            console.log(` → ${relative(targetPath, file)}`);
        }
        console.log('');
        console.log('To auto-fix, run:');
        for (const file of failedFiles) {
            console.log(` skrypt autofix "${file}"`);
        }
    }
    // Exit with error code if tests failed
    if (summary.failed > 0) {
        process.exit(1);
    }
    console.log('\nAll tests passed!');
});
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
# GitHub Actions workflow: regenerate docs with Skrypt on a schedule, on
# source pushes to main, or manually; then deploy and commit the output back.
name: Skrypt Auto-Update Docs

on:
  # Run daily at 2am UTC
  schedule:
    - cron: '0 2 * * *'

  # Run on push to main (optional)
  push:
    branches: [main]
    paths:
      - 'src/**'
      - 'lib/**'
      - 'app/**'

  # Allow manual trigger
  workflow_dispatch:

jobs:
  update-docs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install Skrypt
        run: npm install -g skrypt-ai

      - name: Generate docs
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
        run: |
          skrypt generate ./src --output ./docs

      - name: Build docs site
        run: |
          cd docs
          npm install
          npm run build

      - name: Deploy to Skrypt
        env:
          SKRYPT_API_KEY: ${{ secrets.SKRYPT_API_KEY }}
        run: |
          skrypt deploy ./docs --project ${{ github.event.repository.name }}

      - name: Commit changes (optional)
        run: |
          git config --local user.email "action@github.com"
          git config --local user.name "GitHub Action"
          git add docs/
          # Only commit when something is actually staged.
          git diff --staged --quiet || git commit -m "docs: auto-update documentation [skip ci]"
          # NOTE(review): pushing requires write access — repos whose default
          # GITHUB_TOKEN is read-only need `permissions: contents: write` on
          # this job; confirm before relying on this step.
          git push