fogtrail-cli 0.1.0
- package/README.md +61 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +20 -0
- package/dist/push.d.ts +9 -0
- package/dist/push.js +157 -0
- package/dist/utils.d.ts +21 -0
- package/dist/utils.js +113 -0
- package/package.json +41 -0
package/README.md
ADDED
@@ -0,0 +1,61 @@
# fogtrail-cli

CLI tool to push blog content to your [Fogtrail](https://fogtrail.io) instance for AEO (Answer Engine Optimization).

## Installation

```bash
# Use directly with npx (no install needed)
npx fogtrail-cli push --token <TOKEN> --dir ./blogs

# Or install globally
npm install -g fogtrail-cli
```

## Usage

### Push blog content

Push all Markdown and HTML files from a directory to Fogtrail:

```bash
fogtrail-cli push --token <TOKEN> --dir ./blogs --url https://your-fogtrail-instance.com
```

### Options

| Option | Required | Default | Description |
|--------|----------|---------|-------------|
| `--token <token>` | Yes | - | Ingest session token from the Fogtrail UI |
| `--dir <directory>` | Yes | - | Directory containing blog files (`.md`, `.html`) |
| `--url <url>` | No | `http://localhost:3000` | Fogtrail instance URL |
| `--batch-size <size>` | No | `10` | Number of files per upload batch |
| `--no-recursive` | No | - | Do not scan subdirectories |

### How it works

1. Open your Fogtrail dashboard and click **Add Content** > **Import from Website**
2. Copy the command shown in the dialog (it includes your unique token)
3. Run the command in your terminal from your project directory
4. The CLI scans for `.md` and `.html` files and uploads them to Fogtrail
5. HTML files are automatically converted to Markdown
6. Files without YAML frontmatter get auto-generated metadata

### Supported file formats

- **Markdown** (`.md`) - uploaded as-is with frontmatter validation
- **HTML** (`.html`, `.htm`) - converted to Markdown before upload

### Example

```bash
# Push all blog posts from ./content/blog
npx fogtrail-cli push \
  --token abc123def456... \
  --dir ./content/blog \
  --url https://app.fogtrail.io
```

## License

MIT
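To make the README's "auto-generated metadata" concrete: per `dist/utils.js` below, a pushed file that lacks YAML frontmatter gets a minimal header whose `title` comes from the first `#` heading (or, failing that, from the filename with its extension stripped and dashes/underscores turned into spaces) plus an empty `topics` list. A hypothetical `getting-started.md` beginning with `# Getting Started` would therefore be uploaded roughly as:

```markdown
---
title: "Getting Started"
topics: []
---

# Getting Started
...rest of the original file, unchanged...
```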
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
@@ -0,0 +1,20 @@
#!/usr/bin/env node
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const push_1 = require("./push");
const program = new commander_1.Command();
program
    .name('fogtrail-cli')
    .description('Push blog content to your Fogtrail instance')
    .version('0.1.0');
program
    .command('push')
    .description('Push markdown/HTML files to Fogtrail')
    .requiredOption('--token <token>', 'Ingest session token (from Fogtrail UI)')
    .requiredOption('--dir <directory>', 'Directory containing blog files (.md, .html)')
    .option('--url <url>', 'Fogtrail instance URL', 'http://localhost:3000')
    .option('--batch-size <size>', 'Files per request batch', '10')
    .option('--no-recursive', 'Do not scan subdirectories')
    .action(push_1.pushCommand);
program.parse();
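A note on how these options reach `pushCommand` (a sketch based on commander's documented behavior, with made-up values): commander camel-cases `--batch-size` to `batchSize`, passes option values as strings, and treats `--no-recursive` as a boolean `recursive` option that defaults to `true`.

```js
// Hypothetical options object handed to pushCommand for:
//   fogtrail-cli push --token abc123 --dir ./content/blog --no-recursive
const options = {
    token: 'abc123',              // from --token (required)
    dir: './content/blog',        // from --dir (required)
    url: 'http://localhost:3000', // default supplied to .option('--url <url>', ...)
    batchSize: '10',              // commander keeps the string; push.js parseInt()s it
    recursive: false,             // --no-recursive flips the default true to false
};
```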
package/dist/push.d.ts
ADDED
package/dist/push.js
ADDED
@@ -0,0 +1,157 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.pushCommand = pushCommand;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const utils_1 = require("./utils");
async function pushCommand(options) {
    const { token, dir, url, batchSize: batchSizeStr } = options;
    const recursive = options.recursive !== false;
    const batchSize = parseInt(batchSizeStr, 10) || 10;
    // Validate directory
    const resolvedDir = path.resolve(dir);
    if (!fs.existsSync(resolvedDir)) {
        console.error(`Error: Directory not found: ${resolvedDir}`);
        process.exit(1);
    }
    if (!fs.statSync(resolvedDir).isDirectory()) {
        console.error(`Error: Not a directory: ${resolvedDir}`);
        process.exit(1);
    }
    console.log(`Scanning ${resolvedDir} for blog files...`);
    // Discover files
    const files = await (0, utils_1.discoverFiles)(resolvedDir, recursive);
    if (files.length === 0) {
        console.log('No .md or .html files found.');
        process.exit(0);
    }
    console.log(`Found ${files.length} file${files.length !== 1 ? 's' : ''}`);
    // Read all files
    const entries = [];
    let readErrors = 0;
    for (const filepath of files) {
        try {
            const entry = (0, utils_1.readFileContent)(filepath);
            entries.push(entry);
        }
        catch (error) {
            const err = error;
            console.error(` Warning: Failed to read ${path.basename(filepath)}: ${err.message}`);
            readErrors++;
        }
    }
    if (entries.length === 0) {
        console.error('No files could be read.');
        process.exit(1);
    }
    console.log(`Read ${entries.length} file${entries.length !== 1 ? 's' : ''}${readErrors > 0 ? ` (${readErrors} failed)` : ''}`);
    console.log();
    // Upload in batches
    const totalBatches = Math.ceil(entries.length / batchSize);
    let totalReceived = 0;
    let totalErrors = [];
    const apiUrl = `${url.replace(/\/$/, '')}/api/ingest/push`;
    for (let i = 0; i < totalBatches; i++) {
        const start = i * batchSize;
        const end = Math.min(start + batchSize, entries.length);
        const batch = entries.slice(start, end);
        const isLastBatch = i === totalBatches - 1;
        const progress = `[${end}/${entries.length}]`;
        process.stdout.write(`${progress} Uploading batch ${i + 1} of ${totalBatches}...`);
        try {
            const response = await fetch(apiUrl, {
                method: 'POST',
                headers: {
                    'Authorization': `Bearer ${token}`,
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    files: batch.map((entry) => ({
                        filename: entry.filename,
                        content: entry.content,
                        sourceUrl: entry.sourceUrl,
                    })),
                    done: isLastBatch,
                }),
            });
            if (!response.ok) {
                const data = await response.json().catch(() => ({ error: `HTTP ${response.status}` }));
                if (response.status === 401) {
                    console.error(` failed\n\nError: Invalid token. Please check your token and try again.`);
                    process.exit(1);
                }
                if (response.status === 410) {
                    console.error(` failed\n\nError: Session expired. Please create a new import session from the Fogtrail UI.`);
                    process.exit(1);
                }
                console.error(` failed\n\nError: ${data.error || `HTTP ${response.status}`}`);
                process.exit(1);
            }
            const data = await response.json();
            if (!data.success) {
                console.error(` failed: ${data.error}`);
                process.exit(1);
            }
            totalReceived += data.received || 0;
            if (data.errors && data.errors.length > 0) {
                totalErrors = [...totalErrors, ...data.errors];
            }
            console.log(' done');
        }
        catch (error) {
            const err = error;
            if (err.message.includes('ECONNREFUSED') || err.message.includes('fetch failed')) {
                console.error(` failed\n\nError: Could not connect to ${url}. Make sure your Fogtrail instance is running.`);
            }
            else {
                console.error(` failed\n\nError: ${err.message}`);
            }
            process.exit(1);
        }
    }
    // Summary
    console.log();
    console.log(`Successfully pushed ${totalReceived} file${totalReceived !== 1 ? 's' : ''} to Fogtrail`);
    if (totalErrors.length > 0) {
        console.log(`${totalErrors.length} error${totalErrors.length !== 1 ? 's' : ''}:`);
        for (const err of totalErrors.slice(0, 10)) {
            console.log(` - ${err}`);
        }
        if (totalErrors.length > 10) {
            console.log(` ...and ${totalErrors.length - 10} more`);
        }
    }
}
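For reference, the wire format implied by `pushCommand` above: each batch is a `POST` to `<url>/api/ingest/push` with a bearer token, a `files` array, and a `done` flag on the final batch, and the CLI expects a JSON body with `success`, `received`, and optional `errors`. A sketch with made-up values:

```js
// What one (final) batch request body looks like, per the JSON.stringify call above.
const requestBody = {
    files: [
        {
            filename: 'getting-started.md',
            content: '---\ntitle: "Getting Started"\ntopics: []\n---\n\n# Getting Started\n...',
            sourceUrl: null, // utils.js sets sourceUrl to null for local files
        },
    ],
    done: true, // true only on the last batch
};

// Response shape the CLI consumes: `success` gates continuation, `received` is
// summed for the summary line, and any `errors` strings are echoed at the end.
const exampleResponse = { success: true, received: 1, errors: [] };
```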
package/dist/utils.d.ts
ADDED
@@ -0,0 +1,21 @@
export interface FileEntry {
    filename: string;
    content: string;
    sourceUrl: string | null;
}
/**
 * Discover all .md and .html files in a directory
 */
export declare function discoverFiles(dir: string, recursive: boolean): Promise<string[]>;
/**
 * Read a file and return its content, converting HTML to markdown if needed
 */
export declare function readFileContent(filepath: string): FileEntry;
/**
 * Convert HTML to Markdown
 */
export declare function htmlToMarkdown(html: string): string;
/**
 * Ensure content has YAML frontmatter. If not, generate minimal frontmatter.
 */
export declare function ensureFrontmatter(content: string, filename: string): string;
package/dist/utils.js
ADDED
@@ -0,0 +1,113 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.discoverFiles = discoverFiles;
exports.readFileContent = readFileContent;
exports.htmlToMarkdown = htmlToMarkdown;
exports.ensureFrontmatter = ensureFrontmatter;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const glob_1 = require("glob");
const gray_matter_1 = __importDefault(require("gray-matter"));
const turndown_1 = __importDefault(require("turndown"));
const turndown = new turndown_1.default({
    headingStyle: 'atx',
    codeBlockStyle: 'fenced',
});
/**
 * Discover all .md and .html files in a directory
 */
async function discoverFiles(dir, recursive) {
    const pattern = recursive ? '**/*.{md,html,htm}' : '*.{md,html,htm}';
    const files = await (0, glob_1.glob)(pattern, {
        cwd: dir,
        nodir: true,
        absolute: true,
    });
    return files.sort();
}
/**
 * Read a file and return its content, converting HTML to markdown if needed
 */
function readFileContent(filepath) {
    const content = fs.readFileSync(filepath, 'utf-8');
    const filename = path.basename(filepath);
    const ext = path.extname(filepath).toLowerCase();
    let finalContent;
    if (ext === '.html' || ext === '.htm') {
        finalContent = htmlToMarkdown(content);
    }
    else {
        finalContent = content;
    }
    // Ensure frontmatter exists
    finalContent = ensureFrontmatter(finalContent, filename);
    return {
        filename,
        content: finalContent,
        sourceUrl: null,
    };
}
/**
 * Convert HTML to Markdown
 */
function htmlToMarkdown(html) {
    return turndown.turndown(html);
}
/**
 * Ensure content has YAML frontmatter. If not, generate minimal frontmatter.
 */
function ensureFrontmatter(content, filename) {
    try {
        const parsed = (0, gray_matter_1.default)(content);
        if (parsed.data && Object.keys(parsed.data).length > 0) {
            return content; // Already has frontmatter
        }
    }
    catch {
        // Not valid frontmatter, continue to add it
    }
    // Extract title from first heading or filename
    const headingMatch = content.match(/^#\s+(.+)$/m);
    const title = headingMatch
        ? headingMatch[1].trim()
        : filename.replace(/\.(md|html|htm)$/i, '').replace(/[-_]/g, ' ');
    const escapedTitle = title.replace(/"/g, '\\"');
    const frontmatter = `---\ntitle: "${escapedTitle}"\ntopics: []\n---\n\n`;
    return frontmatter + content;
}
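A quick local sanity check of these helpers (not part of the package; run from the package root after `npm run build`, inputs are made up):

```js
// Node >= 18, CommonJS.
const { htmlToMarkdown, ensureFrontmatter } = require('./dist/utils');

// turndown is configured above with atx headings and fenced code blocks.
console.log(htmlToMarkdown('<h1>Hello</h1><p>First post</p>'));
// -> "# Hello\n\nFirst post"

// No frontmatter: a minimal header is prepended, title taken from the first heading.
console.log(ensureFrontmatter('# Hello World\n\nBody text', 'hello-world.md'));
// -> ---
//    title: "Hello World"
//    topics: []
//    ---
//
//    # Hello World
//
//    Body text
```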
package/package.json
ADDED
@@ -0,0 +1,41 @@
{
  "name": "fogtrail-cli",
  "version": "0.1.0",
  "description": "CLI tool to push blog content to Fogtrail for AEO optimization",
  "bin": {
    "fogtrail-cli": "./dist/index.js"
  },
  "main": "./dist/index.js",
  "scripts": {
    "build": "tsc",
    "dev": "tsc --watch",
    "prepublishOnly": "npm run build"
  },
  "keywords": [
    "fogtrail",
    "aeo",
    "seo",
    "blog",
    "content",
    "cli"
  ],
  "license": "MIT",
  "files": [
    "dist",
    "README.md"
  ],
  "engines": {
    "node": ">=18"
  },
  "dependencies": {
    "commander": "^12.0.0",
    "glob": "^11.0.0",
    "gray-matter": "^4.0.3",
    "turndown": "^7.2.0"
  },
  "devDependencies": {
    "@types/node": "^20",
    "@types/turndown": "^5.0.5",
    "typescript": "^5"
  }
}