firecrawl-cli 1.9.4 → 1.9.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +129 -0
- package/dist/commands/browser.d.ts.map +1 -1
- package/dist/commands/browser.js +3 -2
- package/dist/commands/browser.js.map +1 -1
- package/dist/commands/experimental/backends.d.ts +19 -0
- package/dist/commands/experimental/backends.d.ts.map +1 -0
- package/dist/commands/experimental/backends.js +74 -0
- package/dist/commands/experimental/backends.js.map +1 -0
- package/dist/commands/experimental/index.d.ts +13 -0
- package/dist/commands/experimental/index.d.ts.map +1 -0
- package/dist/commands/experimental/index.js +227 -0
- package/dist/commands/experimental/index.js.map +1 -0
- package/dist/commands/experimental/shared.d.ts +17 -0
- package/dist/commands/experimental/shared.d.ts.map +1 -0
- package/dist/commands/experimental/shared.js +154 -0
- package/dist/commands/experimental/shared.js.map +1 -0
- package/dist/commands/experimental/workflows/competitor-analysis.d.ts +10 -0
- package/dist/commands/experimental/workflows/competitor-analysis.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/competitor-analysis.js +196 -0
- package/dist/commands/experimental/workflows/competitor-analysis.js.map +1 -0
- package/dist/commands/experimental/workflows/deep-research.d.ts +11 -0
- package/dist/commands/experimental/workflows/deep-research.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/deep-research.js +159 -0
- package/dist/commands/experimental/workflows/deep-research.js.map +1 -0
- package/dist/commands/experimental/workflows/demo.d.ts +11 -0
- package/dist/commands/experimental/workflows/demo.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/demo.js +190 -0
- package/dist/commands/experimental/workflows/demo.js.map +1 -0
- package/dist/commands/experimental/workflows/knowledge-base.d.ts +11 -0
- package/dist/commands/experimental/workflows/knowledge-base.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/knowledge-base.js +319 -0
- package/dist/commands/experimental/workflows/knowledge-base.js.map +1 -0
- package/dist/commands/experimental/workflows/lead-research.d.ts +11 -0
- package/dist/commands/experimental/workflows/lead-research.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/lead-research.js +146 -0
- package/dist/commands/experimental/workflows/lead-research.js.map +1 -0
- package/dist/commands/experimental/workflows/qa.d.ts +11 -0
- package/dist/commands/experimental/workflows/qa.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/qa.js +184 -0
- package/dist/commands/experimental/workflows/qa.js.map +1 -0
- package/dist/commands/experimental/workflows/research-papers.d.ts +11 -0
- package/dist/commands/experimental/workflows/research-papers.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/research-papers.js +151 -0
- package/dist/commands/experimental/workflows/research-papers.js.map +1 -0
- package/dist/commands/experimental/workflows/seo-audit.d.ts +11 -0
- package/dist/commands/experimental/workflows/seo-audit.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/seo-audit.js +155 -0
- package/dist/commands/experimental/workflows/seo-audit.js.map +1 -0
- package/dist/commands/experimental/workflows/shop.d.ts +11 -0
- package/dist/commands/experimental/workflows/shop.d.ts.map +1 -0
- package/dist/commands/experimental/workflows/shop.js +158 -0
- package/dist/commands/experimental/workflows/shop.js.map +1 -0
- package/dist/commands/init.d.ts +11 -1
- package/dist/commands/init.d.ts.map +1 -1
- package/dist/commands/init.js +474 -36
- package/dist/commands/init.js.map +1 -1
- package/dist/commands/login.d.ts.map +1 -1
- package/dist/commands/login.js +18 -0
- package/dist/commands/login.js.map +1 -1
- package/dist/index.js +17 -4
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Shared constants and helpers for AI workflows.
|
|
4
|
+
*/
|
|
5
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
6
|
+
if (k2 === undefined) k2 = k;
|
|
7
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
8
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
9
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
10
|
+
}
|
|
11
|
+
Object.defineProperty(o, k2, desc);
|
|
12
|
+
}) : (function(o, m, k, k2) {
|
|
13
|
+
if (k2 === undefined) k2 = k;
|
|
14
|
+
o[k2] = m[k];
|
|
15
|
+
}));
|
|
16
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
17
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
18
|
+
}) : function(o, v) {
|
|
19
|
+
o["default"] = v;
|
|
20
|
+
});
|
|
21
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
22
|
+
var ownKeys = function(o) {
|
|
23
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
24
|
+
var ar = [];
|
|
25
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
26
|
+
return ar;
|
|
27
|
+
};
|
|
28
|
+
return ownKeys(o);
|
|
29
|
+
};
|
|
30
|
+
return function (mod) {
|
|
31
|
+
if (mod && mod.__esModule) return mod;
|
|
32
|
+
var result = {};
|
|
33
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
34
|
+
__setModuleDefault(result, mod);
|
|
35
|
+
return result;
|
|
36
|
+
};
|
|
37
|
+
})();
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.SUBAGENT_INSTRUCTIONS = exports.QA_TOOLS_BLOCK = exports.FIRECRAWL_TOOLS_BLOCK = exports.validateRequired = exports.validateUrl = void 0;
|
|
40
|
+
exports.normalizeUrl = normalizeUrl;
|
|
41
|
+
exports.normalizeSource = normalizeSource;
|
|
42
|
+
exports.buildMessage = buildMessage;
|
|
43
|
+
exports.askPermissionMode = askPermissionMode;
|
|
44
|
+
const backends_1 = require("./backends");
|
|
45
|
+
// ─── Validators ─────────────────────────────────────────────────────────────
|
|
46
|
+
const validateUrl = (value) => {
|
|
47
|
+
if (!value.trim())
|
|
48
|
+
return 'URL is required';
|
|
49
|
+
try {
|
|
50
|
+
new URL(value.startsWith('http') ? value : `https://${value}`);
|
|
51
|
+
return true;
|
|
52
|
+
}
|
|
53
|
+
catch {
|
|
54
|
+
return 'Please enter a valid URL';
|
|
55
|
+
}
|
|
56
|
+
};
|
|
57
|
+
exports.validateUrl = validateUrl;
|
|
58
|
+
const validateRequired = (label) => (value) => value.trim() ? true : `${label} is required`;
|
|
59
|
+
exports.validateRequired = validateRequired;
|
|
60
|
+
// ─── URL helpers ────────────────────────────────────────────────────────────
|
|
61
|
+
/** Ensure a URL has a protocol prefix. */
|
|
62
|
+
function normalizeUrl(url) {
|
|
63
|
+
return url.startsWith('http') ? url : `https://${url}`;
|
|
64
|
+
}
|
|
65
|
+
/** Normalize a source that might be a URL or a plain topic string. */
|
|
66
|
+
function normalizeSource(source) {
|
|
67
|
+
if (source.startsWith('http'))
|
|
68
|
+
return source;
|
|
69
|
+
if (/\.\w{2,}/.test(source))
|
|
70
|
+
return `https://${source}`;
|
|
71
|
+
return source;
|
|
72
|
+
}
|
|
73
|
+
// ─── Prompt blocks ──────────────────────────────────────────────────────────
|
|
74
|
+
exports.FIRECRAWL_TOOLS_BLOCK = `## Your Tools -- READ THIS FIRST
|
|
75
|
+
|
|
76
|
+
Use ONLY \`firecrawl\` for ALL web operations. It is already installed and authenticated. Run firecrawl commands via Bash. Do not use any other tools, skills, plugins, or built-in web features for web access -- only \`firecrawl\`. If the CLI has issues, you may fall back to Firecrawl MCP tools if available.
|
|
77
|
+
|
|
78
|
+
**First step: run \`firecrawl --help\` to see all available commands.** Then run \`firecrawl <command> --help\` for any command you plan to use heavily.
|
|
79
|
+
|
|
80
|
+
Quick reference:
|
|
81
|
+
- \`firecrawl search "<query>"\` -- Search the web
|
|
82
|
+
- \`firecrawl scrape <url>\` -- Scrape a page as markdown
|
|
83
|
+
- \`firecrawl map <url>\` -- Discover all URLs on a site
|
|
84
|
+
- \`firecrawl crawl <url>\` -- Crawl an entire site
|
|
85
|
+
- \`firecrawl browser "open <url>"\` -- Cloud browser session
|
|
86
|
+
- \`firecrawl browser "snapshot"\` -- Get page state
|
|
87
|
+
- \`firecrawl browser "click @<ref>"\` -- Click an element
|
|
88
|
+
- \`firecrawl browser "type @<ref> <text>"\` -- Type into an input`;
|
|
89
|
+
exports.QA_TOOLS_BLOCK = `## Your Tools -- READ THIS FIRST
|
|
90
|
+
|
|
91
|
+
Use ONLY \`firecrawl\` for ALL web operations. It is already installed and authenticated. Run firecrawl commands via Bash. Do not use any other tools, skills, plugins, or built-in web features for web access -- only \`firecrawl\`. If the CLI has issues, you may fall back to Firecrawl MCP tools if available.
|
|
92
|
+
|
|
93
|
+
**First step: run \`firecrawl --help\` and \`firecrawl browser --help\` to see all commands.** Tell each subagent to do the same.
|
|
94
|
+
|
|
95
|
+
## IMPORTANT: Launch Browser with Live View FIRST
|
|
96
|
+
|
|
97
|
+
Before doing anything else, launch a browser session with streaming enabled so the user can watch in real-time:
|
|
98
|
+
|
|
99
|
+
\`\`\`bash
|
|
100
|
+
firecrawl browser launch-session --json
|
|
101
|
+
\`\`\`
|
|
102
|
+
|
|
103
|
+
This prints a **Live View URL**. Try to open it automatically for the user:
|
|
104
|
+
|
|
105
|
+
\`\`\`bash
|
|
106
|
+
open "<liveViewUrl>" # macOS
|
|
107
|
+
xdg-open "<liveViewUrl>" # Linux
|
|
108
|
+
\`\`\`
|
|
109
|
+
|
|
110
|
+
If the \`open\` command fails or errors, just print the URL clearly so the user can copy-paste it into their browser. Either way, make sure the user sees the live view URL before you start working.
|
|
111
|
+
|
|
112
|
+
Quick reference:
|
|
113
|
+
- \`firecrawl browser "open <url>"\` -- Navigate to a URL in a cloud browser
|
|
114
|
+
- \`firecrawl browser "snapshot"\` -- Get the current page state (accessibility tree)
|
|
115
|
+
- \`firecrawl browser "click @<ref>"\` -- Click an element by its reference ID
|
|
116
|
+
- \`firecrawl browser "type @<ref> <text>"\` -- Type text into an input
|
|
117
|
+
- \`firecrawl browser "scrape"\` -- Get the full page content as markdown
|
|
118
|
+
- \`firecrawl browser "scroll down"\` / \`"scroll up"\` -- Scroll the page
|
|
119
|
+
- \`firecrawl scrape <url>\` -- Quick scrape without browser session
|
|
120
|
+
- \`firecrawl map <url>\` -- Discover all URLs on the site`;
|
|
121
|
+
exports.SUBAGENT_INSTRUCTIONS = `**IMPORTANT:** When spawning agents with the Agent tool:
|
|
122
|
+
- Use \`subagent_type: "general-purpose"\` for each agent
|
|
123
|
+
- Give each agent a clear, specific mandate in the prompt
|
|
124
|
+
- Tell each agent: "Use ONLY firecrawl for all web access via Bash. Do not use any other tools, skills, or plugins for web access. If the CLI has issues, fall back to Firecrawl MCP tools. Run \`firecrawl --help\` first."
|
|
125
|
+
- Launch ALL agents in a SINGLE message (parallel, not sequential)
|
|
126
|
+
- Each agent should return structured findings with source URLs`;
|
|
127
|
+
// ─── Message builder ────────────────────────────────────────────────────────
|
|
128
|
+
/** Join non-empty parts into a message string. */
|
|
129
|
+
function buildMessage(parts) {
|
|
130
|
+
return parts.filter(Boolean).join('. ') + '.';
|
|
131
|
+
}
|
|
132
|
+
// ─── Permission helper ──────────────────────────────────────────────────────
|
|
133
|
+
async function askPermissionMode(backend) {
|
|
134
|
+
const { select } = await Promise.resolve().then(() => __importStar(require('@inquirer/prompts')));
|
|
135
|
+
const config = backends_1.BACKENDS[backend];
|
|
136
|
+
const skipLabel = backend === 'codex' ? '--full-auto' : '--dangerously-skip-permissions';
|
|
137
|
+
const mode = await select({
|
|
138
|
+
message: 'How should the agent handle tool permissions?',
|
|
139
|
+
choices: [
|
|
140
|
+
{
|
|
141
|
+
name: 'Auto-approve all (recommended)',
|
|
142
|
+
value: 'skip',
|
|
143
|
+
description: `Runs fully autonomous, no manual approvals. Uses ${skipLabel}.`,
|
|
144
|
+
},
|
|
145
|
+
{
|
|
146
|
+
name: 'Ask me each time',
|
|
147
|
+
value: 'ask',
|
|
148
|
+
description: `${config.displayName} will prompt before running each tool (slower but more control).`,
|
|
149
|
+
},
|
|
150
|
+
],
|
|
151
|
+
});
|
|
152
|
+
return mode === 'skip';
|
|
153
|
+
}
|
|
154
|
+
//# sourceMappingURL=shared.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"shared.js","sourceRoot":"","sources":["../../../src/commands/experimental/shared.ts"],"names":[],"mappings":";AAAA;;GAEG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwBH,oCAEC;AAGD,0CAIC;AA+DD,oCAEC;AAID,8CAwBC;AA5HD,yCAAoD;AAEpD,+EAA+E;AAExE,MAAM,WAAW,GAAG,CAAC,KAAa,EAAiB,EAAE;IAC1D,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE;QAAE,OAAO,iBAAiB,CAAC;IAC5C,IAAI,CAAC;QACH,IAAI,GAAG,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,WAAW,KAAK,EAAE,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC;IACd,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,0BAA0B,CAAC;IACpC,CAAC;AACH,CAAC,CAAC;AARW,QAAA,WAAW,eAQtB;AAEK,MAAM,gBAAgB,GAC3B,CAAC,KAAa,EAAE,EAAE,CAClB,CAAC,KAAa,EAAiB,EAAE,CAC/B,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,KAAK,cAAc,CAAC;AAHpC,QAAA,gBAAgB,oBAGoB;AAEjD,+EAA+E;AAE/E,0CAA0C;AAC1C,SAAgB,YAAY,CAAC,GAAW;IACtC,OAAO,GAAG,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,GAAG,EAAE,CAAC;AACzD,CAAC;AAED,sEAAsE;AACtE,SAAgB,eAAe,CAAC,MAAc;IAC5C,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC;QAAE,OAAO,MAAM,CAAC;IAC7C,IAAI,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC;QAAE,OAAO,WAAW,MAAM,EAAE,CAAC;IACxD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,+EAA+E;AAElE,QAAA,qBAAqB,GAAG;;;;;;;;;;;;;;mEAc8B,CAAC;AAEvD,QAAA,cAAc,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;2DA+B6B,CAAC;AAE/C,QAAA,qBAAqB,GAAG;;;;;gEAK2B,CAAC;AAEjE,+EAA+E;AAE/E,kDAAkD;AAClD,SAAgB,YAAY,CAAC,KAAe;IAC1C,OAAO,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC;AAChD,CAAC;AAED,+EAA+E;AAExE,KAAK,UAAU,iBAAiB,CAAC,OAAgB;IACtD,MAAM,EAAE,MAAM,EAAE,GAAG,wDAAa,mBAAmB,GAAC,CAAC;IACrD,MAAM,MAAM,GAAG,mBAAQ,CAAC,OAAO,CAAC,CAAC;IAEjC,MAAM,SAAS,GACb,OAAO,KAAK,OAAO,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,gCAAgC,CAAC;IAEzE,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC;QACxB,OAAO,EAAE,+CAA+C;QACxD,OAAO,EAAE;YACP;gBACE,IAAI,EAAE,gCAAgC;gBACtC,KAAK,EAAE,MAAM;gBACb,WAAW,EAAE,oDAAoD,SAAS,GAAG;aAC9E;YACD;gBACE,IAAI,EAAE,kBAAkB;gBACxB,KAAK,EAAE,KAAK;gBACZ,WAAW,EAAE,GAAG,MAAM,CAAC,WAAW,kEAAkE;aACrG;SACF;KACF,CAAC,CAAC;IAEH,OAAO,IAAI,KAAK,MAAM,CAAC;AACzB,CAAC"}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Workflow: Competitor Analysis
|
|
3
|
+
*
|
|
4
|
+
* Spawns parallel agents -- one per company -- to scrape and profile the target
|
|
5
|
+
* and each competitor simultaneously. Synthesizes into a full competitive report.
|
|
6
|
+
*/
|
|
7
|
+
import { Command } from 'commander';
|
|
8
|
+
import { type Backend } from '../backends';
|
|
9
|
+
export declare function register(parentCmd: Command, backend: Backend): void;
|
|
10
|
+
//# sourceMappingURL=competitor-analysis.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"competitor-analysis.d.ts","sourceRoot":"","sources":["../../../../src/commands/experimental/workflows/competitor-analysis.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,OAAO,EAAE,KAAK,OAAO,EAAyB,MAAM,aAAa,CAAC;AAuKlE,wBAAgB,QAAQ,CAAC,SAAS,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG,IAAI,CAyBnE"}
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Workflow: Competitor Analysis
|
|
4
|
+
*
|
|
5
|
+
* Spawns parallel agents -- one per company -- to scrape and profile the target
|
|
6
|
+
* and each competitor simultaneously. Synthesizes into a full competitive report.
|
|
7
|
+
*/
|
|
8
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
9
|
+
if (k2 === undefined) k2 = k;
|
|
10
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
11
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
12
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
13
|
+
}
|
|
14
|
+
Object.defineProperty(o, k2, desc);
|
|
15
|
+
}) : (function(o, m, k, k2) {
|
|
16
|
+
if (k2 === undefined) k2 = k;
|
|
17
|
+
o[k2] = m[k];
|
|
18
|
+
}));
|
|
19
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
20
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
21
|
+
}) : function(o, v) {
|
|
22
|
+
o["default"] = v;
|
|
23
|
+
});
|
|
24
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
25
|
+
var ownKeys = function(o) {
|
|
26
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
27
|
+
var ar = [];
|
|
28
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
29
|
+
return ar;
|
|
30
|
+
};
|
|
31
|
+
return ownKeys(o);
|
|
32
|
+
};
|
|
33
|
+
return function (mod) {
|
|
34
|
+
if (mod && mod.__esModule) return mod;
|
|
35
|
+
var result = {};
|
|
36
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
37
|
+
__setModuleDefault(result, mod);
|
|
38
|
+
return result;
|
|
39
|
+
};
|
|
40
|
+
})();
|
|
41
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
42
|
+
exports.register = register;
|
|
43
|
+
const backends_1 = require("../backends");
|
|
44
|
+
const shared_1 = require("../shared");
|
|
45
|
+
// ─── Input gathering ────────────────────────────────────────────────────────
|
|
46
|
+
async function gatherInputs(prefill) {
|
|
47
|
+
const { input, select } = await Promise.resolve().then(() => __importStar(require('@inquirer/prompts')));
|
|
48
|
+
const rawUrl = prefill?.url ||
|
|
49
|
+
(await input({
|
|
50
|
+
message: "What's the URL of the site you want to analyze?",
|
|
51
|
+
validate: shared_1.validateUrl,
|
|
52
|
+
}));
|
|
53
|
+
const competitors = await input({
|
|
54
|
+
message: 'Are there particular competitors you want to flag? (leave blank to auto-discover)',
|
|
55
|
+
default: '',
|
|
56
|
+
});
|
|
57
|
+
const context = await input({
|
|
58
|
+
message: 'Anything else I should know? (leave blank to skip)',
|
|
59
|
+
default: '',
|
|
60
|
+
});
|
|
61
|
+
const output = await select({
|
|
62
|
+
message: 'How should the report be delivered?',
|
|
63
|
+
choices: [
|
|
64
|
+
{ name: 'Print to terminal', value: 'terminal' },
|
|
65
|
+
{ name: 'Save as Markdown file', value: 'markdown' },
|
|
66
|
+
{ name: 'Save as JSON (structured data)', value: 'json' },
|
|
67
|
+
],
|
|
68
|
+
});
|
|
69
|
+
return { url: (0, shared_1.normalizeUrl)(rawUrl), competitors, context, output };
|
|
70
|
+
}
|
|
71
|
+
// ─── System prompt ──────────────────────────────────────────────────────────
|
|
72
|
+
function buildSystemPrompt(opts) {
|
|
73
|
+
const outputInstructions = {
|
|
74
|
+
terminal: 'Print the full report to the terminal in well-formatted markdown.',
|
|
75
|
+
markdown: 'Save the report to a file called `competitor-analysis.md` in the current directory. Tell the user the file path when done.',
|
|
76
|
+
json: `Save the report as structured JSON to \`competitor-analysis.json\` in the current directory. Tell the user the file path when done.
|
|
77
|
+
|
|
78
|
+
Use this exact schema:
|
|
79
|
+
\`\`\`json
|
|
80
|
+
{
|
|
81
|
+
"target": {
|
|
82
|
+
"name": "string",
|
|
83
|
+
"url": "string",
|
|
84
|
+
"description": "string",
|
|
85
|
+
"features": ["string"],
|
|
86
|
+
"pricing": { "model": "string", "tiers": [{ "name": "string", "price": "string", "features": ["string"] }] },
|
|
87
|
+
"targetAudience": "string",
|
|
88
|
+
"valueProposition": "string",
|
|
89
|
+
"sources": ["url"]
|
|
90
|
+
},
|
|
91
|
+
"competitors": [
|
|
92
|
+
{
|
|
93
|
+
"name": "string",
|
|
94
|
+
"url": "string",
|
|
95
|
+
"description": "string",
|
|
96
|
+
"features": ["string"],
|
|
97
|
+
"pricing": { "model": "string", "tiers": [{ "name": "string", "price": "string", "features": ["string"] }] },
|
|
98
|
+
"targetAudience": "string",
|
|
99
|
+
"sources": ["url"]
|
|
100
|
+
}
|
|
101
|
+
],
|
|
102
|
+
"featureMatrix": {
|
|
103
|
+
"features": ["string"],
|
|
104
|
+
"comparison": { "companyName": { "featureName": "yes | no | partial | string" } }
|
|
105
|
+
},
|
|
106
|
+
"positioning": [{ "company": "string", "tone": "string", "keyClaims": ["string"], "differentiators": ["string"] }],
|
|
107
|
+
"strengths": ["string"],
|
|
108
|
+
"weaknesses": ["string"],
|
|
109
|
+
"opportunities": ["string"],
|
|
110
|
+
"sources": [{ "url": "string", "title": "string", "usedFor": "string" }]
|
|
111
|
+
}
|
|
112
|
+
\`\`\``,
|
|
113
|
+
};
|
|
114
|
+
return `You are a competitive analysis team lead powered by Firecrawl. You orchestrate parallel research agents to analyze a target company and its competitors simultaneously.
|
|
115
|
+
|
|
116
|
+
${shared_1.FIRECRAWL_TOOLS_BLOCK}
|
|
117
|
+
|
|
118
|
+
## Your Strategy
|
|
119
|
+
|
|
120
|
+
You are a **team lead**, not a solo researcher. Your job is to:
|
|
121
|
+
|
|
122
|
+
1. **Identify the landscape** -- Do a quick search yourself to find competitors if not provided. Search for "<product> alternatives", "<product> vs", "<industry> tools".
|
|
123
|
+
2. **Spawn parallel subagents** -- Launch one agent per company (target + each competitor). Each agent scrapes and profiles one company in depth.
|
|
124
|
+
3. **Collect results** -- Each agent reports back structured company data with source URLs.
|
|
125
|
+
4. **Synthesize** -- Build the comparative analysis, feature matrix, positioning breakdown, and recommendations from all agent findings.
|
|
126
|
+
|
|
127
|
+
## Agent Assignments
|
|
128
|
+
|
|
129
|
+
Spawn these agents in parallel:
|
|
130
|
+
1. **Target Company Agent** -- Scrape the target site thoroughly. Extract: features, pricing, positioning, messaging, target audience, value proposition, content strategy. Return all findings with source URLs.
|
|
131
|
+
2. **Competitor Agent** (one per competitor) -- Each agent scrapes one competitor's site. Extract: company name, URL, what they do, key features, pricing (if public), target audience, value proposition. Return findings with source URLs.
|
|
132
|
+
|
|
133
|
+
${shared_1.SUBAGENT_INSTRUCTIONS}
|
|
134
|
+
|
|
135
|
+
## Output Format
|
|
136
|
+
|
|
137
|
+
${outputInstructions[opts.output]}
|
|
138
|
+
|
|
139
|
+
Produce a comprehensive competitive analysis report with:
|
|
140
|
+
|
|
141
|
+
### 1. Target Company Overview
|
|
142
|
+
- What they do (one paragraph)
|
|
143
|
+
- Key features / product offerings
|
|
144
|
+
- Pricing model (if public)
|
|
145
|
+
- Target audience
|
|
146
|
+
- Unique value proposition
|
|
147
|
+
|
|
148
|
+
### 2. Competitor Profiles
|
|
149
|
+
For each competitor:
|
|
150
|
+
- Company name & URL
|
|
151
|
+
- What they do
|
|
152
|
+
- Key features
|
|
153
|
+
- Pricing (if public)
|
|
154
|
+
- Target audience
|
|
155
|
+
|
|
156
|
+
### 3. Feature Comparison Matrix
|
|
157
|
+
A markdown table comparing features across all companies.
|
|
158
|
+
|
|
159
|
+
### 4. Positioning & Messaging Analysis
|
|
160
|
+
How each company positions itself -- tone, key claims, differentiators.
|
|
161
|
+
|
|
162
|
+
### 5. Strengths & Weaknesses
|
|
163
|
+
For the target company relative to competitors.
|
|
164
|
+
|
|
165
|
+
### 6. Opportunities & Recommendations
|
|
166
|
+
Actionable insights based on competitive gaps.
|
|
167
|
+
|
|
168
|
+
### 7. Sources & Citations
|
|
169
|
+
For every claim, cite the source URL where you found the information. List all URLs scraped at the end with a one-line note on what was found there.
|
|
170
|
+
|
|
171
|
+
---
|
|
172
|
+
|
|
173
|
+
Be thorough. Scrape real pages, extract real data. Do not make things up -- if pricing isn't public, say so. If a page fails to scrape, try an alternative URL or note the limitation.
|
|
174
|
+
|
|
175
|
+
Start working immediately when given a target.`;
|
|
176
|
+
}
|
|
177
|
+
// ─── Command registration ───────────────────────────────────────────────────
|
|
178
|
+
function register(parentCmd, backend) {
|
|
179
|
+
const config = backends_1.BACKENDS[backend];
|
|
180
|
+
parentCmd
|
|
181
|
+
.command('competitor-analysis')
|
|
182
|
+
.description('Analyze a website and its competitive landscape')
|
|
183
|
+
.argument('[url]', 'URL to analyze')
|
|
184
|
+
.option('-y, --yes', 'Auto-approve all tool permissions')
|
|
185
|
+
.action(async (url, options) => {
|
|
186
|
+
const inputs = await gatherInputs(url ? { url } : undefined);
|
|
187
|
+
const skipPermissions = options.yes || (await (0, shared_1.askPermissionMode)(backend));
|
|
188
|
+
console.log(`\nLaunching ${config.displayName}...\n`);
|
|
189
|
+
(0, backends_1.launchAgent)(backend, buildSystemPrompt({ output: inputs.output }), (0, shared_1.buildMessage)([
|
|
190
|
+
`Analyze ${inputs.url}`,
|
|
191
|
+
inputs.competitors && `Competitors to include: ${inputs.competitors}`,
|
|
192
|
+
inputs.context,
|
|
193
|
+
]), skipPermissions);
|
|
194
|
+
});
|
|
195
|
+
}
|
|
196
|
+
//# sourceMappingURL=competitor-analysis.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"competitor-analysis.js","sourceRoot":"","sources":["../../../../src/commands/experimental/workflows/competitor-analysis.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0KH,4BAyBC;AAhMD,0CAAkE;AAClE,sCAOmB;AAWnB,+EAA+E;AAE/E,KAAK,UAAU,YAAY,CAAC,OAA0B;IACpD,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,wDAAa,mBAAmB,GAAC,CAAC;IAE5D,MAAM,MAAM,GACV,OAAO,EAAE,GAAG;QACZ,CAAC,MAAM,KAAK,CAAC;YACX,OAAO,EAAE,iDAAiD;YAC1D,QAAQ,EAAE,oBAAW;SACtB,CAAC,CAAC,CAAC;IAEN,MAAM,WAAW,GAAG,MAAM,KAAK,CAAC;QAC9B,OAAO,EACL,mFAAmF;QACrF,OAAO,EAAE,EAAE;KACZ,CAAC,CAAC;IAEH,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC;QAC1B,OAAO,EAAE,oDAAoD;QAC7D,OAAO,EAAE,EAAE;KACZ,CAAC,CAAC;IAEH,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC;QAC1B,OAAO,EAAE,qCAAqC;QAC9C,OAAO,EAAE;YACP,EAAE,IAAI,EAAE,mBAAmB,EAAE,KAAK,EAAE,UAAU,EAAE;YAChD,EAAE,IAAI,EAAE,uBAAuB,EAAE,KAAK,EAAE,UAAU,EAAE;YACpD,EAAE,IAAI,EAAE,gCAAgC,EAAE,KAAK,EAAE,MAAM,EAAE;SAC1D;KACF,CAAC,CAAC;IAEH,OAAO,EAAE,GAAG,EAAE,IAAA,qBAAY,EAAC,MAAM,CAAC,EAAE,WAAW,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;AACrE,CAAC;AAED,+EAA+E;AAE/E,SAAS,iBAAiB,CAAC,IAAwB;IACjD,MAAM,kBAAkB,GAA2B;QACjD,QAAQ,EACN,mEAAmE;QACrE,QAAQ,EACN,4HAA4H;QAC9H,IAAI,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAoCH;KACJ,CAAC;IAEF,OAAO;;EAEP,8BAAqB;;;;;;;;;;;;;;;;;EAiBrB,8BAAqB;;;;EAIrB,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;+CAsCc,CAAC;AAChD,CAAC;AAED,+EAA+E;AAE/E,SAAgB,QAAQ,CAAC,SAAkB,EAAE,OAAgB;IAC3D,MAAM,MAAM,GAAG,mBAAQ,CAAC,OAAO,CAAC,CAAC;IAEjC,SAAS;SACN,OAAO,CAAC,qBAAqB,CAAC;SAC9B,WAAW,CAAC,iDAAiD,CAAC;SAC9D,QAAQ,CAAC,OAAO,EAAE,gBAAgB,CAAC;SACnC,MAAM,CAAC,WAAW,EAAE,mCAAmC,CAAC;SACxD,MAAM,CAAC,KAAK,EAAE,GAAG,EAAE,OAAO,EAAE,EAAE;QAC7B,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;QAE7D,MAAM,eAAe,GAAG,OAAO,CAAC,GAAG,IAAI,CAAC,MAAM,IAAA,0BAAiB,EAAC,OAAO,CAAC,CAAC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,eAAe,MAAM,CAAC,WAAW,OAAO,CAAC,CAAC;QAEtD,IAAA,sBAAW,EACT,OAAO,EACP,iBAAiB,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,EAC5C,IA
AA,qBAAY,EAAC;YACX,WAAW,MAAM,CAAC,GAAG,EAAE;YACvB,MAAM,CAAC,WAAW,IAAI,2BAA2B,MAAM,CAAC,WAAW,EAAE;YACrE,MAAM,CAAC,OAAO;SACf,CAAC,EACF,eAAe,CAChB,CAAC;IACJ,CAAC,CAAC,CAAC;AACP,CAAC"}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Workflow: Deep Research
|
|
3
|
+
*
|
|
4
|
+
* Breaks a topic into research angles, then spawns parallel agents -- one per
|
|
5
|
+
* angle (overview, technical, market, contrarian). Results are cross-referenced
|
|
6
|
+
* and synthesized into a structured report.
|
|
7
|
+
*/
|
|
8
|
+
import { Command } from 'commander';
|
|
9
|
+
import { type Backend } from '../backends';
|
|
10
|
+
export declare function register(parentCmd: Command, backend: Backend): void;
|
|
11
|
+
//# sourceMappingURL=deep-research.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"deep-research.d.ts","sourceRoot":"","sources":["../../../../src/commands/experimental/workflows/deep-research.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,OAAO,EAAE,KAAK,OAAO,EAAyB,MAAM,aAAa,CAAC;AAuIlE,wBAAgB,QAAQ,CAAC,SAAS,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG,IAAI,CAqBnE"}
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Workflow: Deep Research
|
|
4
|
+
*
|
|
5
|
+
* Breaks a topic into research angles, then spawns parallel agents -- one per
|
|
6
|
+
* angle (overview, technical, market, contrarian). Results are cross-referenced
|
|
7
|
+
* and synthesized into a structured report.
|
|
8
|
+
*/
|
|
9
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
12
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
13
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
14
|
+
}
|
|
15
|
+
Object.defineProperty(o, k2, desc);
|
|
16
|
+
}) : (function(o, m, k, k2) {
|
|
17
|
+
if (k2 === undefined) k2 = k;
|
|
18
|
+
o[k2] = m[k];
|
|
19
|
+
}));
|
|
20
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
21
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
22
|
+
}) : function(o, v) {
|
|
23
|
+
o["default"] = v;
|
|
24
|
+
});
|
|
25
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
26
|
+
var ownKeys = function(o) {
|
|
27
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
28
|
+
var ar = [];
|
|
29
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
30
|
+
return ar;
|
|
31
|
+
};
|
|
32
|
+
return ownKeys(o);
|
|
33
|
+
};
|
|
34
|
+
return function (mod) {
|
|
35
|
+
if (mod && mod.__esModule) return mod;
|
|
36
|
+
var result = {};
|
|
37
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
38
|
+
__setModuleDefault(result, mod);
|
|
39
|
+
return result;
|
|
40
|
+
};
|
|
41
|
+
})();
|
|
42
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
43
|
+
exports.register = register;
|
|
44
|
+
const backends_1 = require("../backends");
|
|
45
|
+
const shared_1 = require("../shared");
|
|
46
|
+
// ─── Input gathering ────────────────────────────────────────────────────────
|
|
47
|
+
/**
 * Interactively collects the research parameters from the user.
 *
 * @param prefill optional `{ topic }` supplied on the command line; when its
 *   topic is a non-empty string the topic prompt is skipped.
 * @returns `{ topic, depth, context, output }` describing the research run.
 */
async function gatherInputs(prefill) {
    // Dynamic import keeps @inquirer/prompts off the critical startup path.
    const prompts = await Promise.resolve().then(() => __importStar(require('@inquirer/prompts')));
    // Fall back to prompting whenever the prefilled topic is absent or empty.
    let topic = prefill?.topic;
    if (!topic) {
        topic = await prompts.input({
            message: 'What topic do you want to research?',
            validate: (0, shared_1.validateRequired)('Topic'),
        });
    }
    const depth = await prompts.select({
        message: 'How deep should the research go?',
        choices: [
            { name: 'Quick overview (5-10 sources)', value: 'quick' },
            { name: 'Thorough analysis (15-25 sources)', value: 'thorough' },
            { name: 'Exhaustive deep-dive (25+ sources)', value: 'exhaustive' },
        ],
    });
    // Optional free-form focus; empty string means "no extra context".
    const context = await prompts.input({
        message: 'Any specific angles or questions to focus on? (leave blank to skip)',
        default: '',
    });
    const output = await prompts.select({
        message: 'How should the research be delivered?',
        choices: [
            { name: 'Print to terminal', value: 'terminal' },
            { name: 'Save as Markdown file', value: 'markdown' },
            { name: 'Save as JSON (structured data)', value: 'json' },
        ],
    });
    return { topic, depth, context, output };
}
|
|
76
|
+
// ─── System prompt ──────────────────────────────────────────────────────────
|
|
77
|
+
// Builds the agent system prompt for a research run.
// `opts.depth` selects one of 'quick' | 'thorough' | 'exhaustive';
// `opts.output` selects one of 'terminal' | 'markdown' | 'json'
// (values produced by gatherInputs above). Returns the full prompt string.
function buildSystemPrompt(opts) {
    // Per-depth guidance interpolated into the "Research Depth" section.
    const depthInstructions = {
        quick: 'Search 3-5 queries and scrape 5-10 of the most relevant pages.',
        thorough: 'Search 5-10 queries from different angles and scrape 15-25 pages. Cross-reference claims across sources.',
        exhaustive: 'Search 10+ queries covering every angle. Scrape 25+ pages including primary sources, research papers, expert opinions, and contrarian views. Cross-reference everything.',
    };
    // Per-format delivery instructions interpolated into "Output Format".
    const outputInstructions = {
        terminal: 'Print the full research report to the terminal in well-formatted markdown.',
        markdown: 'Save the report to a file called `research-report.md` in the current directory. Tell the user the file path when done.',
        json: 'Save the report as structured JSON to `research-report.json` in the current directory. Tell the user the file path when done.',
    };
    // NOTE: the template below is the runtime prompt text — every character,
    // including blank lines, is significant. FIRECRAWL_TOOLS_BLOCK and
    // SUBAGENT_INSTRUCTIONS come from ../shared.
    return `You are a deep research team lead powered by Firecrawl. You orchestrate parallel research agents to investigate a topic from every angle simultaneously.

${shared_1.FIRECRAWL_TOOLS_BLOCK}

## Research Depth

${depthInstructions[opts.depth]}

## Your Strategy

You are a **team lead**, not a solo researcher. Your job is to:

1. **Break the topic into angles** -- Identify 3-5 distinct research angles or subtopics.
2. **Spawn parallel subagents** -- One agent per angle. Each searches, scrapes, and analyzes from their specific perspective.
3. **Collect results** -- Each agent reports back findings with sources.
4. **Cross-reference and synthesize** -- Merge findings, resolve conflicting claims, build the unified report.

## Agent Assignments

Based on the topic, spawn agents like:
1. **Overview Agent** -- Broad searches, foundational context, definitions, key players. Scrape Wikipedia, encyclopedia-style sources, overview articles.
2. **Technical Deep-Dive Agent** -- Technical details, documentation, specifications, architecture. Scrape docs, technical blogs, research papers.
3. **Market & Industry Agent** -- Market size, trends, adoption, industry analyst perspectives. Scrape reports, news articles, industry publications.
4. **Contrarian & Risks Agent** -- Counterarguments, criticisms, failure cases, limitations. Search for "<topic> problems", "<topic> criticism", "<topic> limitations".

Adjust the number and focus of agents based on the topic and depth level.

${shared_1.SUBAGENT_INSTRUCTIONS}

## Output Format

${outputInstructions[opts.output]}

Structure the report as:

### Executive Summary
2-3 paragraph overview of key findings.

### Key Findings
Numbered list of the most important discoveries, each with supporting evidence.

### Detailed Analysis
Deep dive into each major theme or subtopic.

### Contrarian Views & Risks
What are the counterarguments? What could go wrong?

### Sources
Every URL you scraped, with a one-line summary of what you found there.

---

Be thorough and honest. Cite your sources. Flag uncertainty. Do not fabricate information.

Start working immediately when given a topic.`;
}
|
|
144
|
+
// ─── Command registration ───────────────────────────────────────────────────
|
|
145
|
+
/**
 * Registers the `deep-research` subcommand on `parentCmd`.
 *
 * @param parentCmd commander Command to attach the subcommand to.
 * @param backend key into BACKENDS selecting which agent backend to launch.
 */
function register(parentCmd, backend) {
    const backendConfig = backends_1.BACKENDS[backend];
    const cmd = parentCmd.command('deep-research');
    cmd.description('Deep research any topic using web search and scraping');
    cmd.argument('[topic]', 'Topic to research');
    cmd.option('-y, --yes', 'Auto-approve all tool permissions');
    cmd.action(async (topic, options) => {
        // A CLI-provided topic prefills the first prompt in gatherInputs.
        const prefill = topic ? { topic } : undefined;
        const answers = await gatherInputs(prefill);
        // --yes short-circuits the interactive permission-mode question.
        const skipPermissions = options.yes || (await (0, shared_1.askPermissionMode)(backend));
        console.log(`\nLaunching ${backendConfig.displayName}...\n`);
        const systemPrompt = buildSystemPrompt({ depth: answers.depth, output: answers.output });
        const initialMessage = (0, shared_1.buildMessage)([answers.topic, answers.context]);
        (0, backends_1.launchAgent)(backend, systemPrompt, initialMessage, skipPermissions);
    });
}
|
|
159
|
+
//# sourceMappingURL=deep-research.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"deep-research.js","sourceRoot":"","sources":["../../../../src/commands/experimental/workflows/deep-research.ts"],"names":[],"mappings":";AAAA;;;;;;GAMG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0IH,4BAqBC;AA5JD,0CAAkE;AAClE,sCAMmB;AAWnB,+EAA+E;AAE/E,KAAK,UAAU,YAAY,CAAC,OAA4B;IACtD,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,wDAAa,mBAAmB,GAAC,CAAC;IAE5D,MAAM,KAAK,GACT,OAAO,EAAE,KAAK;QACd,CAAC,MAAM,KAAK,CAAC;YACX,OAAO,EAAE,qCAAqC;YAC9C,QAAQ,EAAE,IAAA,yBAAgB,EAAC,OAAO,CAAC;SACpC,CAAC,CAAC,CAAC;IAEN,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC;QACzB,OAAO,EAAE,kCAAkC;QAC3C,OAAO,EAAE;YACP,EAAE,IAAI,EAAE,+BAA+B,EAAE,KAAK,EAAE,OAAO,EAAE;YACzD,EAAE,IAAI,EAAE,mCAAmC,EAAE,KAAK,EAAE,UAAU,EAAE;YAChE,EAAE,IAAI,EAAE,oCAAoC,EAAE,KAAK,EAAE,YAAY,EAAE;SACpE;KACF,CAAC,CAAC;IAEH,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC;QAC1B,OAAO,EACL,qEAAqE;QACvE,OAAO,EAAE,EAAE;KACZ,CAAC,CAAC;IAEH,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC;QAC1B,OAAO,EAAE,uCAAuC;QAChD,OAAO,EAAE;YACP,EAAE,IAAI,EAAE,mBAAmB,EAAE,KAAK,EAAE,UAAU,EAAE;YAChD,EAAE,IAAI,EAAE,uBAAuB,EAAE,KAAK,EAAE,UAAU,EAAE;YACpD,EAAE,IAAI,EAAE,gCAAgC,EAAE,KAAK,EAAE,MAAM,EAAE;SAC1D;KACF,CAAC,CAAC;IAEH,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;AAC3C,CAAC;AAED,+EAA+E;AAE/E,SAAS,iBAAiB,CAAC,IAAuC;IAChE,MAAM,iBAAiB,GAA2B;QAChD,KAAK,EAAE,gEAAgE;QACvE,QAAQ,EACN,0GAA0G;QAC5G,UAAU,EACR,0KAA0K;KAC7K,CAAC;IAEF,MAAM,kBAAkB,GAA2B;QACjD,QAAQ,EACN,4EAA4E;QAC9E,QAAQ,EACN,wHAAwH;QAC1H,IAAI,EAAE,+HAA+H;KACtI,CAAC;IAEF,OAAO;;EAEP,8BAAqB;;;;EAIrB,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC;;;;;;;;;;;;;;;;;;;;;EAqB7B,8BAAqB;;;;EAIrB,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC;;;;;;;;;;;;;;;;;;;;;;;8CAuBa,CAAC;AAC/C,CAAC;AAED,+EAA+E;AAE/E,SAAgB,QAAQ,CAAC,SAAkB,EAAE,OAAgB;IAC3D,MAAM,MAAM,GAAG,mBAAQ,CAAC,OAAO,CAAC,CAAC;IAEjC,SAAS;SACN,OAAO,CAAC,eAAe,CAAC;SACxB,WAAW,CAAC,uDAAuD,CAAC;SACpE,QAAQ,CAAC,SAAS,EAAE,mBAAmB,CAAC;SACxC,MAAM,CAAC,WAAW,EAAE,mCAAmC,CAAC;SACxD,MAAM,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;QAC/B,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;QAEjE
,MAAM,eAAe,GAAG,OAAO,CAAC,GAAG,IAAI,CAAC,MAAM,IAAA,0BAAiB,EAAC,OAAO,CAAC,CAAC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,eAAe,MAAM,CAAC,WAAW,OAAO,CAAC,CAAC;QAEtD,IAAA,sBAAW,EACT,OAAO,EACP,iBAAiB,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,EACjE,IAAA,qBAAY,EAAC,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC,EAC5C,eAAe,CAChB,CAAC;IACJ,CAAC,CAAC,CAAC;AACP,CAAC"}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Workflow: Demo Walkthrough
|
|
3
|
+
*
|
|
4
|
+
* Uses Firecrawl's cloud browser to walk through a product's key flows --
|
|
5
|
+
* signup, onboarding, pricing, docs -- step by step. Captures every screen,
|
|
6
|
+
* documents interactions, and produces a structured walkthrough report.
|
|
7
|
+
*/
|
|
8
|
+
import { Command } from 'commander';
import { type Backend } from '../backends';
/**
 * Registers the demo-walkthrough workflow as a subcommand on `parentCmd`,
 * targeting the given agent `backend`.
 */
export declare function register(parentCmd: Command, backend: Backend): void;
//# sourceMappingURL=demo.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"demo.d.ts","sourceRoot":"","sources":["../../../../src/commands/experimental/workflows/demo.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,OAAO,EAAE,KAAK,OAAO,EAAyB,MAAM,aAAa,CAAC;AAuKlE,wBAAgB,QAAQ,CAAC,SAAS,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG,IAAI,CAqBnE"}
|