autocrew 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/HAMLETDEER.md +562 -0
- package/LICENSE +21 -0
- package/README.md +190 -0
- package/README_CN.md +190 -0
- package/adapters/openclaw/index.ts +68 -0
- package/bin/autocrew.mjs +23 -0
- package/bin/autocrew.ts +13 -0
- package/openclaw.plugin.json +36 -0
- package/package.json +74 -0
- package/skills/_writing-style/SKILL.md +68 -0
- package/skills/audience-profiler/SKILL.md +241 -0
- package/skills/content-attribution/SKILL.md +128 -0
- package/skills/content-review/SKILL.md +257 -0
- package/skills/cover-generator/SKILL.md +93 -0
- package/skills/humanizer-zh/SKILL.md +75 -0
- package/skills/intel-digest/SKILL.md +57 -0
- package/skills/intel-pull/SKILL.md +74 -0
- package/skills/manage-pipeline/SKILL.md +63 -0
- package/skills/memory-distill/SKILL.md +89 -0
- package/skills/onboarding/SKILL.md +117 -0
- package/skills/pipeline-status/SKILL.md +51 -0
- package/skills/platform-rewrite/SKILL.md +125 -0
- package/skills/pre-publish/SKILL.md +142 -0
- package/skills/publish-content/SKILL.md +500 -0
- package/skills/remix-content/SKILL.md +77 -0
- package/skills/research/SKILL.md +127 -0
- package/skills/setup/SKILL.md +353 -0
- package/skills/spawn-batch-writer/SKILL.md +66 -0
- package/skills/spawn-planner/SKILL.md +72 -0
- package/skills/spawn-writer/SKILL.md +60 -0
- package/skills/teardown/SKILL.md +144 -0
- package/skills/title-craft/SKILL.md +234 -0
- package/skills/topic-ideas/SKILL.md +105 -0
- package/skills/video-timeline/SKILL.md +117 -0
- package/skills/write-script/SKILL.md +232 -0
- package/skills/xhs-cover-review/SKILL.md +48 -0
- package/src/adapters/browser/browser-cdp.ts +260 -0
- package/src/adapters/browser/browser-relay.ts +236 -0
- package/src/adapters/browser/gateway-client.ts +148 -0
- package/src/adapters/browser/types.ts +36 -0
- package/src/adapters/image/gemini.ts +219 -0
- package/src/adapters/research/tikhub.ts +19 -0
- package/src/cli/banner.ts +18 -0
- package/src/cli/bootstrap.ts +33 -0
- package/src/cli/commands/adapt.ts +28 -0
- package/src/cli/commands/advance.ts +28 -0
- package/src/cli/commands/assets.ts +24 -0
- package/src/cli/commands/audit.ts +18 -0
- package/src/cli/commands/contents.ts +18 -0
- package/src/cli/commands/cover.ts +58 -0
- package/src/cli/commands/events.ts +17 -0
- package/src/cli/commands/humanize.ts +27 -0
- package/src/cli/commands/index.ts +80 -0
- package/src/cli/commands/init.ts +28 -0
- package/src/cli/commands/intel.ts +55 -0
- package/src/cli/commands/learn.ts +34 -0
- package/src/cli/commands/memory.ts +18 -0
- package/src/cli/commands/migrate.ts +24 -0
- package/src/cli/commands/open.ts +21 -0
- package/src/cli/commands/pipelines.ts +18 -0
- package/src/cli/commands/pre-publish.ts +27 -0
- package/src/cli/commands/profile.ts +31 -0
- package/src/cli/commands/research.ts +36 -0
- package/src/cli/commands/restore.ts +28 -0
- package/src/cli/commands/review.ts +61 -0
- package/src/cli/commands/start.ts +28 -0
- package/src/cli/commands/status.ts +14 -0
- package/src/cli/commands/templates.ts +15 -0
- package/src/cli/commands/topics.ts +18 -0
- package/src/cli/commands/trash.ts +28 -0
- package/src/cli/commands/upgrade.ts +48 -0
- package/src/cli/commands/versions.ts +24 -0
- package/src/cli/index.ts +40 -0
- package/src/data/sensitive-words-builtin.json +114 -0
- package/src/data/source-presets.yaml +54 -0
- package/src/e2e.test.ts +596 -0
- package/src/modules/auth/cookie-manager.ts +113 -0
- package/src/modules/cards/template-engine.ts +74 -0
- package/src/modules/cards/templates/comparison-table.ts +71 -0
- package/src/modules/cards/templates/data-chart.ts +76 -0
- package/src/modules/cards/templates/flow-chart.ts +49 -0
- package/src/modules/cards/templates/key-points.ts +59 -0
- package/src/modules/cover/prompt-builder.test.ts +157 -0
- package/src/modules/cover/prompt-builder.ts +212 -0
- package/src/modules/cover/ratio-adapter.test.ts +122 -0
- package/src/modules/cover/ratio-adapter.ts +104 -0
- package/src/modules/filter/sensitive-words.test.ts +72 -0
- package/src/modules/filter/sensitive-words.ts +212 -0
- package/src/modules/humanizer/zh.test.ts +75 -0
- package/src/modules/humanizer/zh.ts +175 -0
- package/src/modules/intel/collector.ts +19 -0
- package/src/modules/intel/collectors/competitor.test.ts +71 -0
- package/src/modules/intel/collectors/competitor.ts +65 -0
- package/src/modules/intel/collectors/rss.test.ts +56 -0
- package/src/modules/intel/collectors/rss.ts +70 -0
- package/src/modules/intel/collectors/trends.test.ts +80 -0
- package/src/modules/intel/collectors/trends.ts +107 -0
- package/src/modules/intel/collectors/web-search.test.ts +85 -0
- package/src/modules/intel/collectors/web-search.ts +81 -0
- package/src/modules/intel/integration.test.ts +203 -0
- package/src/modules/intel/intel-engine.test.ts +103 -0
- package/src/modules/intel/intel-engine.ts +96 -0
- package/src/modules/intel/source-config.test.ts +113 -0
- package/src/modules/intel/source-config.ts +131 -0
- package/src/modules/learnings/diff-tracker.test.ts +144 -0
- package/src/modules/learnings/diff-tracker.ts +189 -0
- package/src/modules/learnings/rule-distiller.ts +141 -0
- package/src/modules/memory/distill.ts +208 -0
- package/src/modules/migrate/legacy-migrate.test.ts +169 -0
- package/src/modules/migrate/legacy-migrate.ts +229 -0
- package/src/modules/pro/api-client.ts +192 -0
- package/src/modules/pro/gate.test.ts +110 -0
- package/src/modules/pro/gate.ts +104 -0
- package/src/modules/profile/creator-profile.test.ts +178 -0
- package/src/modules/profile/creator-profile.ts +248 -0
- package/src/modules/publish/douyin-api.ts +34 -0
- package/src/modules/publish/wechat-mp.ts +320 -0
- package/src/modules/publish/xiaohongshu-api.ts +127 -0
- package/src/modules/research/free-engine.ts +360 -0
- package/src/modules/timeline/markup-generator.ts +63 -0
- package/src/modules/timeline/parser.ts +275 -0
- package/src/modules/workflow/templates.ts +124 -0
- package/src/modules/writing/platform-rewrite.ts +190 -0
- package/src/modules/writing/title-hashtag.ts +385 -0
- package/src/runtime/context.test.ts +97 -0
- package/src/runtime/context.ts +129 -0
- package/src/runtime/events.test.ts +83 -0
- package/src/runtime/events.ts +104 -0
- package/src/runtime/hooks.ts +174 -0
- package/src/runtime/tool-runner.test.ts +204 -0
- package/src/runtime/tool-runner.ts +282 -0
- package/src/runtime/workflow-engine.test.ts +455 -0
- package/src/runtime/workflow-engine.ts +391 -0
- package/src/server/index.ts +409 -0
- package/src/server/start.ts +39 -0
- package/src/storage/local-store.test.ts +304 -0
- package/src/storage/local-store.ts +704 -0
- package/src/storage/pipeline-store.test.ts +363 -0
- package/src/storage/pipeline-store.ts +698 -0
- package/src/tools/asset.ts +96 -0
- package/src/tools/content-save.ts +276 -0
- package/src/tools/cover-review.ts +221 -0
- package/src/tools/humanize.ts +54 -0
- package/src/tools/init.ts +133 -0
- package/src/tools/intel.ts +92 -0
- package/src/tools/memory.ts +76 -0
- package/src/tools/pipeline-ops.ts +109 -0
- package/src/tools/pipeline.ts +168 -0
- package/src/tools/pre-publish.ts +232 -0
- package/src/tools/publish.ts +183 -0
- package/src/tools/registry.ts +198 -0
- package/src/tools/research.ts +304 -0
- package/src/tools/review.ts +305 -0
- package/src/tools/rewrite.ts +165 -0
- package/src/tools/status.ts +30 -0
- package/src/tools/timeline.ts +234 -0
- package/src/tools/topic-create.ts +50 -0
- package/src/types/providers.ts +69 -0
- package/src/types/timeline.test.ts +147 -0
- package/src/types/timeline.ts +83 -0
- package/src/utils/retry.test.ts +97 -0
- package/src/utils/retry.ts +85 -0
- package/templates/AGENTS.md +99 -0
- package/templates/SOUL.md +31 -0
- package/templates/TOOLS.md +76 -0
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import RssParser from "rss-parser";
|
|
2
|
+
import type { IntelItem } from "../../../storage/pipeline-store.js";
|
|
3
|
+
import type { Collector, CollectorOptions, CollectorResult } from "../collector.js";
|
|
4
|
+
import { loadSourceConfig } from "../source-config.js";
|
|
5
|
+
|
|
6
|
+
// ─── RSS Item Parser ────────────────────────────────────────────────────────
|
|
7
|
+
|
|
8
|
+
// Shape of a single feed entry as produced by rss-parser's parseURL().
// Every field is optional because real-world feeds vary widely in what
// metadata they actually provide.
export interface RssItem {
  title?: string; // entry headline; entries without a title are dropped by parseRssItems
  link?: string; // permalink to the entry, copied into IntelItem.sourceUrl
  contentSnippet?: string; // plain-text excerpt; preferred source for the summary
  content?: string; // full (possibly HTML) body; truncated fallback for the summary
  isoDate?: string; // ISO-8601 publish date when the feed supplies one
  categories?: string[]; // feed-supplied category labels, merged into tags
  creator?: string; // author name (present in many feeds; currently unused here)
}
|
|
17
|
+
|
|
18
|
+
export function parseRssItems(
|
|
19
|
+
items: RssItem[],
|
|
20
|
+
domain: string,
|
|
21
|
+
tags?: string[],
|
|
22
|
+
): IntelItem[] {
|
|
23
|
+
return items
|
|
24
|
+
.filter((item) => item.title)
|
|
25
|
+
.map((item) => ({
|
|
26
|
+
title: item.title!,
|
|
27
|
+
domain,
|
|
28
|
+
source: "rss" as const,
|
|
29
|
+
sourceUrl: item.link,
|
|
30
|
+
collectedAt: item.isoDate ?? new Date().toISOString(),
|
|
31
|
+
relevance: 40,
|
|
32
|
+
tags: [...(tags ?? []), ...(item.categories ?? [])],
|
|
33
|
+
expiresAfter: 14,
|
|
34
|
+
summary: item.contentSnippet ?? item.content?.slice(0, 200) ?? "",
|
|
35
|
+
keyPoints: [],
|
|
36
|
+
topicPotential: "",
|
|
37
|
+
}));
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
// ─── RSS Collector ──────────────────────────────────────────────────────────
|
|
41
|
+
|
|
42
|
+
export function createRssCollector(): Collector {
|
|
43
|
+
const parser = new RssParser();
|
|
44
|
+
|
|
45
|
+
return {
|
|
46
|
+
id: "rss",
|
|
47
|
+
async collect(opts: CollectorOptions): Promise<CollectorResult> {
|
|
48
|
+
const config = await loadSourceConfig(opts.dataDir);
|
|
49
|
+
const items: IntelItem[] = [];
|
|
50
|
+
const errors: string[] = [];
|
|
51
|
+
|
|
52
|
+
for (const feed of config.rss) {
|
|
53
|
+
try {
|
|
54
|
+
const parsed = await parser.parseURL(feed.url);
|
|
55
|
+
const feedItems = parseRssItems(
|
|
56
|
+
parsed.items as RssItem[],
|
|
57
|
+
feed.domain,
|
|
58
|
+
feed.tags,
|
|
59
|
+
);
|
|
60
|
+
items.push(...feedItems);
|
|
61
|
+
} catch (err: unknown) {
|
|
62
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
63
|
+
errors.push(`RSS feed ${feed.url} failed: ${msg}`);
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
return { items, source: "rss", errors };
|
|
68
|
+
},
|
|
69
|
+
};
|
|
70
|
+
}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { describe, it, expect, vi } from "vitest";
|
|
2
|
+
import { buildTrendQueries, createTrendCollector } from "./trends.js";
|
|
3
|
+
import type { TrendSource } from "../source-config.js";
|
|
4
|
+
|
|
5
|
+
describe("buildTrendQueries", () => {
|
|
6
|
+
it("builds correct queries for each platform", () => {
|
|
7
|
+
const sources: TrendSource[] = [
|
|
8
|
+
{ source: "hackernews", min_score: 100 },
|
|
9
|
+
{ source: "weibo_hot" },
|
|
10
|
+
{ source: "reddit", subreddits: ["ChatGPT", "LocalLLaMA"] },
|
|
11
|
+
];
|
|
12
|
+
|
|
13
|
+
const queries = buildTrendQueries(sources, ["AI"]);
|
|
14
|
+
expect(queries).toHaveLength(3);
|
|
15
|
+
expect(queries[0]).toContain("Hacker News");
|
|
16
|
+
expect(queries[0]).toContain("score>100");
|
|
17
|
+
expect(queries[1]).toContain("微博热搜");
|
|
18
|
+
expect(queries[2]).toContain("r/ChatGPT");
|
|
19
|
+
expect(queries[2]).toContain("r/LocalLLaMA");
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
it("skips disabled sources", () => {
|
|
23
|
+
const sources: TrendSource[] = [
|
|
24
|
+
{ source: "hackernews", enabled: false },
|
|
25
|
+
{ source: "weibo_hot", enabled: true },
|
|
26
|
+
{ source: "google_trends" },
|
|
27
|
+
];
|
|
28
|
+
|
|
29
|
+
const queries = buildTrendQueries(sources, ["test"]);
|
|
30
|
+
expect(queries).toHaveLength(2);
|
|
31
|
+
expect(queries.every((q) => !q.includes("Hacker News"))).toBe(true);
|
|
32
|
+
});
|
|
33
|
+
|
|
34
|
+
it("includes keywords in all queries", () => {
|
|
35
|
+
const sources: TrendSource[] = [
|
|
36
|
+
{ source: "zhihu_hot" },
|
|
37
|
+
{ source: "douyin_hot" },
|
|
38
|
+
];
|
|
39
|
+
|
|
40
|
+
const queries = buildTrendQueries(sources, ["LLM", "Agent"]);
|
|
41
|
+
for (const q of queries) {
|
|
42
|
+
expect(q).toContain("LLM");
|
|
43
|
+
expect(q).toContain("Agent");
|
|
44
|
+
}
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
it("handles arxiv with categories", () => {
|
|
48
|
+
const sources: TrendSource[] = [
|
|
49
|
+
{ source: "arxiv", categories: ["cs.AI", "cs.CL"] },
|
|
50
|
+
];
|
|
51
|
+
|
|
52
|
+
const queries = buildTrendQueries(sources, ["transformer"]);
|
|
53
|
+
expect(queries[0]).toContain("arXiv");
|
|
54
|
+
expect(queries[0]).toContain("cs.AI");
|
|
55
|
+
expect(queries[0]).toContain("cs.CL");
|
|
56
|
+
});
|
|
57
|
+
|
|
58
|
+
it("handles twitter_trending with region", () => {
|
|
59
|
+
const sources: TrendSource[] = [
|
|
60
|
+
{ source: "twitter_trending", region: "US" },
|
|
61
|
+
];
|
|
62
|
+
|
|
63
|
+
const queries = buildTrendQueries(sources, ["tech"]);
|
|
64
|
+
expect(queries[0]).toContain("Twitter");
|
|
65
|
+
expect(queries[0]).toContain("US");
|
|
66
|
+
});
|
|
67
|
+
});
|
|
68
|
+
|
|
69
|
+
describe("createTrendCollector", () => {
|
|
70
|
+
it("uses trend source with 7-day expiry", async () => {
|
|
71
|
+
const mockSearch = vi.fn().mockResolvedValue([
|
|
72
|
+
{ title: "Trending Topic", url: "https://example.com/t", snippet: "Hot" },
|
|
73
|
+
]);
|
|
74
|
+
const collector = createTrendCollector(mockSearch);
|
|
75
|
+
|
|
76
|
+
// This will try to loadSourceConfig which needs real fs — we test the
|
|
77
|
+
// query building separately, so just verify the collector has correct id
|
|
78
|
+
expect(collector.id).toBe("trend");
|
|
79
|
+
});
|
|
80
|
+
});
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import type { IntelItem } from "../../../storage/pipeline-store.js";
|
|
2
|
+
import type { Collector, CollectorOptions, CollectorResult } from "../collector.js";
|
|
3
|
+
import type { TrendSource } from "../source-config.js";
|
|
4
|
+
import { loadSourceConfig } from "../source-config.js";
|
|
5
|
+
import type { SearchFn, SearchResult } from "./web-search.js";
|
|
6
|
+
|
|
7
|
+
// ─── Platform Query Templates ───────────────────────────────────────────────
|
|
8
|
+
|
|
9
|
+
// Maps a trend-source id to a builder that renders a web-search query for
// that platform's trending content. Each builder receives the source config
// (for optional knobs like min_score / region / subreddits / categories) and
// the user's keywords, and returns one query string. Keys must match the
// `source` values used in source-config; unknown sources get a generic
// fallback query in buildTrendQueries.
const PLATFORM_QUERY_MAP: Record<string, (src: TrendSource, keywords: string[]) => string> = {
  // Appends a "score>N" hint only when a minimum score is configured.
  hackernews: (src, keywords) =>
    `Hacker News top stories today ${keywords.join(" ")}${src.min_score ? ` score>${src.min_score}` : ""}`,
  producthunt: (_src, keywords) =>
    `Product Hunt trending today ${keywords.join(" ")}`,
  github_trending: (_src, keywords) =>
    `GitHub trending repositories today ${keywords.join(" ")}`,
  // Chinese platforms query in Chinese so the search engine surfaces
  // native trending-list pages.
  weibo_hot: (_src, keywords) =>
    `微博热搜 今日 ${keywords.join(" ")}`,
  zhihu_hot: (_src, keywords) =>
    `知乎热榜 今日 ${keywords.join(" ")}`,
  douyin_hot: (_src, keywords) =>
    `抖音热榜 今日 ${keywords.join(" ")}`,
  bilibili_hot: (_src, keywords) =>
    `B站热搜 今日 ${keywords.join(" ")}`,
  toutiao_hot: (_src, keywords) =>
    `今日头条热榜 ${keywords.join(" ")}`,
  // Region is appended only when configured (e.g. "US").
  twitter_trending: (src, keywords) =>
    `Twitter trending${src.region ? ` ${src.region}` : ""} ${keywords.join(" ")}`,
  // Expands configured subreddits into "r/a r/b ..." form.
  reddit: (src, keywords) => {
    const subs = src.subreddits?.length
      ? `r/${src.subreddits.join(" r/")}`
      : "";
    return `Reddit trending ${subs} ${keywords.join(" ")}`.trim();
  },
  google_trends: (_src, keywords) =>
    `Google Trends today ${keywords.join(" ")}`,
  // Joins arXiv category codes (e.g. "cs.AI cs.CL") when configured.
  arxiv: (src, keywords) => {
    const cats = src.categories?.length
      ? src.categories.join(" ")
      : "";
    return `arXiv latest papers ${cats} ${keywords.join(" ")}`.trim();
  },
  youtube_trending: (_src, keywords) =>
    `YouTube trending videos today ${keywords.join(" ")}`,
};
|
|
45
|
+
|
|
46
|
+
// ─── Query Builder ──────────────────────────────────────────────────────────
|
|
47
|
+
|
|
48
|
+
export function buildTrendQueries(
|
|
49
|
+
sources: TrendSource[],
|
|
50
|
+
keywords: string[],
|
|
51
|
+
): string[] {
|
|
52
|
+
return sources
|
|
53
|
+
.filter((src) => src.enabled !== false)
|
|
54
|
+
.map((src) => {
|
|
55
|
+
const buildQuery = PLATFORM_QUERY_MAP[src.source];
|
|
56
|
+
if (!buildQuery) return `${src.source} trending ${keywords.join(" ")}`;
|
|
57
|
+
return buildQuery(src, keywords);
|
|
58
|
+
});
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
// ─── Trend Collector ────────────────────────────────────────────────────────
|
|
62
|
+
|
|
63
|
+
function trendResultToIntel(result: SearchResult, source: string): IntelItem {
|
|
64
|
+
return {
|
|
65
|
+
title: result.title,
|
|
66
|
+
domain: source,
|
|
67
|
+
source: "trend",
|
|
68
|
+
sourceUrl: result.url,
|
|
69
|
+
collectedAt: new Date().toISOString(),
|
|
70
|
+
relevance: 50,
|
|
71
|
+
tags: ["trend", source],
|
|
72
|
+
expiresAfter: 7,
|
|
73
|
+
summary: result.snippet,
|
|
74
|
+
keyPoints: [],
|
|
75
|
+
topicPotential: "",
|
|
76
|
+
};
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
export function createTrendCollector(searchFn: SearchFn): Collector {
|
|
80
|
+
return {
|
|
81
|
+
id: "trend",
|
|
82
|
+
async collect(opts: CollectorOptions): Promise<CollectorResult> {
|
|
83
|
+
const config = await loadSourceConfig(opts.dataDir);
|
|
84
|
+
const items: IntelItem[] = [];
|
|
85
|
+
const errors: string[] = [];
|
|
86
|
+
const seen = new Set<string>();
|
|
87
|
+
|
|
88
|
+
const queries = buildTrendQueries(config.trends, opts.keywords);
|
|
89
|
+
|
|
90
|
+
for (const query of queries) {
|
|
91
|
+
try {
|
|
92
|
+
const results = await searchFn(query);
|
|
93
|
+
for (const result of results) {
|
|
94
|
+
if (seen.has(result.url)) continue;
|
|
95
|
+
seen.add(result.url);
|
|
96
|
+
items.push(trendResultToIntel(result, "trend"));
|
|
97
|
+
}
|
|
98
|
+
} catch (err: unknown) {
|
|
99
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
100
|
+
errors.push(`Trend query "${query}" failed: ${msg}`);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
return { items, source: "trend", errors };
|
|
105
|
+
},
|
|
106
|
+
};
|
|
107
|
+
}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import { describe, it, expect, vi } from "vitest";
|
|
2
|
+
import { buildMultiDimensionQueries, createWebSearchCollector } from "./web-search.js";
|
|
3
|
+
import type { SearchResult } from "./web-search.js";
|
|
4
|
+
|
|
5
|
+
describe("buildMultiDimensionQueries", () => {
|
|
6
|
+
it("generates 4+ queries with expected keywords", () => {
|
|
7
|
+
const queries = buildMultiDimensionQueries("AI Agent", "科技", ["小红书", "抖音"]);
|
|
8
|
+
expect(queries.length).toBeGreaterThanOrEqual(4);
|
|
9
|
+
// Every query should contain the keyword
|
|
10
|
+
for (const q of queries) {
|
|
11
|
+
expect(q).toContain("AI Agent");
|
|
12
|
+
}
|
|
13
|
+
});
|
|
14
|
+
|
|
15
|
+
it("includes dimension-specific terms", () => {
|
|
16
|
+
const queries = buildMultiDimensionQueries("ChatGPT", "AI", []);
|
|
17
|
+
const joined = queries.join(" ");
|
|
18
|
+
expect(joined).toContain("行业动态");
|
|
19
|
+
expect(joined).toContain("争议");
|
|
20
|
+
expect(joined).toContain("数据报告");
|
|
21
|
+
expect(joined).toContain("实操教程");
|
|
22
|
+
expect(joined).toContain("最新趋势");
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
it("adds platform-specific queries", () => {
|
|
26
|
+
const queries = buildMultiDimensionQueries("LLM", "科技", ["B站"]);
|
|
27
|
+
expect(queries.some((q) => q.includes("B站"))).toBe(true);
|
|
28
|
+
});
|
|
29
|
+
});
|
|
30
|
+
|
|
31
|
+
describe("createWebSearchCollector", () => {
|
|
32
|
+
it("collects items from search results", async () => {
|
|
33
|
+
const mockResults: SearchResult[] = [
|
|
34
|
+
{ title: "Test Article", url: "https://example.com/1", snippet: "A test" },
|
|
35
|
+
{ title: "Another Article", url: "https://example.com/2", snippet: "Another" },
|
|
36
|
+
];
|
|
37
|
+
const searchFn = vi.fn().mockResolvedValue(mockResults);
|
|
38
|
+
const collector = createWebSearchCollector(searchFn);
|
|
39
|
+
|
|
40
|
+
const result = await collector.collect({
|
|
41
|
+
keywords: ["test"],
|
|
42
|
+
industry: "科技",
|
|
43
|
+
platforms: [],
|
|
44
|
+
});
|
|
45
|
+
|
|
46
|
+
expect(result.source).toBe("web_search");
|
|
47
|
+
expect(result.items.length).toBeGreaterThan(0);
|
|
48
|
+
expect(result.items[0].source).toBe("web_search");
|
|
49
|
+
expect(result.errors).toEqual([]);
|
|
50
|
+
});
|
|
51
|
+
|
|
52
|
+
it("deduplicates by URL", async () => {
|
|
53
|
+
const sameResult: SearchResult[] = [
|
|
54
|
+
{ title: "Dup", url: "https://example.com/same", snippet: "dup" },
|
|
55
|
+
];
|
|
56
|
+
const searchFn = vi.fn().mockResolvedValue(sameResult);
|
|
57
|
+
const collector = createWebSearchCollector(searchFn);
|
|
58
|
+
|
|
59
|
+
const result = await collector.collect({
|
|
60
|
+
keywords: ["test"],
|
|
61
|
+
industry: "科技",
|
|
62
|
+
platforms: [],
|
|
63
|
+
});
|
|
64
|
+
|
|
65
|
+
// All queries return the same URL, should be deduped to 1
|
|
66
|
+
const urls = result.items.map((i) => i.sourceUrl);
|
|
67
|
+
const unique = new Set(urls);
|
|
68
|
+
expect(unique.size).toBe(1);
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
it("captures errors without crashing", async () => {
|
|
72
|
+
const searchFn = vi.fn().mockRejectedValue(new Error("Network error"));
|
|
73
|
+
const collector = createWebSearchCollector(searchFn);
|
|
74
|
+
|
|
75
|
+
const result = await collector.collect({
|
|
76
|
+
keywords: ["fail"],
|
|
77
|
+
industry: "科技",
|
|
78
|
+
platforms: [],
|
|
79
|
+
});
|
|
80
|
+
|
|
81
|
+
expect(result.items).toEqual([]);
|
|
82
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
83
|
+
expect(result.errors[0]).toContain("Network error");
|
|
84
|
+
});
|
|
85
|
+
});
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import type { IntelItem } from "../../../storage/pipeline-store.js";
|
|
2
|
+
import type { Collector, CollectorOptions, CollectorResult } from "../collector.js";
|
|
3
|
+
|
|
4
|
+
// A single hit returned by the pluggable web-search backend.
export interface SearchResult {
  title: string; // result headline, becomes the IntelItem title
  url: string; // canonical link; collectors de-duplicate on this
  snippet: string; // short excerpt shown by the search engine
}

// Pluggable search backend: takes a query string and resolves with its hits.
// Collectors treat a rejected promise as a soft error for that one query.
export type SearchFn = (query: string) => Promise<SearchResult[]>;
|
|
11
|
+
|
|
12
|
+
// ─── Query Builder ──────────────────────────────────────────────────────────
|
|
13
|
+
|
|
14
|
+
export function buildMultiDimensionQueries(
|
|
15
|
+
keyword: string,
|
|
16
|
+
industry: string,
|
|
17
|
+
platforms: string[],
|
|
18
|
+
): string[] {
|
|
19
|
+
const queries: string[] = [
|
|
20
|
+
`${keyword} ${industry} 行业动态 最新`,
|
|
21
|
+
`${keyword} 争议 观点 讨论`,
|
|
22
|
+
`${keyword} 数据报告 研究 ${new Date().getFullYear()}`,
|
|
23
|
+
`${keyword} 实操教程 方法论`,
|
|
24
|
+
`${keyword} 最新趋势 ${industry}`,
|
|
25
|
+
];
|
|
26
|
+
|
|
27
|
+
// Platform-specific queries
|
|
28
|
+
for (const platform of platforms) {
|
|
29
|
+
queries.push(`${keyword} ${platform} 热门内容`);
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
return queries;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
// ─── Web Search Collector ───────────────────────────────────────────────────
|
|
36
|
+
|
|
37
|
+
function searchResultToIntel(result: SearchResult, keyword: string): IntelItem {
|
|
38
|
+
return {
|
|
39
|
+
title: result.title,
|
|
40
|
+
domain: keyword,
|
|
41
|
+
source: "web_search",
|
|
42
|
+
sourceUrl: result.url,
|
|
43
|
+
collectedAt: new Date().toISOString(),
|
|
44
|
+
relevance: 50,
|
|
45
|
+
tags: [keyword],
|
|
46
|
+
expiresAfter: 14,
|
|
47
|
+
summary: result.snippet,
|
|
48
|
+
keyPoints: [],
|
|
49
|
+
topicPotential: "",
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
export function createWebSearchCollector(searchFn: SearchFn): Collector {
|
|
54
|
+
return {
|
|
55
|
+
id: "web_search",
|
|
56
|
+
async collect(opts: CollectorOptions): Promise<CollectorResult> {
|
|
57
|
+
const items: IntelItem[] = [];
|
|
58
|
+
const errors: string[] = [];
|
|
59
|
+
const seen = new Set<string>();
|
|
60
|
+
|
|
61
|
+
for (const keyword of opts.keywords) {
|
|
62
|
+
const queries = buildMultiDimensionQueries(keyword, opts.industry, opts.platforms);
|
|
63
|
+
for (const query of queries) {
|
|
64
|
+
try {
|
|
65
|
+
const results = await searchFn(query);
|
|
66
|
+
for (const result of results) {
|
|
67
|
+
if (seen.has(result.url)) continue;
|
|
68
|
+
seen.add(result.url);
|
|
69
|
+
items.push(searchResultToIntel(result, keyword));
|
|
70
|
+
}
|
|
71
|
+
} catch (err: unknown) {
|
|
72
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
73
|
+
errors.push(`Query "${query}" failed: ${msg}`);
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
return { items, source: "web_search", errors };
|
|
79
|
+
},
|
|
80
|
+
};
|
|
81
|
+
}
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import fs from "node:fs/promises";
|
|
3
|
+
import os from "node:os";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import {
|
|
6
|
+
initPipeline,
|
|
7
|
+
saveIntel,
|
|
8
|
+
listIntel,
|
|
9
|
+
saveTopic,
|
|
10
|
+
listTopics,
|
|
11
|
+
startProject,
|
|
12
|
+
addDraftVersion,
|
|
13
|
+
advanceProject,
|
|
14
|
+
getProjectMeta,
|
|
15
|
+
trashProject,
|
|
16
|
+
restoreProject,
|
|
17
|
+
listProjects,
|
|
18
|
+
slugify,
|
|
19
|
+
stagePath,
|
|
20
|
+
type IntelItem,
|
|
21
|
+
type TopicCandidate,
|
|
22
|
+
} from "../../storage/pipeline-store.js";
|
|
23
|
+
|
|
24
|
+
// Per-test scratch directory; created fresh before each test so tests
// never share pipeline state, and removed afterward.
let testDir: string;

beforeEach(async () => {
  // mkdtemp appends a random suffix, so parallel runs cannot collide.
  testDir = await fs.mkdtemp(
    path.join(os.tmpdir(), "autocrew-integration-test-"),
  );
});

afterEach(async () => {
  // force: true makes cleanup a no-op if the test already removed the dir.
  await fs.rm(testDir, { recursive: true, force: true });
});
|
|
35
|
+
|
|
36
|
+
// ─── Fixtures ───────────────────────────────────────────────────────────────
|
|
37
|
+
|
|
38
|
+
function makeIntel(overrides: Partial<IntelItem> = {}): IntelItem {
|
|
39
|
+
return {
|
|
40
|
+
title: "AI内容创作新趋势",
|
|
41
|
+
domain: "ai-content",
|
|
42
|
+
source: "web_search",
|
|
43
|
+
collectedAt: new Date().toISOString(),
|
|
44
|
+
relevance: 85,
|
|
45
|
+
tags: ["AI", "内容创作"],
|
|
46
|
+
expiresAfter: 7,
|
|
47
|
+
summary: "AI正在改变内容创作流程",
|
|
48
|
+
keyPoints: ["效率提升3倍", "质量可控"],
|
|
49
|
+
topicPotential: "可做系列教程选题",
|
|
50
|
+
...overrides,
|
|
51
|
+
};
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
function makeTopic(overrides: Partial<TopicCandidate> = {}): TopicCandidate {
|
|
55
|
+
return {
|
|
56
|
+
title: "AI写作工具评测",
|
|
57
|
+
domain: "ai-content",
|
|
58
|
+
score: { heat: 80, differentiation: 70, audienceFit: 90, overall: 80 },
|
|
59
|
+
formats: ["video", "article"],
|
|
60
|
+
suggestedPlatforms: ["小红书", "B站"],
|
|
61
|
+
createdAt: new Date().toISOString(),
|
|
62
|
+
intelRefs: ["2024-01-15-ai-content-trend.md"],
|
|
63
|
+
angles: ["横向对比", "实操演示"],
|
|
64
|
+
audienceResonance: "目标用户对AI工具有强烈兴趣",
|
|
65
|
+
references: ["https://example.com/ai-tools"],
|
|
66
|
+
...overrides,
|
|
67
|
+
};
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
// ─── Full Pipeline Flow ─────────────────────────────────────────────────────
|
|
71
|
+
|
|
72
|
+
// End-to-end walk of the content pipeline against a real temp filesystem:
// intel is saved, topics are ranked, a project is started from the winning
// topic, revised, and advanced through production to published. Each step
// asserts the on-disk state the next step depends on, so the ordering below
// is load-bearing.
describe("Pipeline Integration — full flow", () => {
  it("walks intel → topic → project → production → published", async () => {
    // 1. Init pipeline — every stage directory must exist afterward.
    await initPipeline(testDir);
    for (const stage of ["intel", "topics", "drafting", "production", "published", "trash"] as const) {
      const stat = await fs.stat(stagePath(stage, testDir));
      expect(stat.isDirectory()).toBe(true);
    }

    // 2. Save intel → list confirms it's there
    const intel = makeIntel();
    await saveIntel(intel, testDir);
    const intelItems = await listIntel(undefined, testDir);
    expect(intelItems.length).toBe(1);
    expect(intelItems[0].title).toBe(intel.title);

    // 3. Save topic → list confirms sorted by score
    // (saved low-score first to prove ordering comes from score, not insertion)
    const topicHigh = makeTopic({
      title: "高分选题",
      score: { heat: 95, differentiation: 90, audienceFit: 95, overall: 95 },
    });
    const topicLow = makeTopic({
      title: "低分选题",
      score: { heat: 40, differentiation: 30, audienceFit: 50, overall: 40 },
    });
    await saveTopic(topicLow, testDir);
    await saveTopic(topicHigh, testDir);

    const topics = await listTopics(undefined, testDir);
    expect(topics.length).toBe(2);
    expect(topics[0].title).toBe("高分选题");
    expect(topics[1].title).toBe("低分选题");

    // 4. Start project from high-score topic → topic consumed, project dir created
    const projectDir = await startProject("高分选题", testDir);
    expect(projectDir).toContain("drafting");

    const remainingTopics = await listTopics(undefined, testDir);
    expect(remainingTopics.length).toBe(1);
    expect(remainingTopics[0].title).toBe("低分选题");

    // startProject is expected to scaffold meta + an initial draft (v1)
    // plus the "current" draft pointer file.
    const files = await fs.readdir(projectDir);
    expect(files).toContain("meta.yaml");
    expect(files).toContain("draft-v1.md");
    expect(files).toContain("draft.md");

    const projectName = slugify("高分选题");

    // 5. Add draft version → meta.yaml updated with v2
    await addDraftVersion(
      projectName,
      "# 高分选题\n\n第二版内容",
      "improved draft",
      testDir,
    );

    let meta = await getProjectMeta(projectName, testDir);
    expect(meta).not.toBeNull();
    expect(meta!.versions.length).toBe(2);
    expect(meta!.current).toBe("draft-v2.md");
    expect(meta!.versions[1].note).toBe("improved draft");

    // 6. Advance: drafting → production → published
    await advanceProject(projectName, testDir);
    meta = await getProjectMeta(projectName, testDir);
    expect(meta!.history.at(-1)!.stage).toBe("production");

    await advanceProject(projectName, testDir);
    meta = await getProjectMeta(projectName, testDir);
    expect(meta!.history.at(-1)!.stage).toBe("published");

    // 7. Verify history has all stage entries
    expect(meta!.history.length).toBe(3);
    expect(meta!.history[0].stage).toBe("drafting");
    expect(meta!.history[1].stage).toBe("production");
    expect(meta!.history[2].stage).toBe("published");
  });
});
|
|
150
|
+
|
|
151
|
+
// ─── Trash & Restore ────────────────────────────────────────────────────────
|
|
152
|
+
|
|
153
|
+
// Verifies that trashing a project moves it out of its stage, that restore
// puts it back into the stage it came from, and that versions, metadata,
// history, and draft content all survive the round trip intact.
describe("Pipeline Integration — trash and restore", () => {
  it("trashes and restores a project preserving state", async () => {
    await initPipeline(testDir);

    // Set up a project in drafting
    await saveTopic(makeTopic({ title: "回收还原测试" }), testDir);
    await startProject("回收还原测试", testDir);
    const projectName = slugify("回收还原测试");

    // Add a draft version so there's real state
    await addDraftVersion(
      projectName,
      "# 重要内容\n\n不能丢失",
      "v2",
      testDir,
    );

    // Trash it
    await trashProject(projectName, testDir);

    // Project must now be listed under trash only, not drafting.
    const trashList = await listProjects("trash", testDir);
    expect(trashList).toContain(projectName);

    const draftingList = await listProjects("drafting", testDir);
    expect(draftingList).not.toContain(projectName);

    // Restore it — expected to return to its pre-trash stage (drafting).
    const restoredDir = await restoreProject(projectName, testDir);
    expect(restoredDir).toContain("drafting");

    // Verify state is preserved
    const meta = await getProjectMeta(projectName, testDir);
    expect(meta).not.toBeNull();
    expect(meta!.versions.length).toBe(2);
    expect(meta!.current).toBe("draft-v2.md");
    expect(meta!.title).toBe("回收还原测试");

    // History should show: drafting → trash → drafting
    expect(meta!.history.length).toBe(3);
    expect(meta!.history[0].stage).toBe("drafting");
    expect(meta!.history[1].stage).toBe("trash");
    expect(meta!.history[2].stage).toBe("drafting");

    // Content should be intact
    const draftContent = await fs.readFile(
      path.join(restoredDir, "draft.md"),
      "utf-8",
    );
    expect(draftContent).toBe("# 重要内容\n\n不能丢失");
  });
});
|