@zzznpm/pia 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -0
- package/package.json +2 -1
- package/demo.js +0 -156
package/README.md
CHANGED
@@ -7,6 +7,18 @@ A small tool that collects recent headlines from Hacker News, Reddit, and TechCrunch
 - Run the demo script to fetch last week’s results and generate output:
 - `node demo.js`
 
+## Import
+
+- Import the key fetchers from the package entry point:
+
+```/dev/null/README-example.mjs#L1-5
+import {
+  fetchHackerNewsByKeyword,
+  fetchRedditByKeyword,
+  fetchTechCrunchByKeyword,
+} from '@zzznpm/pia'
+```
+
 ## Output
 
 - A summary JSON is returned.
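For reference, below is a minimal usage sketch of the fetchers the README now documents. The `(keyword, start, end)` call shape and the returned list of titles are assumptions taken from the deleted demo.js shown later in this diff, not from published API docs.

```js
// Usage sketch only; the (keyword, start, end) => Promise<string[]> shape
// is inferred from the deleted demo.js further down in this diff.
import { fetchHackerNewsByKeyword } from '@zzznpm/pia'

const start = new Date('2024-01-01T00:00:00Z') // example range; any Date pair works
const end = new Date('2024-01-07T23:59:59Z')

const titles = await fetchHackerNewsByKeyword('LLM', start, end)
console.log(`${titles.length} Hacker News titles matched "LLM"`)
```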
package/package.json
CHANGED
@@ -1,9 +1,10 @@
 {
   "name": "@zzznpm/pia",
-  "version": "0.1.1",
+  "version": "0.1.2",
   "public": true,
   "type": "module",
   "types": "src/index.d.ts",
+  "main": "./src/index.js",
   "description": "Lightweight LLM/AI agent trend analyzer.",
   "scripts": {
     "start": "node src/index.js",
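The added `main` field is what lets the bare `@zzznpm/pia` specifier in the README example resolve: with no `exports` map declared, Node.js falls back to `main`, here `./src/index.js`. That entry module is not part of this diff; a purely hypothetical sketch of a shape that would satisfy the README's named imports, assuming it re-exports the per-source fetchers that demo.js previously pulled in directly, is:

```js
// Hypothetical sketch of src/index.js; the real file is NOT shown in this diff.
// Assumes it simply re-exports each source fetcher under the aliased names
// documented in the README (the same aliasing the deleted demo.js did locally).
export { fetchByKeyword as fetchHackerNewsByKeyword } from './fetcher/hackernews.js'
export { fetchByKeyword as fetchRedditByKeyword } from './fetcher/reddit.js'
export { fetchByKeyword as fetchTechCrunchByKeyword } from './fetcher/techcrunch.js'
```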
package/demo.js
DELETED
@@ -1,156 +0,0 @@
-import fs from 'node:fs/promises'
-import path from 'node:path'
-import { endOfWeek, startOfWeek, subWeeks } from 'date-fns'
-import { fetchByKeyword as fetchHackerNewsByKeyword } from './fetcher/hackernews.js'
-import { fetchByKeyword as fetchRedditByKeyword } from './fetcher/reddit.js'
-import { fetchByKeyword as fetchTechCrunchByKeyword } from './fetcher/techcrunch.js'
-
-const GLOBAL_KEYWORDS = ['LLM', 'AI agent']
-
-const toSlug = (value) =>
-  value
-    .replace(/[^a-z0-9]+/gi, '-')
-    .replace(/^-|-$/g, '')
-    .toLowerCase()
-
-const formatRunDatePath = (date = new Date()) => {
-  const monthNames = [
-    'Jan',
-    'Feb',
-    'Mar',
-    'Apr',
-    'May',
-    'Jun',
-    'Jul',
-    'Aug',
-    'Sep',
-    'Oct',
-    'Nov',
-    'Dec',
-  ]
-  const month = monthNames[date.getMonth()]
-  const day = String(date.getDate())
-  const year = date.getFullYear()
-  return `${month}${day}_${year}`
-}
-
-const getLastWeekRange = (now = new Date()) => {
-  const lastWeekPoint = subWeeks(now, 1)
-
-  return {
-    start: startOfWeek(lastWeekPoint, { weekStartsOn: 1 }),
-    end: endOfWeek(lastWeekPoint, { weekStartsOn: 1 }),
-  }
-}
-
-const main = () => {
-  const { start, end } = getLastWeekRange()
-  const range = {
-    start: start.toISOString(),
-    end: end.toISOString(),
-  }
-
-  const sources = [
-    { name: 'HackerNews', fetcher: fetchHackerNewsByKeyword },
-    { name: 'Reddit', fetcher: fetchRedditByKeyword },
-    { name: 'TechCrunch', fetcher: fetchTechCrunchByKeyword },
-  ]
-
-  const jobs = sources.flatMap((source) =>
-    GLOBAL_KEYWORDS.map((keyword) => ({
-      name: source.name,
-      keyword,
-      promise: source.fetcher(keyword, start, end).then((titles) => [...new Set(titles)]),
-    }))
-  )
-
-  return Promise.allSettled(jobs.map((job) => job.promise))
-    .then((results) => {
-      const perKeywordResults = results.map((result, index) => {
-        const { name, keyword } = jobs[index]
-        if (result.status === 'fulfilled') {
-          return {
-            name,
-            keyword,
-            titles: result.value,
-            error: null,
-          }
-        }
-        return {
-          name,
-          keyword,
-          titles: [],
-          error: result.reason?.message || String(result.reason),
-        }
-      })
-
-      return {
-        range,
-        perKeywordResults,
-      }
-    })
-    .then(({ range: reportRange, perKeywordResults }) => {
-      const sourceSummary = new Map()
-      for (const result of perKeywordResults) {
-        if (!sourceSummary.has(result.name)) {
-          sourceSummary.set(result.name, { name: result.name, count: 0, errors: [] })
-        }
-        const entry = sourceSummary.get(result.name)
-        entry.count += result.titles.length
-        if (result.error) {
-          entry.errors.push(result.error)
-        }
-      }
-
-      const report = {
-        range: reportRange,
-        sources: Array.from(sourceSummary.values()).map((entry) => ({
-          name: entry.name,
-          count: entry.count,
-          error: entry.errors.length ? entry.errors.join('; ') : null,
-        })),
-      }
-
-      const outputDir = path.resolve('articles', formatRunDatePath())
-      const keywordDirMap = new Map([
-        ['LLM', 'LLM'],
-        ['AI agent', 'agent'],
-      ])
-      return fs
-        .mkdir(outputDir, { recursive: true })
-        .then(() =>
-          Promise.all(
-            perKeywordResults.map((result) => {
-              const safeSource = toSlug(result.name) || 'unknown'
-              const keywordDir =
-                keywordDirMap.get(result.keyword) ||
-                toSlug(result.keyword) ||
-                'keyword'
-              const fileDir = path.join(outputDir, keywordDir)
-              const filePath = path.join(fileDir, `${safeSource}.json`)
-              const payload = {
-                range: reportRange,
-                source: result.name,
-                keyword: result.keyword,
-                count: result.titles.length,
-                titles: result.titles,
-                error: result.error,
-              }
-              return fs
-                .mkdir(fileDir, { recursive: true })
-                .then(() =>
-                  fs.writeFile(filePath, JSON.stringify(payload, null, 2), 'utf8')
-                )
-            })
-          )
-        )
-        .then(() => {
-          console.log(JSON.stringify(report, null, 2))
-        })
-    })
-}
-
-main().catch((err) => {
-  console.error('Fatal:', err)
-  process.exit(1)
-})
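The deleted demo.js is still referenced by the README's `node demo.js` instruction. Below is a condensed sketch of how its core flow (a Monday-based "last week" range, one job per source/keyword pair, `Promise.allSettled`, de-duplicated titles) could be reproduced against the package entry point; it assumes the fetchers keep the same `(keyword, start, end)` signature the deleted script used.

```js
// Sketch reproducing the deleted demo's core flow via the public entry point.
// Assumes (keyword, start, end) => Promise<string[]>, as in the removed demo.js.
import { endOfWeek, startOfWeek, subWeeks } from 'date-fns'
import {
  fetchHackerNewsByKeyword,
  fetchRedditByKeyword,
  fetchTechCrunchByKeyword,
} from '@zzznpm/pia'

// Last calendar week, Monday-based, matching getLastWeekRange() in the old script.
const lastWeek = subWeeks(new Date(), 1)
const start = startOfWeek(lastWeek, { weekStartsOn: 1 })
const end = endOfWeek(lastWeek, { weekStartsOn: 1 })

const sources = [
  ['HackerNews', fetchHackerNewsByKeyword],
  ['Reddit', fetchRedditByKeyword],
  ['TechCrunch', fetchTechCrunchByKeyword],
]
const keywords = ['LLM', 'AI agent']

// One job per (source, keyword) pair, mirroring the old flatMap over sources.
const jobs = sources.flatMap(([name, fetcher]) =>
  keywords.map((keyword) => ({ name, keyword, promise: fetcher(keyword, start, end) }))
)

const results = await Promise.allSettled(jobs.map((job) => job.promise))
results.forEach((result, i) => {
  const { name, keyword } = jobs[i]
  if (result.status === 'fulfilled') {
    console.log(name, keyword, [...new Set(result.value)].length, 'unique titles')
  } else {
    console.error(name, keyword, 'failed:', result.reason?.message ?? String(result.reason))
  }
})
```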