@docyrus/docyrus 0.0.56 → 0.0.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,35 +0,0 @@
1
#!/usr/bin/env node

/**
 * Dumps all cookies from the most recently opened tab of a Chrome
 * instance exposing its DevTools endpoint on http://localhost:9222.
 */

import puppeteer from "puppeteer-core";

// Connect with a 5-second timeout. Keep the timer handle so it can be
// cleared after a successful connect — otherwise the losing setTimeout
// stays pending and keeps the Node event loop (and this process) alive
// for the full 5 seconds after we finish.
let connectTimer;
const b = await Promise.race([
  puppeteer.connect({
    browserURL: "http://localhost:9222",
    defaultViewport: null,
  }),
  new Promise((_, reject) => {
    connectTimer = setTimeout(() => reject(new Error("timeout")), 5000);
  }),
]).catch((e) => {
  console.error("✗ Could not connect to browser:", e.message);
  console.error(" Run: browser-start.js");
  process.exit(1);
});
clearTimeout(connectTimer);

// The last entry in pages() is the most recently opened tab.
const p = (await b.pages()).at(-1);

if (!p) {
  console.error("✗ No active tab found");
  process.exit(1);
}

const cookies = await p.cookies();

// One block per cookie: "name: value" followed by selected attributes.
for (const cookie of cookies) {
  console.log(`${cookie.name}: ${cookie.value}`);
  console.log(` domain: ${cookie.domain}`);
  console.log(` path: ${cookie.path}`);
  console.log(` httpOnly: ${cookie.httpOnly}`);
  console.log(` secure: ${cookie.secure}`);
  console.log("");
}

await b.disconnect();
@@ -1,53 +0,0 @@
1
#!/usr/bin/env node

/**
 * Evaluates a JavaScript expression in the most recently opened tab of a
 * Chrome instance exposing its DevTools endpoint on http://localhost:9222,
 * then pretty-prints the result.
 */

import puppeteer from "puppeteer-core";

const code = process.argv.slice(2).join(" ");
if (!code) {
  console.log("Usage: browser-eval.js 'code'");
  console.log("\nExamples:");
  console.log(' browser-eval.js "document.title"');
  console.log(' browser-eval.js "document.querySelectorAll(\'a\').length"');
  process.exit(1);
}

// Connect with a 5-second timeout. Keep the timer handle so it can be
// cleared after a successful connect — otherwise the losing setTimeout
// stays pending and keeps the process alive ~5 s after we finish.
let connectTimer;
const b = await Promise.race([
  puppeteer.connect({
    browserURL: "http://localhost:9222",
    defaultViewport: null,
  }),
  new Promise((_, reject) => {
    connectTimer = setTimeout(() => reject(new Error("timeout")), 5000);
  }),
]).catch((e) => {
  console.error("✗ Could not connect to browser:", e.message);
  console.error(" Run: browser-start.js");
  process.exit(1);
});
clearTimeout(connectTimer);

// The last entry in pages() is the most recently opened tab.
const p = (await b.pages()).at(-1);

if (!p) {
  console.error("✗ No active tab found");
  process.exit(1);
}

// Evaluate inside the page. Wrapping the snippet in an AsyncFunction lets
// it use `await`, and `return (${c})` yields the expression's value.
const result = await p.evaluate((c) => {
  const AsyncFunction = (async () => {}).constructor;
  return new AsyncFunction(`return (${c})`)();
}, code);

// Pretty-print: arrays of objects as blank-line-separated key/value blocks,
// plain objects as key/value lines, anything else verbatim.
if (Array.isArray(result)) {
  for (let i = 0; i < result.length; i++) {
    if (i > 0) {
      console.log("");
    }
    for (const [key, value] of Object.entries(result[i])) {
      console.log(`${key}: ${value}`);
    }
  }
} else if (typeof result === "object" && result !== null) {
  for (const [key, value] of Object.entries(result)) {
    console.log(`${key}: ${value}`);
  }
} else {
  console.log(result);
}

await b.disconnect();
@@ -1,99 +0,0 @@
1
- #!/usr/bin/env node
2
-
3
- /**
4
- * Hacker News Scraper
5
- *
6
- * Fetches and parses submissions from Hacker News front page.
7
- * Usage: node browser-hn-scraper.js [--limit <number>]
8
- */
9
-
10
- import { JSDOM } from "jsdom";
11
-
12
/**
 * Scrapes the Hacker News front page.
 *
 * @param {number} [limit=30] - Maximum number of submissions to return.
 *   Non-finite or negative values fall back to 30 (previously a NaN limit
 *   from unvalidated CLI input silently disabled the cap).
 * @returns {Promise<Array<{id: string|null, title: string, url: string,
 *   points: number, author: string, time: string, comments: number,
 *   hnUrl: string}>>} Submissions in page order.
 * @throws Re-throws fetch/parse errors after logging them.
 */
async function scrapeHackerNews(limit = 30) {
  // Guard against NaN / negative limits so the cap always applies.
  const max = Number.isFinite(limit) && limit >= 0 ? Math.floor(limit) : 30;
  const url = "https://news.ycombinator.com";

  try {
    const response = await fetch(url);
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }

    const html = await response.text();
    const dom = new JSDOM(html, { url });
    const document = dom.window.document;
    const submissions = [];

    // Each story row has class "athing"; its metadata (score, author,
    // age, comment link) lives in the immediately following table row.
    for (const element of document.querySelectorAll(".athing")) {
      if (submissions.length >= max) {
        break;
      }

      const id = element.getAttribute("id");
      const titleLink = element.querySelector(".titleline > a");
      const title = titleLink?.textContent?.trim() || "";
      const submissionUrl = titleLink?.getAttribute("href") || "";

      const metadataRow = element.nextElementSibling;
      const subtext = metadataRow?.querySelector(".subtext");
      const score = id ? subtext?.querySelector(`#score_${id}`) : null;
      const pointsText = score?.textContent || "";
      const points = pointsText ? parseInt(pointsText.match(/\d+/)?.[0] || "0", 10) : 0;
      const author = subtext?.querySelector(".hnuser")?.textContent?.trim() || "";
      const age = subtext?.querySelector(".age");
      // Prefer the title attribute (absolute timestamp) over the relative text.
      const time = age?.getAttribute("title") || age?.textContent?.trim() || "";
      // The comment link is always the last anchor in the subtext row;
      // it reads "N comments" (or "discuss" when there are none).
      const commentLinks = subtext ? Array.from(subtext.querySelectorAll("a")) : [];
      const commentsText = commentLinks.at(-1)?.textContent || "";
      let commentsCount = 0;

      if (commentsText.includes("comment")) {
        const match = commentsText.match(/(\d+)/);
        commentsCount = match ? parseInt(match[0], 10) : 0;
      }

      submissions.push({
        id,
        title,
        url: submissionUrl,
        points,
        author,
        time,
        comments: commentsCount,
        hnUrl: `https://news.ycombinator.com/item?id=${id}`
      });
    }

    return submissions;
  } catch (error) {
    // Log for CLI users, then re-throw so programmatic callers can handle it.
    console.error('Error scraping Hacker News:', error.message);
    throw error;
  }
}
76
-
77
- // CLI interface
78
- if (import.meta.url === `file://${process.argv[1]}`) {
79
- const args = process.argv.slice(2);
80
- let limit = 30;
81
-
82
- // Parse --limit argument
83
- const limitIndex = args.indexOf('--limit');
84
- if (limitIndex !== -1 && args[limitIndex + 1]) {
85
- limit = parseInt(args[limitIndex + 1]);
86
- }
87
-
88
- scrapeHackerNews(limit)
89
- .then(submissions => {
90
- console.log(JSON.stringify(submissions, null, 2));
91
- console.error(`\n✓ Scraped ${submissions.length} submissions`);
92
- })
93
- .catch(error => {
94
- console.error('Failed to scrape:', error.message);
95
- process.exit(1);
96
- });
97
- }
98
-
99
- export { scrapeHackerNews };