@refract-org/ingestion 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/README.md +27 -0
  2. package/dist/src/index.d.ts +49 -0
  3. package/dist/src/index.d.ts.map +1 -0
  4. package/dist/src/index.js +5 -0
  5. package/dist/src/index.js.map +1 -0
  6. package/dist/src/mediawiki-client.d.ts +24 -0
  7. package/dist/src/mediawiki-client.d.ts.map +1 -0
  8. package/dist/src/mediawiki-client.js +292 -0
  9. package/dist/src/mediawiki-client.js.map +1 -0
  10. package/dist/src/rate-limiter.d.ts +8 -0
  11. package/dist/src/rate-limiter.d.ts.map +1 -0
  12. package/dist/src/rate-limiter.js +28 -0
  13. package/dist/src/rate-limiter.js.map +1 -0
  14. package/dist/src/wikidata-mapper.d.ts +29 -0
  15. package/dist/src/wikidata-mapper.d.ts.map +1 -0
  16. package/dist/src/wikidata-mapper.js +138 -0
  17. package/dist/src/wikidata-mapper.js.map +1 -0
  18. package/dist/src/xml-dump-source.d.ts +8 -0
  19. package/dist/src/xml-dump-source.d.ts.map +1 -0
  20. package/dist/src/xml-dump-source.js +77 -0
  21. package/dist/src/xml-dump-source.js.map +1 -0
  22. package/dist/tsconfig 2.tsbuildinfo +1 -0
  23. package/dist/tsconfig.tsbuildinfo +1 -0
  24. package/package.json +28 -0
  25. package/src/__tests__/auth-integration.test.ts +59 -0
  26. package/src/__tests__/integration.test.ts +95 -0
  27. package/src/__tests__/mediawiki-client.test.ts +113 -0
  28. package/src/__tests__/page-move.test.ts +31 -0
  29. package/src/__tests__/rate-limiter.test.ts +30 -0
  30. package/src/__tests__/talk-page.test.ts +46 -0
  31. package/src/__tests__/wikidata-mapper.test.ts +134 -0
  32. package/src/__tests__/xml-dump-source.test.ts +151 -0
  33. package/src/index.ts +63 -0
  34. package/src/mediawiki-client.ts +420 -0
  35. package/src/rate-limiter.ts +29 -0
  36. package/src/wikidata-mapper.ts +197 -0
  37. package/src/xml-dump-source.ts +89 -0
@@ -0,0 +1,113 @@
1
+ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
2
+ import { MediaWikiClient } from "../mediawiki-client.js";
3
+
4
+ const MOCK_PROTECT_RESPONSE = {
5
+ query: {
6
+ logevents: [
7
+ {
8
+ logid: 100,
9
+ title: "Test",
10
+ timestamp: "2026-01-15T00:00:00Z",
11
+ comment: "semi-protected",
12
+ action: "protect",
13
+ },
14
+ {
15
+ logid: 101,
16
+ title: "Test",
17
+ timestamp: "2026-02-01T00:00:00Z",
18
+ comment: "extended",
19
+ action: "modify",
20
+ },
21
+ ],
22
+ },
23
+ };
24
+
25
+ const MOCK_EMPTY_RESPONSE = { query: { logevents: [] } };
26
+
27
+ const MOCK_PAGE_INFO_RESPONSE = {
28
+ query: {
29
+ pages: {
30
+ "100": {
31
+ pageid: 100,
32
+ title: "Test",
33
+ revisions: [
34
+ {
35
+ revid: 1,
36
+ parentid: 0,
37
+ timestamp: "2026-01-01T00:00:00Z",
38
+ comment: "first",
39
+ size: 100,
40
+ },
41
+ ],
42
+ },
43
+ },
44
+ },
45
+ };
46
+
47
+ describe("MediaWikiClient", () => {
48
+ let client: MediaWikiClient;
49
+
50
+ beforeEach(() => {
51
+ vi.useFakeTimers();
52
+ client = new MediaWikiClient({ apiUrl: "https://en.wikipedia.org/w/api.php", minDelayMs: 0 });
53
+ });
54
+
55
+ afterEach(() => {
56
+ vi.restoreAllMocks();
57
+ });
58
+
59
+ it("fetchProtectionLogs returns parsed logs", async () => {
60
+ const mockFetch = vi.fn().mockResolvedValue({
61
+ ok: true,
62
+ json: () => Promise.resolve(MOCK_PROTECT_RESPONSE),
63
+ });
64
+ vi.stubGlobal("fetch", mockFetch);
65
+
66
+ const logs = await client.fetchProtectionLogs("Test");
67
+ expect(logs).toHaveLength(2);
68
+ expect(logs[0].logId).toBe(100);
69
+ expect(logs[0].action).toBe("protect");
70
+ expect(logs[1].logId).toBe(101);
71
+ expect(logs[1].action).toBe("modify");
72
+ });
73
+
74
+ it("fetchProtectionLogs returns empty for no logs", async () => {
75
+ const mockFetch = vi.fn().mockResolvedValue({
76
+ ok: true,
77
+ json: () => Promise.resolve(MOCK_EMPTY_RESPONSE),
78
+ });
79
+ vi.stubGlobal("fetch", mockFetch);
80
+
81
+ const logs = await client.fetchProtectionLogs("Nonexistent");
82
+ expect(logs).toEqual([]);
83
+ });
84
+
85
+ it("revisions async iterator yields revisions", async () => {
86
+ const mockFetch = vi.fn().mockResolvedValue({
87
+ ok: true,
88
+ json: () => Promise.resolve(MOCK_PAGE_INFO_RESPONSE),
89
+ });
90
+ vi.stubGlobal("fetch", mockFetch);
91
+
92
+ const results: Array<{ revId: number }> = [];
93
+ for await (const rev of client.revisions("Test", { limit: 1 })) {
94
+ results.push(rev);
95
+ }
96
+ expect(results).toHaveLength(1);
97
+ expect(results[0].revId).toBe(1);
98
+ });
99
+
100
+ it("revisions returns empty when page is missing", async () => {
101
+ const mockFetch = vi.fn().mockResolvedValue({
102
+ ok: true,
103
+ json: () => Promise.resolve({ query: { pages: { "-1": { missing: "" } } } }),
104
+ });
105
+ vi.stubGlobal("fetch", mockFetch);
106
+
107
+ const revs: Array<{ revId: number }> = [];
108
+ for await (const rev of client.revisions("MissingPage", { limit: 1 })) {
109
+ revs.push(rev);
110
+ }
111
+ expect(revs).toEqual([]);
112
+ });
113
+ });
@@ -0,0 +1,31 @@
1
+ import { describe, expect, it } from "vitest";
2
+ import { MediaWikiClient } from "../mediawiki-client.js";
3
+
4
+ describe("Page move fetcher", () => {
5
+ it("fetches page moves and returns correctly shaped results", { timeout: 30000 }, async () => {
6
+ const client = new MediaWikiClient({ minDelayMs: 100 });
7
+ const moves = await client.fetchPageMoves("Earth");
8
+
9
+ expect(Array.isArray(moves)).toBe(true);
10
+
11
+ for (const move of moves) {
12
+ expect(move).toHaveProperty("oldTitle");
13
+ expect(move).toHaveProperty("newTitle");
14
+ expect(move).toHaveProperty("timestamp");
15
+ expect(move).toHaveProperty("revId");
16
+ expect(move).toHaveProperty("comment");
17
+
18
+ expect(typeof move.oldTitle).toBe("string");
19
+ expect(typeof move.newTitle).toBe("string");
20
+ expect(typeof move.timestamp).toBe("string");
21
+ expect(typeof move.revId).toBe("number");
22
+ expect(typeof move.comment).toBe("string");
23
+ }
24
+ });
25
+
26
+ it("returns empty array for a non-existent page", { timeout: 30000 }, async () => {
27
+ const client = new MediaWikiClient({ minDelayMs: 100 });
28
+ const moves = await client.fetchPageMoves("ThisPageDoesNotExistXYZ123!!!");
29
+ expect(moves).toEqual([]);
30
+ });
31
+ });
@@ -0,0 +1,30 @@
1
+ import { describe, expect, it } from "vitest";
2
+ import { RateLimiter } from "../rate-limiter.js";
3
+
4
+ describe("RateLimiter", () => {
5
+ it("acquires without error", async () => {
6
+ const limiter = new RateLimiter(1);
7
+ await limiter.acquire();
8
+ });
9
+
10
+ it("acquires multiple times sequentially", async () => {
11
+ const limiter = new RateLimiter(1);
12
+ await limiter.acquire();
13
+ await limiter.acquire();
14
+ await limiter.acquire();
15
+ });
16
+
17
+ it("enforces minimum delay between acquires", async () => {
18
+ const limiter = new RateLimiter(50);
19
+ const start = Date.now();
20
+ await limiter.acquire();
21
+ await limiter.acquire();
22
+ const elapsed = Date.now() - start;
23
+ expect(elapsed).toBeGreaterThanOrEqual(45);
24
+ });
25
+
26
+ it("accepts custom delay", async () => {
27
+ const limiter = new RateLimiter(10);
28
+ await limiter.acquire();
29
+ });
30
+ });
@@ -0,0 +1,46 @@
1
+ import { describe, expect, it } from "vitest";
2
+ import { MediaWikiClient } from "../mediawiki-client.js";
3
+
4
+ describe("Talk page fetcher", () => {
5
+ it("fetches talk page revisions for Earth (Talk:Earth)", { timeout: 60000 }, async () => {
6
+ const client = new MediaWikiClient({ minDelayMs: 100 });
7
+ const revisions = await client.fetchTalkRevisions("Earth", {
8
+ limit: 5,
9
+ direction: "newer",
10
+ });
11
+
12
+ expect(revisions.length).toBeGreaterThanOrEqual(1);
13
+
14
+ for (const rev of revisions) {
15
+ expect(rev).toHaveProperty("revId");
16
+ expect(rev).toHaveProperty("pageId");
17
+ expect(rev).toHaveProperty("pageTitle");
18
+ expect(rev).toHaveProperty("timestamp");
19
+ expect(rev).toHaveProperty("comment");
20
+ expect(rev).toHaveProperty("content");
21
+ expect(rev).toHaveProperty("size");
22
+ expect(rev).toHaveProperty("minor");
23
+
24
+ expect(typeof rev.revId).toBe("number");
25
+ expect(rev.revId).toBeGreaterThan(0);
26
+ expect(typeof rev.pageId).toBe("number");
27
+ expect(rev.pageId).toBeGreaterThan(0);
28
+ expect(rev.pageTitle).toContain("Talk:");
29
+ expect(typeof rev.timestamp).toBe("string");
30
+ expect(typeof rev.size).toBe("number");
31
+ expect(typeof rev.minor).toBe("boolean");
32
+
33
+ expect(rev.content.length).toBeGreaterThan(0);
34
+ }
35
+ });
36
+
37
+ it("accepts custom talk prefix for Project namespace talk pages", { timeout: 60000 }, async () => {
38
+ const client = new MediaWikiClient({ minDelayMs: 100 });
39
+ const revisions = await client.fetchTalkRevisions("About", { limit: 1, direction: "newer" }, "Wikipedia talk:");
40
+
41
+ expect(revisions.length).toBeGreaterThanOrEqual(1);
42
+ for (const rev of revisions) {
43
+ expect(rev.pageTitle).toContain("Wikipedia talk:");
44
+ }
45
+ });
46
+ });
@@ -0,0 +1,134 @@
1
+ import { describe, expect, it, vi } from "vitest";
2
+ import { fetchWikidataId, mapPagesToEntities, mapPageToEntity, wikidataEntityToEvents } from "../wikidata-mapper.js";
3
+
4
+ describe("fetchWikidataId", () => {
5
+ it("returns null when page has no Wikidata ID", async () => {
6
+ globalThis.fetch = vi.fn().mockResolvedValue({
7
+ ok: true,
8
+ json: async () => ({ query: { pages: { "-1": { title: "NoSuchPage" } } } }),
9
+ });
10
+
11
+ const result = await fetchWikidataId("NoSuchPage");
12
+ expect(result).toBeNull();
13
+ });
14
+
15
+ it("returns QID when pageprops contains wikibase_item", async () => {
16
+ globalThis.fetch = vi.fn().mockResolvedValue({
17
+ ok: true,
18
+ json: async () => ({
19
+ query: { pages: { "123": { pageprops: { wikibase_item: "Q42" } } } },
20
+ }),
21
+ });
22
+
23
+ const result = await fetchWikidataId("Douglas_Adams");
24
+ expect(result).toBe("Q42");
25
+ });
26
+
27
+ it("returns null on fetch error", async () => {
28
+ globalThis.fetch = vi.fn().mockResolvedValue({ ok: false, status: 404 });
29
+
30
+ const result = await fetchWikidataId("SomePage");
31
+ expect(result).toBeNull();
32
+ });
33
+ });
34
+
35
+ describe("mapPageToEntity", () => {
36
+ it("returns empty qid when no Wikidata ID found", async () => {
37
+ globalThis.fetch = vi.fn().mockResolvedValue({
38
+ ok: true,
39
+ json: async () => ({ query: { pages: { "-1": {} } } }),
40
+ });
41
+
42
+ const result = await mapPageToEntity("NoPage");
43
+ expect(result.qid).toBe("");
44
+ expect(result.pageTitle).toBe("NoPage");
45
+ expect(result.entity).toBeUndefined();
46
+ });
47
+
48
+ it("returns entity when Wikidata ID is found", async () => {
49
+ const fetchMock = vi.fn();
50
+ fetchMock
51
+ .mockResolvedValueOnce({
52
+ ok: true,
53
+ json: async () => ({
54
+ query: { pages: { "123": { pageprops: { wikibase_item: "Q1" } } } },
55
+ }),
56
+ })
57
+ .mockResolvedValueOnce({
58
+ ok: true,
59
+ json: async () => ({
60
+ entities: {
61
+ Q1: {
62
+ labels: { en: { value: "Universe" } },
63
+ descriptions: { en: { value: "everything" } },
64
+ aliases: {},
65
+ claims: {
66
+ P31: [
67
+ {
68
+ mainsnak: {
69
+ snaktype: "value",
70
+ datavalue: { type: "wikibase-item", value: { id: "Q2" } },
71
+ },
72
+ },
73
+ ],
74
+ P569: [
75
+ {
76
+ mainsnak: {
77
+ snaktype: "value",
78
+ datavalue: { type: "time", value: { time: "+1800-01-01T00:00:00Z" } },
79
+ },
80
+ },
81
+ ],
82
+ },
83
+ },
84
+ },
85
+ }),
86
+ });
87
+
88
+ globalThis.fetch = fetchMock;
89
+
90
+ const result = await mapPageToEntity("Universe");
91
+ expect(result.qid).toBe("Q1");
92
+ expect(result.entity?.label).toBe("Universe");
93
+ expect(result.entity?.description).toBe("everything");
94
+ expect(result.entity?.instanceOf).toEqual(["Q2"]);
95
+ expect(Object.keys(result.entity?.claims ?? {})).toHaveLength(2); // P31 (instance of) + P569 (date of birth)
96
+ });
97
+ });
98
+
99
+ describe("mapPagesToEntities", () => {
100
+ it("maps multiple pages concurrently", async () => {
101
+ globalThis.fetch = vi.fn().mockResolvedValue({
102
+ ok: true,
103
+ json: async () => ({ query: { pages: { "-1": {} } } }),
104
+ });
105
+
106
+ const results = await mapPagesToEntities(["PageA", "PageB", "PageC"], 2);
107
+ expect(results).toHaveLength(3);
108
+ expect(results.every((r) => r.qid === "")).toBe(true);
109
+ });
110
+ });
111
+
112
+ describe("wikidataEntityToEvents", () => {
113
+ it("generates events from entity data", () => {
114
+ const entity = {
115
+ qid: "Q42",
116
+ label: "Test",
117
+ description: "A test entity",
118
+ aliases: [],
119
+ instanceOf: ["Q5"],
120
+ claims: {
121
+ P31: {
122
+ property: "P31",
123
+ propertyLabel: "instance of",
124
+ values: [{ type: "wikibase-item" as const, value: "Q5" }],
125
+ },
126
+ },
127
+ };
128
+
129
+ const events = wikidataEntityToEvents(entity, "Test_Page");
130
+ expect(events.length).toBeGreaterThanOrEqual(1);
131
+ expect(events[0].eventType).toBe("sentence_first_seen");
132
+ expect(events[0].deterministicFacts[0].fact).toBe("wikidata_entity_linked");
133
+ });
134
+ });
@@ -0,0 +1,151 @@
1
+ import { mkdtempSync, rmSync, writeFileSync } from "node:fs";
2
+ import { tmpdir } from "node:os";
3
+ import { join } from "node:path";
4
+ import { describe, expect, it } from "vitest";
5
+ import { XmlDumpRevisionSource } from "../xml-dump-source.js";
6
+
7
+ const SAMPLE_DUMP = `<?xml version="1.0"?>
8
+ <mediawiki xmlns="http://www.mediawiki.org/xml/export-0.11/">
9
+ <page>
10
+ <title>Earth</title>
11
+ <ns>0</ns>
12
+ <id>1</id>
13
+ <revision>
14
+ <id>10</id>
15
+ <timestamp>2026-01-01T00:00:00Z</timestamp>
16
+ <contributor><username>Alice</username><id>100</id></contributor>
17
+ <comment>first revision</comment>
18
+ <text>Hello world</text>
19
+ <sha1>abc</sha1>
20
+ </revision>
21
+ <revision>
22
+ <id>20</id>
23
+ <timestamp>2026-02-01T00:00:00Z</timestamp>
24
+ <contributor><username>Bob</username><id>200</id></contributor>
25
+ <comment>added content</comment>
26
+ <text>Hello world. More content here.</text>
27
+ <sha1>def</sha1>
28
+ </revision>
29
+ </page>
30
+ <page>
31
+ <title>Mars</title>
32
+ <ns>0</ns>
33
+ <id>2</id>
34
+ <revision>
35
+ <id>30</id>
36
+ <timestamp>2026-03-01T00:00:00Z</timestamp>
37
+ <contributor><username>Carol</username><id>300</id></contributor>
38
+ <comment>mars revision</comment>
39
+ <text>Mars content</text>
40
+ <sha1>ghi</sha1>
41
+ </revision>
42
+ </page>
43
+ </mediawiki>`;
44
+
45
+ const SAMPLE_ENTITIES = `<?xml version="1.0"?>
46
+ <mediawiki xmlns="http://www.mediawiki.org/xml/export-0.11/">
47
+ <page>
48
+ <title>Foo &amp; Bar</title>
49
+ <ns>0</ns>
50
+ <id>3</id>
51
+ <revision>
52
+ <id>40</id>
53
+ <timestamp>2026-04-01T00:00:00Z</timestamp>
54
+ <contributor><username>Dave</username><id>400</id></contributor>
55
+ <comment>with &amp; entities</comment>
56
+ <text>Text with &lt;tags&gt; &amp; entities.</text>
57
+ <sha1>jkl</sha1>
58
+ </revision>
59
+ </page>
60
+ </mediawiki>`;
61
+
62
+ describe("XmlDumpRevisionSource", () => {
63
+ it("yields revisions for matching page title", async () => {
64
+ const dir = mkdtempSync(join(tmpdir(), "varia-xml-test-"));
65
+ const filePath = join(dir, "dump.xml");
66
+ writeFileSync(filePath, SAMPLE_DUMP, "utf-8");
67
+
68
+ const source = new XmlDumpRevisionSource(filePath);
69
+ const revs: Array<{ revId: number; pageTitle: string; content: string }> = [];
70
+ for await (const rev of source.revisions("Earth")) {
71
+ revs.push(rev);
72
+ }
73
+
74
+ rmSync(dir, { recursive: true });
75
+
76
+ expect(revs).toHaveLength(2);
77
+ expect(revs[0].revId).toBe(10);
78
+ expect(revs[0].pageTitle).toBe("Earth");
79
+ expect(revs[0].content).toBe("Hello world");
80
+ expect(revs[1].revId).toBe(20);
81
+ expect(revs[1].content).toBe("Hello world. More content here.");
82
+ });
83
+
84
+ it("returns empty for non-existent page", async () => {
85
+ const dir = mkdtempSync(join(tmpdir(), "varia-xml-test-"));
86
+ const filePath = join(dir, "dump.xml");
87
+ writeFileSync(filePath, SAMPLE_DUMP, "utf-8");
88
+
89
+ const source = new XmlDumpRevisionSource(filePath);
90
+ const revs: Array<unknown> = [];
91
+ for await (const rev of source.revisions("Venus")) {
92
+ revs.push(rev);
93
+ }
94
+
95
+ rmSync(dir, { recursive: true });
96
+ expect(revs).toEqual([]);
97
+ });
98
+
99
+ it("respects limit option", async () => {
100
+ const dir = mkdtempSync(join(tmpdir(), "varia-xml-test-"));
101
+ const filePath = join(dir, "dump.xml");
102
+ writeFileSync(filePath, SAMPLE_DUMP, "utf-8");
103
+
104
+ const source = new XmlDumpRevisionSource(filePath);
105
+ const revs: Array<{ revId: number }> = [];
106
+ for await (const rev of source.revisions("Earth", { limit: 1 })) {
107
+ revs.push(rev);
108
+ }
109
+
110
+ rmSync(dir, { recursive: true });
111
+ expect(revs).toHaveLength(1);
112
+ expect(revs[0].revId).toBe(10);
113
+ });
114
+
115
+ it("respects timestamp filtering", async () => {
116
+ const dir = mkdtempSync(join(tmpdir(), "varia-xml-test-"));
117
+ const filePath = join(dir, "dump.xml");
118
+ writeFileSync(filePath, SAMPLE_DUMP, "utf-8");
119
+
120
+ const source = new XmlDumpRevisionSource(filePath);
121
+ const revs: Array<{ revId: number }> = [];
122
+ for await (const rev of source.revisions("Earth", {
123
+ start: new Date("2026-01-15T00:00:00Z"),
124
+ })) {
125
+ revs.push(rev);
126
+ }
127
+
128
+ rmSync(dir, { recursive: true });
129
+ expect(revs).toHaveLength(1);
130
+ expect(revs[0].revId).toBe(20);
131
+ });
132
+
133
+ it("handles XML entities in title and content", async () => {
134
+ const dir = mkdtempSync(join(tmpdir(), "varia-xml-test-"));
135
+ const filePath = join(dir, "dump.xml");
136
+ writeFileSync(filePath, SAMPLE_ENTITIES, "utf-8");
137
+
138
+ const source = new XmlDumpRevisionSource(filePath);
139
+ const revs: Array<{ revId: number; pageTitle: string; content: string; comment: string }> = [];
140
+ for await (const rev of source.revisions("Foo & Bar")) {
141
+ revs.push(rev);
142
+ }
143
+
144
+ rmSync(dir, { recursive: true });
145
+
146
+ expect(revs).toHaveLength(1);
147
+ expect(revs[0].revId).toBe(40);
148
+ expect(revs[0].content).toBe("Text with <tags> & entities.");
149
+ expect(revs[0].comment).toBe("with & entities");
150
+ });
151
+ });
package/src/index.ts ADDED
@@ -0,0 +1,63 @@
1
import type { DiffResult, Revision } from "@refract-org/evidence-graph";

/**
 * Credentials for authenticated API access. All fields are optional; which
 * combination is required depends on the consuming client implementation.
 */
export interface AuthConfig {
  apiKey?: string;
  apiUser?: string;
  apiPassword?: string;
  oauthClientId?: string;
  oauthClientSecret?: string;
}

/** Fetches a page's revisions as one batch. */
export interface RevisionFetcher {
  fetchRevisions(pageTitle: string, options?: RevisionOptions): Promise<Revision[]>;
}

/** Streams a page's revisions one at a time via async iteration. */
export interface RevisionSource {
  revisions(pageTitle: string, options?: RevisionOptions): AsyncIterable<Revision>;
}

/** Computes the diff between two revisions identified by revision ID. */
export interface DiffFetcher {
  fetchDiff(fromRevId: number, toRevId: number): Promise<DiffResult>;
}

/** Retrieves the move (rename) history of a page. */
export interface MoveFetcher {
  fetchPageMoves(pageTitle: string): Promise<PageMove[]>;
}

/** One entry from a page's protection log. */
export interface ProtectionLogEvent {
  /** Unique ID of the log entry. */
  logId: number;
  /** Title of the affected page. */
  pageTitle: string;
  /** Event time as a string — presumably ISO 8601; confirm against the producing client. */
  timestamp: string;
  /** Free-form comment left by the acting user. */
  comment: string;
  /** Kind of protection change recorded. */
  action: "protect" | "unprotect" | "modify";
  /** Protection level, when the log entry carries one. */
  level?: string;
}

/** A recorded page rename. */
export interface PageMove {
  /** Title before the move. */
  oldTitle: string;
  /** Title after the move. */
  newTitle: string;
  /** Event time as a string — presumably ISO 8601; confirm against the producing client. */
  timestamp: string;
  /** Revision ID associated with the move. */
  revId: number;
  /** Free-form comment left by the mover. */
  comment: string;
}

/** Filtering and paging options accepted by revision fetchers/sources. */
export interface RevisionOptions {
  /** Upper bound on the number of revisions returned or yielded. */
  limit?: number;
  /** Lower time bound for revisions — NOTE(review): inclusivity not visible here; confirm in implementations. */
  start?: Date;
  /** Upper time bound for revisions — NOTE(review): inclusivity not visible here; confirm in implementations. */
  end?: Date;
  /** Traversal order, "newer" or "older" — presumably mirrors MediaWiki rvdir semantics; verify in the client. */
  direction?: "newer" | "older";
  /** Revision ID to start from. */
  startRevId?: number;
  /** Revision ID to end at. */
  endRevId?: number;
}

export { MediaWikiClient } from "./mediawiki-client.js";
export { RateLimiter } from "./rate-limiter.js";
export type { PageToEntityMap, WikidataClaim, WikidataEntity, WikidataValue } from "./wikidata-mapper.js";
export {
  fetchWikidataEntity,
  fetchWikidataId,
  mapPagesToEntities,
  mapPageToEntity,
  wikidataEntityToEvents,
} from "./wikidata-mapper.js";
export { XmlDumpRevisionSource } from "./xml-dump-source.js";