@knowledgesdk/node 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +495 -0
- package/dist/api/classify.d.ts +24 -0
- package/dist/api/classify.js +19 -0
- package/dist/api/classify.js.map +1 -0
- package/dist/api/extract.d.ts +118 -0
- package/dist/api/extract.js +60 -0
- package/dist/api/extract.js.map +1 -0
- package/dist/api/jobs.d.ts +35 -0
- package/dist/api/jobs.js +43 -0
- package/dist/api/jobs.js.map +1 -0
- package/dist/api/scrape.d.ts +18 -0
- package/dist/api/scrape.js +18 -0
- package/dist/api/scrape.js.map +1 -0
- package/dist/api/screenshot.d.ts +15 -0
- package/dist/api/screenshot.js +18 -0
- package/dist/api/screenshot.js.map +1 -0
- package/dist/api/search.d.ts +29 -0
- package/dist/api/search.js +22 -0
- package/dist/api/search.js.map +1 -0
- package/dist/api/sitemap.d.ts +17 -0
- package/dist/api/sitemap.js +19 -0
- package/dist/api/sitemap.js.map +1 -0
- package/dist/api/webhooks.d.ts +40 -0
- package/dist/api/webhooks.js +39 -0
- package/dist/api/webhooks.js.map +1 -0
- package/dist/constants.d.ts +5 -0
- package/dist/constants.js +9 -0
- package/dist/constants.js.map +1 -0
- package/dist/errors.d.ts +32 -0
- package/dist/errors.js +52 -0
- package/dist/errors.js.map +1 -0
- package/dist/esm/api/classify.d.ts +24 -0
- package/dist/esm/api/classify.js +15 -0
- package/dist/esm/api/classify.js.map +1 -0
- package/dist/esm/api/extract.d.ts +118 -0
- package/dist/esm/api/extract.js +56 -0
- package/dist/esm/api/extract.js.map +1 -0
- package/dist/esm/api/jobs.d.ts +35 -0
- package/dist/esm/api/jobs.js +39 -0
- package/dist/esm/api/jobs.js.map +1 -0
- package/dist/esm/api/scrape.d.ts +18 -0
- package/dist/esm/api/scrape.js +14 -0
- package/dist/esm/api/scrape.js.map +1 -0
- package/dist/esm/api/screenshot.d.ts +15 -0
- package/dist/esm/api/screenshot.js +14 -0
- package/dist/esm/api/screenshot.js.map +1 -0
- package/dist/esm/api/search.d.ts +29 -0
- package/dist/esm/api/search.js +18 -0
- package/dist/esm/api/search.js.map +1 -0
- package/dist/esm/api/sitemap.d.ts +17 -0
- package/dist/esm/api/sitemap.js +15 -0
- package/dist/esm/api/sitemap.js.map +1 -0
- package/dist/esm/api/webhooks.d.ts +40 -0
- package/dist/esm/api/webhooks.js +35 -0
- package/dist/esm/api/webhooks.js.map +1 -0
- package/dist/esm/constants.d.ts +5 -0
- package/dist/esm/constants.js +6 -0
- package/dist/esm/constants.js.map +1 -0
- package/dist/esm/errors.d.ts +32 -0
- package/dist/esm/errors.js +43 -0
- package/dist/esm/errors.js.map +1 -0
- package/dist/esm/index.d.ts +100 -0
- package/dist/esm/index.js +91 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/utils/http-client.d.ts +62 -0
- package/dist/esm/utils/http-client.js +354 -0
- package/dist/esm/utils/http-client.js.map +1 -0
- package/dist/index.d.ts +100 -0
- package/dist/index.js +102 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +2 -0
- package/dist/index.mjs.map +1 -0
- package/dist/utils/http-client.d.ts +62 -0
- package/dist/utils/http-client.js +361 -0
- package/dist/utils/http-client.js.map +1 -0
- package/package.json +93 -0
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import { HttpClient } from '../utils/http-client';
/** One structured piece of knowledge extracted from a scraped page. */
export interface KnowledgeItem {
    title: string;
    description: string;
    content: string;
    category: string;
    source: string;
}
/** Classification of the business behind the scraped website. */
export interface BusinessProfile {
    businessName: string;
    businessType: string;
    industrySector: string;
    targetAudience: string;
    description: string;
    valueProposition: string;
    painPoints: string[];
    uniqueSellingPoints: string[];
    keyInsights: string[];
    // Classifier confidence — presumably in [0, 1]; confirm against the API docs.
    confidenceScore: number;
}
/** Final output of a completed extraction run. */
export interface ExtractResult {
    business: BusinessProfile;
    knowledgeItems: KnowledgeItem[];
    /** Number of pages scraped during the run. */
    pagesScraped: number;
    /** Number of URLs discovered from the sitemap. */
    sitemapUrls: number;
}
/** Options for a synchronous extraction run. */
export interface ExtractOptions {
    maxPages?: number;
}
/** Options for a streaming extraction run. */
export interface ExtractStreamOptions {
    maxPages?: number;
}
/**
 * Discriminated union of server-sent events emitted during a streaming
 * extraction. Switch on `type` to narrow to the payload for each stage.
 */
export type ExtractStreamEvent = {
    type: 'connected';
    message: string;
} | {
    type: 'progress';
    message: string;
} | {
    type: 'business_classified';
    business: {
        businessName: string;
        businessType: string;
        industry: string;
        description: string;
    };
} | {
    type: 'pages_planned';
    pages: Array<{
        url: string;
        purpose: string;
    }>;
} | {
    type: 'page_scraped';
    url: string;
    index: number;
    total: number;
    status: 'done' | 'failed';
} | {
    type: 'urls_triaged';
    suggestedUrls: Array<{
        url: string;
        reason: string;
    }>;
} | {
    type: 'complete';
    result: ExtractResult;
} | {
    type: 'error';
    message: string;
};
/** Options for an asynchronous (job-based) extraction run. */
export interface ExtractAsyncOptions {
    maxPages?: number;
    // Presumably the URL the service POSTs the result to when the job finishes — confirm against the API docs.
    callbackUrl?: string;
}
/** Handle returned when an async extraction job is enqueued. */
export interface ExtractAsyncResult {
    jobId: string;
    status: string;
}
export declare class Extract {
    private httpClient;
    constructor(httpClient: HttpClient);
    /**
     * Run a synchronous extraction pipeline against a URL.
     * Scrapes the site, classifies the business, and returns structured knowledge items.
     * @param url The URL to extract knowledge from
     * @param options Optional extraction options
     * @returns The full extraction result including business profile and knowledge items
     */
    run(url: string, options?: ExtractOptions): Promise<ExtractResult>;
    /**
     * Start an asynchronous extraction pipeline and return a job ID.
     * Use jobs.poll() or jobs.get() to retrieve the result when complete.
     * @param url The URL to extract knowledge from
     * @param options Optional async extraction options including a callbackUrl
     * @returns The job ID and initial status
     */
    runAsync(url: string, options?: ExtractAsyncOptions): Promise<ExtractAsyncResult>;
    /**
     * Stream extraction progress as server-sent events.
     * Yields typed events as the pipeline runs: classification, page discovery,
     * per-page scraping, and the final complete result.
     * Requires Node.js 18+ (native fetch).
     *
     * @example
     * ```typescript
     * for await (const event of client.extract.runStream('https://stripe.com')) {
     *   if (event.type === 'page_scraped') {
     *     console.log(`Scraped ${event.index + 1}/${event.total}: ${event.url}`);
     *   }
     *   if (event.type === 'complete') {
     *     console.log(event.result.knowledgeItems);
     *   }
     * }
     * ```
     */
    runStream(url: string, options?: ExtractStreamOptions): AsyncGenerator<ExtractStreamEvent>;
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
export class Extract {
|
|
2
|
+
constructor(httpClient) {
|
|
3
|
+
this.httpClient = httpClient;
|
|
4
|
+
}
|
|
5
|
+
/**
|
|
6
|
+
* Run a synchronous extraction pipeline against a URL.
|
|
7
|
+
* Scrapes the site, classifies the business, and returns structured knowledge items.
|
|
8
|
+
* @param url The URL to extract knowledge from
|
|
9
|
+
* @param options Optional extraction options
|
|
10
|
+
* @returns The full extraction result including business profile and knowledge items
|
|
11
|
+
*/
|
|
12
|
+
async run(url, options) {
|
|
13
|
+
return this.httpClient.post('/extract', {
|
|
14
|
+
url,
|
|
15
|
+
...options,
|
|
16
|
+
});
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Start an asynchronous extraction pipeline and return a job ID.
|
|
20
|
+
* Use jobs.poll() or jobs.get() to retrieve the result when complete.
|
|
21
|
+
* @param url The URL to extract knowledge from
|
|
22
|
+
* @param options Optional async extraction options including a callbackUrl
|
|
23
|
+
* @returns The job ID and initial status
|
|
24
|
+
*/
|
|
25
|
+
async runAsync(url, options) {
|
|
26
|
+
return this.httpClient.post('/extract/async', {
|
|
27
|
+
url,
|
|
28
|
+
...options,
|
|
29
|
+
});
|
|
30
|
+
}
|
|
31
|
+
/**
|
|
32
|
+
* Stream extraction progress as server-sent events.
|
|
33
|
+
* Yields typed events as the pipeline runs: classification, page discovery,
|
|
34
|
+
* per-page scraping, and the final complete result.
|
|
35
|
+
* Requires Node.js 18+ (native fetch).
|
|
36
|
+
*
|
|
37
|
+
* @example
|
|
38
|
+
* ```typescript
|
|
39
|
+
* for await (const event of client.extract.runStream('https://stripe.com')) {
|
|
40
|
+
* if (event.type === 'page_scraped') {
|
|
41
|
+
* console.log(`Scraped ${event.index + 1}/${event.total}: ${event.url}`);
|
|
42
|
+
* }
|
|
43
|
+
* if (event.type === 'complete') {
|
|
44
|
+
* console.log(event.result.knowledgeItems);
|
|
45
|
+
* }
|
|
46
|
+
* }
|
|
47
|
+
* ```
|
|
48
|
+
*/
|
|
49
|
+
async *runStream(url, options) {
|
|
50
|
+
yield* this.httpClient.streamPost('/extract/stream', {
|
|
51
|
+
url,
|
|
52
|
+
...options,
|
|
53
|
+
});
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
//# sourceMappingURL=extract.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"extract.js","sourceRoot":"","sources":["../../../src/api/extract.ts"],"names":[],"mappings":"AA0DA,MAAM,OAAO,OAAO;IAGlB,YAAY,UAAsB;QAChC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,GAAG,CAAC,GAAW,EAAE,OAAwB;QAC7C,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAgB,UAAU,EAAE;YACrD,GAAG;YACH,GAAG,OAAO;SACX,CAAC,CAAC;IACL,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,QAAQ,CAAC,GAAW,EAAE,OAA6B;QACvD,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAqB,gBAAgB,EAAE;YAChE,GAAG;YACH,GAAG,OAAO;SACX,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;;;;;;;;;;;OAiBG;IACH,KAAK,CAAC,CAAC,SAAS,CAAC,GAAW,EAAE,OAA8B;QAC1D,KAAK,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAqB,iBAAiB,EAAE;YACvE,GAAG;YACH,GAAG,OAAO;SACX,CAAC,CAAC;IACL,CAAC;CACF"}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { HttpClient } from '../utils/http-client';
/** Lifecycle states of an async job; 'completed' and 'failed' are terminal. */
export type JobStatus = 'pending' | 'processing' | 'completed' | 'failed';
/** Snapshot of an async job's state as reported by the API. */
export interface JobResult {
    jobId: string;
    status: JobStatus;
    // Operation output; optional, so presumably only populated once completed. Shape depends on the originating operation.
    result?: any;
    // Failure message; optional, so presumably only populated when failed.
    error?: string;
    createdAt: string;
    updatedAt: string;
    completedAt?: string;
}
/** Tuning knobs for Jobs.poll(). */
export interface PollOptions {
    /** Interval in milliseconds between status checks. Defaults to 2000ms. */
    intervalMs?: number;
    /** Maximum time in milliseconds to wait before throwing a TimeoutError. Defaults to 120000ms (2 minutes). */
    timeoutMs?: number;
}
export declare class Jobs {
    private httpClient;
    constructor(httpClient: HttpClient);
    /**
     * Get the current status and result of an async job.
     * @param jobId The job ID returned from an async operation (e.g. extract.runAsync)
     * @returns The current job status and result if completed
     */
    get(jobId: string): Promise<JobResult>;
    /**
     * Poll a job until it completes or fails, then return the result.
     * Throws a TimeoutError if the job does not complete within the timeout window.
     * @param jobId The job ID to poll
     * @param options Optional polling configuration
     * @returns The final job result once status is 'completed' or 'failed'
     */
    poll(jobId: string, options?: PollOptions): Promise<JobResult>;
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { TimeoutError } from '../errors';
|
|
2
|
+
export class Jobs {
|
|
3
|
+
constructor(httpClient) {
|
|
4
|
+
this.httpClient = httpClient;
|
|
5
|
+
}
|
|
6
|
+
/**
|
|
7
|
+
* Get the current status and result of an async job.
|
|
8
|
+
* @param jobId The job ID returned from an async operation (e.g. extract.runAsync)
|
|
9
|
+
* @returns The current job status and result if completed
|
|
10
|
+
*/
|
|
11
|
+
async get(jobId) {
|
|
12
|
+
return this.httpClient.get(`/jobs/${jobId}`);
|
|
13
|
+
}
|
|
14
|
+
/**
|
|
15
|
+
* Poll a job until it completes or fails, then return the result.
|
|
16
|
+
* Throws a TimeoutError if the job does not complete within the timeout window.
|
|
17
|
+
* @param jobId The job ID to poll
|
|
18
|
+
* @param options Optional polling configuration
|
|
19
|
+
* @returns The final job result once status is 'completed' or 'failed'
|
|
20
|
+
*/
|
|
21
|
+
async poll(jobId, options) {
|
|
22
|
+
var _a, _b;
|
|
23
|
+
const intervalMs = (_a = options === null || options === void 0 ? void 0 : options.intervalMs) !== null && _a !== void 0 ? _a : 2000;
|
|
24
|
+
const timeoutMs = (_b = options === null || options === void 0 ? void 0 : options.timeoutMs) !== null && _b !== void 0 ? _b : 120000;
|
|
25
|
+
const startTime = Date.now();
|
|
26
|
+
while (true) {
|
|
27
|
+
const job = await this.get(jobId);
|
|
28
|
+
if (job.status === 'completed' || job.status === 'failed') {
|
|
29
|
+
return job;
|
|
30
|
+
}
|
|
31
|
+
const elapsed = Date.now() - startTime;
|
|
32
|
+
if (elapsed + intervalMs >= timeoutMs) {
|
|
33
|
+
throw new TimeoutError(`Job ${jobId} did not complete within ${timeoutMs}ms. Last status: ${job.status}`, { code: 'job_timeout', data: { jobId, status: job.status } });
|
|
34
|
+
}
|
|
35
|
+
await new Promise((resolve) => setTimeout(resolve, intervalMs));
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
//# sourceMappingURL=jobs.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"jobs.js","sourceRoot":"","sources":["../../../src/api/jobs.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAqBzC,MAAM,OAAO,IAAI;IAGf,YAAY,UAAsB;QAChC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,GAAG,CAAC,KAAa;QACrB,OAAO,IAAI,CAAC,UAAU,CAAC,GAAG,CAAY,SAAS,KAAK,EAAE,CAAC,CAAC;IAC1D,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,IAAI,CAAC,KAAa,EAAE,OAAqB;;QAC7C,MAAM,UAAU,GAAG,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,mCAAI,IAAI,CAAC;QAC/C,MAAM,SAAS,GAAG,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,mCAAI,MAAM,CAAC;QAE/C,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAE7B,OAAO,IAAI,EAAE,CAAC;YACZ,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAElC,IAAI,GAAG,CAAC,MAAM,KAAK,WAAW,IAAI,GAAG,CAAC,MAAM,KAAK,QAAQ,EAAE,CAAC;gBAC1D,OAAO,GAAG,CAAC;YACb,CAAC;YAED,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YACvC,IAAI,OAAO,GAAG,UAAU,IAAI,SAAS,EAAE,CAAC;gBACtC,MAAM,IAAI,YAAY,CACpB,OAAO,KAAK,4BAA4B,SAAS,oBAAoB,GAAG,CAAC,MAAM,EAAE,EACjF,EAAE,IAAI,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE,EAAE,CAC7D,CAAC;YACJ,CAAC;YAED,MAAM,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC,CAAC;QAClE,CAAC;IACH,CAAC;CACF"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { HttpClient } from '../utils/http-client';
/** Content and metadata scraped from a single page. */
export interface ScrapeResult {
    url: string;
    /** Page content converted to Markdown. */
    markdown: string;
    title: string | null;
    description: string | null;
    /** Links extracted from the page. */
    links: string[];
}
export declare class Scrape {
    private httpClient;
    constructor(httpClient: HttpClient);
    /**
     * Scrape a single URL and return its content as Markdown along with metadata.
     * @param url The URL to scrape
     * @returns The scraped content including markdown, title, description and extracted links
     */
    run(url: string): Promise<ScrapeResult>;
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export class Scrape {
|
|
2
|
+
constructor(httpClient) {
|
|
3
|
+
this.httpClient = httpClient;
|
|
4
|
+
}
|
|
5
|
+
/**
|
|
6
|
+
* Scrape a single URL and return its content as Markdown along with metadata.
|
|
7
|
+
* @param url The URL to scrape
|
|
8
|
+
* @returns The scraped content including markdown, title, description and extracted links
|
|
9
|
+
*/
|
|
10
|
+
async run(url) {
|
|
11
|
+
return this.httpClient.post('/scrape', { url });
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
//# sourceMappingURL=scrape.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"scrape.js","sourceRoot":"","sources":["../../../src/api/scrape.ts"],"names":[],"mappings":"AAUA,MAAM,OAAO,MAAM;IAGjB,YAAY,UAAsB;QAChC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,GAAG,CAAC,GAAW;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAe,SAAS,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC;IAChE,CAAC;CACF"}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { HttpClient } from '../utils/http-client';
/** Result of a screenshot capture. */
export interface ScreenshotResult {
    url: string;
    /** Base64-encoded PNG of the full page (see run() below). */
    screenshot: string;
}
export declare class Screenshot {
    private httpClient;
    constructor(httpClient: HttpClient);
    /**
     * Capture a full-page screenshot of a URL.
     * @param url The URL to screenshot
     * @returns The original URL and a base64-encoded PNG screenshot
     */
    run(url: string): Promise<ScreenshotResult>;
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export class Screenshot {
|
|
2
|
+
constructor(httpClient) {
|
|
3
|
+
this.httpClient = httpClient;
|
|
4
|
+
}
|
|
5
|
+
/**
|
|
6
|
+
* Capture a full-page screenshot of a URL.
|
|
7
|
+
* @param url The URL to screenshot
|
|
8
|
+
* @returns The original URL and a base64-encoded PNG screenshot
|
|
9
|
+
*/
|
|
10
|
+
async run(url) {
|
|
11
|
+
return this.httpClient.post('/screenshot', { url });
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
//# sourceMappingURL=screenshot.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"screenshot.js","sourceRoot":"","sources":["../../../src/api/screenshot.ts"],"names":[],"mappings":"AAOA,MAAM,OAAO,UAAU;IAGrB,YAAY,UAAsB;QAChC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,GAAG,CAAC,GAAW;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAmB,aAAa,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC;IACxE,CAAC;CACF"}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { HttpClient } from '../utils/http-client';
/** One knowledge item matched by a search, with its relevance score. */
export interface SearchHit {
    id: string;
    title: string;
    content: string;
    category: string;
    source: string;
    score: number;
}
/** Full search response: matched hits, total count, and the echoed query. */
export interface SearchResult {
    hits: SearchHit[];
    total: number;
    query: string;
}
/** Optional search parameters. */
export interface SearchOptions {
    /** Maximum number of hits to return. */
    limit?: number;
    // Presumably filters results to items indexed under a specific API key — confirm against the API docs.
    apiKeyId?: string;
}
export declare class Search {
    private httpClient;
    constructor(httpClient: HttpClient);
    /**
     * Search across indexed knowledge items using a natural language query.
     * @param query The search query string
     * @param options Optional search parameters including result limit and API key ID filter
     * @returns Matching knowledge items ranked by relevance score
     */
    run(query: string, options?: SearchOptions): Promise<SearchResult>;
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
export class Search {
|
|
2
|
+
constructor(httpClient) {
|
|
3
|
+
this.httpClient = httpClient;
|
|
4
|
+
}
|
|
5
|
+
/**
|
|
6
|
+
* Search across indexed knowledge items using a natural language query.
|
|
7
|
+
* @param query The search query string
|
|
8
|
+
* @param options Optional search parameters including result limit and API key ID filter
|
|
9
|
+
* @returns Matching knowledge items ranked by relevance score
|
|
10
|
+
*/
|
|
11
|
+
async run(query, options) {
|
|
12
|
+
return this.httpClient.post('/search', {
|
|
13
|
+
query,
|
|
14
|
+
...options,
|
|
15
|
+
});
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
//# sourceMappingURL=search.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"search.js","sourceRoot":"","sources":["../../../src/api/search.ts"],"names":[],"mappings":"AAsBA,MAAM,OAAO,MAAM;IAGjB,YAAY,UAAsB;QAChC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,GAAG,CAAC,KAAa,EAAE,OAAuB;QAC9C,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAe,SAAS,EAAE;YACnD,KAAK;YACL,GAAG,OAAO;SACX,CAAC,CAAC;IACL,CAAC;CACF"}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { HttpClient } from '../utils/http-client';
/** URLs discovered for a website. */
export interface SitemapResult {
    /** The root URL that was queried. */
    url: string;
    /** All discovered page URLs. */
    urls: string[];
    /** Total number of discovered URLs. */
    count: number;
}
export declare class Sitemap {
    private httpClient;
    constructor(httpClient: HttpClient);
    /**
     * Discover all URLs for a given website by crawling its sitemap(s).
     * Falls back to crawling the site if no sitemap.xml is found.
     * @param url The root URL of the website
     * @returns The root URL, a list of discovered URLs, and the total count
     */
    run(url: string): Promise<SitemapResult>;
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
export class Sitemap {
|
|
2
|
+
constructor(httpClient) {
|
|
3
|
+
this.httpClient = httpClient;
|
|
4
|
+
}
|
|
5
|
+
/**
|
|
6
|
+
* Discover all URLs for a given website by crawling its sitemap(s).
|
|
7
|
+
* Falls back to crawling the site if no sitemap.xml is found.
|
|
8
|
+
* @param url The root URL of the website
|
|
9
|
+
* @returns The root URL, a list of discovered URLs, and the total count
|
|
10
|
+
*/
|
|
11
|
+
async run(url) {
|
|
12
|
+
return this.httpClient.post('/sitemap', { url });
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
//# sourceMappingURL=sitemap.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sitemap.js","sourceRoot":"","sources":["../../../src/api/sitemap.ts"],"names":[],"mappings":"AAQA,MAAM,OAAO,OAAO;IAGlB,YAAY,UAAsB;QAChC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,GAAG,CAAC,GAAW;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAgB,UAAU,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC;IAClE,CAAC;CACF"}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import { HttpClient } from '../utils/http-client';
/** Payload for registering a new webhook endpoint. */
export interface WebhookCreateOptions {
    /** Endpoint URL the service will deliver events to. */
    url: string;
    /** Event names the endpoint subscribes to. */
    events: string[];
    displayName?: string;
}
/** A registered webhook as returned by the API. */
export interface WebhookFull {
    id: string;
    url: string;
    events: string[];
    displayName?: string;
    status: string;
    createdAt: string;
    updatedAt: string;
}
export declare class Webhooks {
    private httpClient;
    constructor(httpClient: HttpClient);
    /**
     * Create a new webhook endpoint.
     * @param options The webhook creation options including URL, events, and optional display name
     * @returns The created webhook
     */
    create(options: WebhookCreateOptions): Promise<WebhookFull>;
    /**
     * List all registered webhooks.
     * @returns An array of all webhooks
     */
    list(): Promise<WebhookFull[]>;
    /**
     * Delete a webhook by ID.
     * @param webhookId The ID of the webhook to delete
     */
    delete(webhookId: string): Promise<void>;
    /**
     * Send a test event to a webhook to verify it is working.
     * @param webhookId The ID of the webhook to test
     */
    test(webhookId: string): Promise<void>;
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
export class Webhooks {
|
|
2
|
+
constructor(httpClient) {
|
|
3
|
+
this.httpClient = httpClient;
|
|
4
|
+
}
|
|
5
|
+
/**
|
|
6
|
+
* Create a new webhook endpoint.
|
|
7
|
+
* @param options The webhook creation options including URL, events, and optional display name
|
|
8
|
+
* @returns The created webhook
|
|
9
|
+
*/
|
|
10
|
+
async create(options) {
|
|
11
|
+
return this.httpClient.post('/webhooks', options);
|
|
12
|
+
}
|
|
13
|
+
/**
|
|
14
|
+
* List all registered webhooks.
|
|
15
|
+
* @returns An array of all webhooks
|
|
16
|
+
*/
|
|
17
|
+
async list() {
|
|
18
|
+
return this.httpClient.get('/webhooks');
|
|
19
|
+
}
|
|
20
|
+
/**
|
|
21
|
+
* Delete a webhook by ID.
|
|
22
|
+
* @param webhookId The ID of the webhook to delete
|
|
23
|
+
*/
|
|
24
|
+
async delete(webhookId) {
|
|
25
|
+
return this.httpClient.delete(`/webhooks/${webhookId}`);
|
|
26
|
+
}
|
|
27
|
+
/**
|
|
28
|
+
* Send a test event to a webhook to verify it is working.
|
|
29
|
+
* @param webhookId The ID of the webhook to test
|
|
30
|
+
*/
|
|
31
|
+
async test(webhookId) {
|
|
32
|
+
return this.httpClient.post(`/webhooks/${webhookId}/test`);
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
//# sourceMappingURL=webhooks.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"webhooks.js","sourceRoot":"","sources":["../../../src/api/webhooks.ts"],"names":[],"mappings":"AAkBA,MAAM,OAAO,QAAQ;IAGnB,YAAY,UAAsB;QAChC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,MAAM,CAAC,OAA6B;QACxC,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAc,WAAW,EAAE,OAAO,CAAC,CAAC;IACjE,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,IAAI;QACR,OAAO,IAAI,CAAC,UAAU,CAAC,GAAG,CAAgB,WAAW,CAAC,CAAC;IACzD,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,MAAM,CAAC,SAAiB;QAC5B,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,CAAO,aAAa,SAAS,EAAE,CAAC,CAAC;IAChE,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,IAAI,CAAC,SAAiB;QAC1B,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAO,aAAa,SAAS,OAAO,CAAC,CAAC;IACnE,CAAC;CACF"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,OAAO,GAAG,OAAO,CAAC;AAE/B,MAAM,CAAC,MAAM,gBAAgB,GAAG,8BAA8B,CAAC;AAC/D,MAAM,CAAC,MAAM,mBAAmB,GAAG,CAAC,CAAC;AACrC,MAAM,CAAC,MAAM,eAAe,GAAG,KAAK,CAAC;AACrC,MAAM,CAAC,MAAM,mBAAmB,GAAG,IAAI,CAAC"}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/** Optional metadata attached to SDK errors at construction time. */
export interface ErrorOptions {
    /** Machine-readable error code (e.g. 'job_timeout'). */
    code?: string;
    // Presumably the HTTP status of the failed response — confirm in http-client.
    statusCode?: number;
    requestId?: string;
    /** Underlying error that triggered this one. */
    cause?: Error;
    /** Arbitrary extra context attached to the error. */
    data?: any;
    // Retry delay hint; only surfaced by RateLimitError. Units not specified here — confirm against the API.
    retryAfter?: number;
}
/** Base class for every error thrown by this SDK; carries shared metadata. */
export declare class KnowledgeSDKError extends Error {
    readonly code?: string;
    readonly statusCode?: number;
    readonly requestId?: string;
    readonly cause?: Error;
    readonly data?: any;
    constructor(message: string, options?: ErrorOptions);
}
// NOTE(review): presumably raised for non-success API responses — confirm in http-client.
export declare class APIError extends KnowledgeSDKError {
    constructor(message: string, options?: ErrorOptions);
}
// NOTE(review): presumably raised on invalid/missing API keys — confirm in http-client.
export declare class AuthenticationError extends KnowledgeSDKError {
    constructor(message: string, options?: ErrorOptions);
}
// NOTE(review): presumably raised on transport-level failures — confirm in http-client.
export declare class NetworkError extends KnowledgeSDKError {
    constructor(message: string, options?: ErrorOptions);
}
/** Raised on rate limiting; may carry a retryAfter hint. */
export declare class RateLimitError extends KnowledgeSDKError {
    readonly retryAfter?: number;
    constructor(message: string, options?: ErrorOptions);
}
/** Raised when an operation exceeds its time budget (e.g. Jobs.poll). */
export declare class TimeoutError extends KnowledgeSDKError {
    constructor(message: string, options?: ErrorOptions);
}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
export class KnowledgeSDKError extends Error {
|
|
2
|
+
constructor(message, options = {}) {
|
|
3
|
+
super(message);
|
|
4
|
+
this.name = 'KnowledgeSDKError';
|
|
5
|
+
this.code = options.code;
|
|
6
|
+
this.statusCode = options.statusCode;
|
|
7
|
+
this.requestId = options.requestId;
|
|
8
|
+
this.cause = options.cause;
|
|
9
|
+
this.data = options.data;
|
|
10
|
+
}
|
|
11
|
+
}
|
|
12
|
+
export class APIError extends KnowledgeSDKError {
|
|
13
|
+
constructor(message, options = {}) {
|
|
14
|
+
super(message, options);
|
|
15
|
+
this.name = 'APIError';
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
export class AuthenticationError extends KnowledgeSDKError {
|
|
19
|
+
constructor(message, options = {}) {
|
|
20
|
+
super(message, options);
|
|
21
|
+
this.name = 'AuthenticationError';
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
export class NetworkError extends KnowledgeSDKError {
|
|
25
|
+
constructor(message, options = {}) {
|
|
26
|
+
super(message, options);
|
|
27
|
+
this.name = 'NetworkError';
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
export class RateLimitError extends KnowledgeSDKError {
|
|
31
|
+
constructor(message, options = {}) {
|
|
32
|
+
super(message, options);
|
|
33
|
+
this.name = 'RateLimitError';
|
|
34
|
+
this.retryAfter = typeof options.retryAfter === 'number' ? options.retryAfter : undefined;
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
export class TimeoutError extends KnowledgeSDKError {
|
|
38
|
+
constructor(message, options = {}) {
|
|
39
|
+
super(message, options);
|
|
40
|
+
this.name = 'TimeoutError';
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
//# sourceMappingURL=errors.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../../src/errors.ts"],"names":[],"mappings":"AASA,MAAM,OAAO,iBAAkB,SAAQ,KAAK;IAO1C,YAAY,OAAe,EAAE,UAAwB,EAAE;QACrD,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,mBAAmB,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC;QACnC,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC;QAC3B,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAC3B,CAAC;CACF;AAED,MAAM,OAAO,QAAS,SAAQ,iBAAiB;IAC7C,YAAY,OAAe,EAAE,UAAwB,EAAE;QACrD,KAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;IACzB,CAAC;CACF;AAED,MAAM,OAAO,mBAAoB,SAAQ,iBAAiB;IACxD,YAAY,OAAe,EAAE,UAAwB,EAAE;QACrD,KAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;IACpC,CAAC;CACF;AAED,MAAM,OAAO,YAAa,SAAQ,iBAAiB;IACjD,YAAY,OAAe,EAAE,UAAwB,EAAE;QACrD,KAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,cAAc,CAAC;IAC7B,CAAC;CACF;AAED,MAAM,OAAO,cAAe,SAAQ,iBAAiB;IAGnD,YAAY,OAAe,EAAE,UAAwB,EAAE;QACrD,KAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,gBAAgB,CAAC;QAC7B,IAAI,CAAC,UAAU,GAAG,OAAO,OAAO,CAAC,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,CAAC;CACF;AAED,MAAM,OAAO,YAAa,SAAQ,iBAAiB;IACjD,YAAY,OAAe,EAAE,UAAwB,EAAE;QACrD,KAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,cAAc,CAAC;IAC7B,CAAC;CACF"}
|