@credal/actions 0.2.123 → 0.2.125
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/actions/autogen/templates.js +8 -8
- package/dist/actions/autogen/types.js +7 -8
- package/dist/actions/providers/confluence/updatePage.js +14 -15
- package/dist/actions/providers/generic/fillTemplateAction.d.ts +7 -0
- package/dist/actions/providers/generic/fillTemplateAction.js +18 -0
- package/dist/actions/providers/generic/genericApiCall.d.ts +3 -0
- package/dist/actions/providers/generic/genericApiCall.js +38 -0
- package/dist/actions/providers/google-oauth/getDriveContentById.d.ts +3 -0
- package/dist/actions/providers/google-oauth/getDriveContentById.js +161 -0
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.js +47 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.js +110 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.js +78 -0
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.d.ts +15 -0
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.js +129 -0
- package/dist/actions/providers/googlemaps/nearbysearch.d.ts +3 -0
- package/dist/actions/providers/googlemaps/nearbysearch.js +96 -0
- package/dist/actions/providers/slackUser/searchSlack.js +10 -5
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.d.ts +3 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js +154 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.d.ts +3 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.js +45 -0
- package/dist/utils/pdf.d.ts +1 -1
- package/dist/utils/pdf.js +27 -16
- package/package.json +1 -3
- package/dist/actions/providers/jamf/types.d.ts +0 -8
- package/dist/actions/providers/jamf/types.js +0 -7
|
@@ -761,7 +761,7 @@ export const slackUserSearchSlackDefinition = {
|
|
|
761
761
|
default: "latest",
|
|
762
762
|
},
|
|
763
763
|
limit: {
|
|
764
|
-
type: "
|
|
764
|
+
type: "number",
|
|
765
765
|
description: "Max matches to request (passed to Slack search; results are then hydrated and sorted newest-first).",
|
|
766
766
|
minimum: 1,
|
|
767
767
|
maximum: 100,
|
|
@@ -4638,12 +4638,12 @@ export const googleOauthScheduleCalendarMeetingDefinition = {
|
|
|
4638
4638
|
description: "How often the meeting repeats",
|
|
4639
4639
|
},
|
|
4640
4640
|
interval: {
|
|
4641
|
-
type: "
|
|
4641
|
+
type: "number",
|
|
4642
4642
|
minimum: 1,
|
|
4643
4643
|
description: "The interval between recurrences (e.g., every 2 weeks)",
|
|
4644
4644
|
},
|
|
4645
4645
|
count: {
|
|
4646
|
-
type: "
|
|
4646
|
+
type: "number",
|
|
4647
4647
|
minimum: 1,
|
|
4648
4648
|
description: "Number of occurrences after which to stop the recurrence",
|
|
4649
4649
|
},
|
|
@@ -4663,7 +4663,7 @@ export const googleOauthScheduleCalendarMeetingDefinition = {
|
|
|
4663
4663
|
type: "array",
|
|
4664
4664
|
description: "Days of the month when the meeting occurs (for MONTHLY frequency)",
|
|
4665
4665
|
items: {
|
|
4666
|
-
type: "
|
|
4666
|
+
type: "number",
|
|
4667
4667
|
minimum: 1,
|
|
4668
4668
|
maximum: 31,
|
|
4669
4669
|
},
|
|
@@ -11429,7 +11429,7 @@ export const githubGetBranchDefinition = {
|
|
|
11429
11429
|
description: "The commit URL",
|
|
11430
11430
|
},
|
|
11431
11431
|
comment_count: {
|
|
11432
|
-
type: "
|
|
11432
|
+
type: "number",
|
|
11433
11433
|
description: "Number of comments on the commit",
|
|
11434
11434
|
},
|
|
11435
11435
|
},
|
|
@@ -11443,7 +11443,7 @@ export const githubGetBranchDefinition = {
|
|
|
11443
11443
|
type: "string",
|
|
11444
11444
|
},
|
|
11445
11445
|
id: {
|
|
11446
|
-
type: "
|
|
11446
|
+
type: "number",
|
|
11447
11447
|
},
|
|
11448
11448
|
node_id: {
|
|
11449
11449
|
type: "string",
|
|
@@ -11468,7 +11468,7 @@ export const githubGetBranchDefinition = {
|
|
|
11468
11468
|
type: "string",
|
|
11469
11469
|
},
|
|
11470
11470
|
id: {
|
|
11471
|
-
type: "
|
|
11471
|
+
type: "number",
|
|
11472
11472
|
},
|
|
11473
11473
|
node_id: {
|
|
11474
11474
|
type: "string",
|
|
@@ -12203,7 +12203,7 @@ export const gitlabGetFileContentDefinition = {
|
|
|
12203
12203
|
required: ["project_id", "path"],
|
|
12204
12204
|
properties: {
|
|
12205
12205
|
project_id: {
|
|
12206
|
-
type: "
|
|
12206
|
+
type: "number",
|
|
12207
12207
|
description: "Numeric project ID in GitLab (unique per project)",
|
|
12208
12208
|
},
|
|
12209
12209
|
path: {
|
|
@@ -289,7 +289,6 @@ export const slackUserSearchSlackParamsSchema = z.object({
|
|
|
289
289
|
.default("latest"),
|
|
290
290
|
limit: z
|
|
291
291
|
.number()
|
|
292
|
-
.int()
|
|
293
292
|
.gte(1)
|
|
294
293
|
.lte(100)
|
|
295
294
|
.describe("Max matches to request (passed to Slack search; results are then hydrated and sorted newest-first).")
|
|
@@ -1601,15 +1600,15 @@ export const googleOauthScheduleCalendarMeetingParamsSchema = z.object({
|
|
|
1601
1600
|
recurrence: z
|
|
1602
1601
|
.object({
|
|
1603
1602
|
frequency: z.enum(["DAILY", "WEEKLY", "MONTHLY", "YEARLY"]).describe("How often the meeting repeats").optional(),
|
|
1604
|
-
interval: z.number().
|
|
1605
|
-
count: z.number().
|
|
1603
|
+
interval: z.number().gte(1).describe("The interval between recurrences (e.g., every 2 weeks)").optional(),
|
|
1604
|
+
count: z.number().gte(1).describe("Number of occurrences after which to stop the recurrence").optional(),
|
|
1606
1605
|
until: z.string().describe("End date for the recurrence in RFC3339 format (YYYY-MM-DD)").optional(),
|
|
1607
1606
|
byDay: z
|
|
1608
1607
|
.array(z.enum(["MO", "TU", "WE", "TH", "FR", "SA", "SU"]))
|
|
1609
1608
|
.describe("Days of the week when the meeting occurs (for WEEKLY frequency)")
|
|
1610
1609
|
.optional(),
|
|
1611
1610
|
byMonthDay: z
|
|
1612
|
-
.array(z.number().
|
|
1611
|
+
.array(z.number().gte(1).lte(31))
|
|
1613
1612
|
.describe("Days of the month when the meeting occurs (for MONTHLY frequency)")
|
|
1614
1613
|
.optional(),
|
|
1615
1614
|
})
|
|
@@ -3965,14 +3964,14 @@ export const githubGetBranchOutputSchema = z.object({
|
|
|
3965
3964
|
.describe("The commit tree")
|
|
3966
3965
|
.optional(),
|
|
3967
3966
|
url: z.string().describe("The commit URL").optional(),
|
|
3968
|
-
comment_count: z.number().
|
|
3967
|
+
comment_count: z.number().describe("Number of comments on the commit").optional(),
|
|
3969
3968
|
})
|
|
3970
3969
|
.describe("The git commit object")
|
|
3971
3970
|
.optional(),
|
|
3972
3971
|
author: z
|
|
3973
3972
|
.object({
|
|
3974
3973
|
login: z.string().optional(),
|
|
3975
|
-
id: z.number().
|
|
3974
|
+
id: z.number().optional(),
|
|
3976
3975
|
node_id: z.string().optional(),
|
|
3977
3976
|
avatar_url: z.string().optional(),
|
|
3978
3977
|
html_url: z.string().optional(),
|
|
@@ -3984,7 +3983,7 @@ export const githubGetBranchOutputSchema = z.object({
|
|
|
3984
3983
|
committer: z
|
|
3985
3984
|
.object({
|
|
3986
3985
|
login: z.string().optional(),
|
|
3987
|
-
id: z.number().
|
|
3986
|
+
id: z.number().optional(),
|
|
3988
3987
|
node_id: z.string().optional(),
|
|
3989
3988
|
avatar_url: z.string().optional(),
|
|
3990
3989
|
html_url: z.string().optional(),
|
|
@@ -4216,7 +4215,7 @@ export const gitlabSearchGroupOutputSchema = z.object({
|
|
|
4216
4215
|
.optional(),
|
|
4217
4216
|
});
|
|
4218
4217
|
export const gitlabGetFileContentParamsSchema = z.object({
|
|
4219
|
-
project_id: z.number().
|
|
4218
|
+
project_id: z.number().describe("Numeric project ID in GitLab (unique per project)"),
|
|
4220
4219
|
path: z.string().describe("The file path to get content from (e.g., src/index.js)"),
|
|
4221
4220
|
ref: z
|
|
4222
4221
|
.string()
|
|
@@ -8,30 +8,28 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
|
8
8
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
9
|
});
|
|
10
10
|
};
|
|
11
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
-
};
|
|
14
11
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
-
const
|
|
16
|
-
function
|
|
17
|
-
|
|
12
|
+
const axiosClient_1 = require("../../util/axiosClient");
|
|
13
|
+
function getConfluenceRequestConfig(baseUrl, username, apiToken) {
|
|
14
|
+
return {
|
|
18
15
|
baseURL: baseUrl,
|
|
19
16
|
headers: {
|
|
20
17
|
Accept: "application/json",
|
|
21
|
-
// Tokens are associated with a specific user.
|
|
22
18
|
Authorization: `Basic ${Buffer.from(`${username}:${apiToken}`).toString("base64")}`,
|
|
23
19
|
},
|
|
24
|
-
}
|
|
25
|
-
return api;
|
|
20
|
+
};
|
|
26
21
|
}
|
|
27
22
|
const confluenceUpdatePage = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
|
|
28
|
-
const { pageId,
|
|
29
|
-
const { baseUrl, authToken } = authParams;
|
|
30
|
-
|
|
23
|
+
const { pageId, content, title } = params;
|
|
24
|
+
const { baseUrl, authToken, username } = authParams;
|
|
25
|
+
if (!baseUrl || !authToken || !username) {
|
|
26
|
+
throw new Error("Missing required authentication information");
|
|
27
|
+
}
|
|
28
|
+
const config = getConfluenceRequestConfig(baseUrl, username, authToken);
|
|
31
29
|
// Get current version number
|
|
32
|
-
const response = yield
|
|
30
|
+
const response = yield axiosClient_1.axiosClient.get(`/api/v2/pages/${pageId}`, config);
|
|
33
31
|
const currVersion = response.data.version.number;
|
|
34
|
-
|
|
32
|
+
const payload = {
|
|
35
33
|
id: pageId,
|
|
36
34
|
status: "current",
|
|
37
35
|
title,
|
|
@@ -42,6 +40,7 @@ const confluenceUpdatePage = (_a) => __awaiter(void 0, [_a], void 0, function* (
|
|
|
42
40
|
version: {
|
|
43
41
|
number: currVersion + 1,
|
|
44
42
|
},
|
|
45
|
-
}
|
|
43
|
+
};
|
|
44
|
+
yield axiosClient_1.axiosClient.put(`/api/v2/pages/${pageId}`, payload, config);
|
|
46
45
|
});
|
|
47
46
|
exports.default = confluenceUpdatePage;
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
const fillTemplateAction = (_a) => __awaiter(void 0, [_a], void 0, function* ({ template }) {
|
|
13
|
+
// Simply return the template without any modification
|
|
14
|
+
return {
|
|
15
|
+
result: template,
|
|
16
|
+
};
|
|
17
|
+
});
|
|
18
|
+
exports.default = fillTemplateAction;
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
+
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
const axios_1 = __importDefault(require("axios"));
|
|
16
|
+
const genericApiCall = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, }) {
|
|
17
|
+
try {
|
|
18
|
+
const { endpoint, method, headers, body } = params;
|
|
19
|
+
const response = yield (0, axios_1.default)({
|
|
20
|
+
url: endpoint,
|
|
21
|
+
method,
|
|
22
|
+
headers,
|
|
23
|
+
data: method !== "GET" ? body : undefined,
|
|
24
|
+
});
|
|
25
|
+
return {
|
|
26
|
+
statusCode: response.status,
|
|
27
|
+
headers: response.headers,
|
|
28
|
+
data: response.data,
|
|
29
|
+
};
|
|
30
|
+
}
|
|
31
|
+
catch (error) {
|
|
32
|
+
if (axios_1.default.isAxiosError(error)) {
|
|
33
|
+
throw Error("Axios Error: " + (error.message || "Failed to make API call"));
|
|
34
|
+
}
|
|
35
|
+
throw Error("Error: " + (error || "Failed to make API call"));
|
|
36
|
+
}
|
|
37
|
+
});
|
|
38
|
+
exports.default = genericApiCall;
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
+
});
|
|
9
|
+
};
|
|
10
|
+
import pdf from "pdf-parse/lib/pdf-parse.js";
|
|
11
|
+
import { axiosClient } from "../../util/axiosClient.js";
|
|
12
|
+
import mammoth from "mammoth";
|
|
13
|
+
import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
|
|
14
|
+
const getDriveFileContentByID = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
|
|
15
|
+
if (!authParams.authToken) {
|
|
16
|
+
return { success: false, error: MISSING_AUTH_TOKEN };
|
|
17
|
+
}
|
|
18
|
+
const { fileId, limit } = params;
|
|
19
|
+
try {
|
|
20
|
+
// First, get file metadata to determine the file type
|
|
21
|
+
const metadataUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?fields=name,mimeType,size`;
|
|
22
|
+
const metadataRes = yield axiosClient.get(metadataUrl, {
|
|
23
|
+
headers: {
|
|
24
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
25
|
+
},
|
|
26
|
+
});
|
|
27
|
+
const { name: fileName, mimeType, size } = metadataRes.data;
|
|
28
|
+
// Check if file is too large (50MB limit for safety)
|
|
29
|
+
if (size && parseInt(size) > 50 * 1024 * 1024) {
|
|
30
|
+
return {
|
|
31
|
+
success: false,
|
|
32
|
+
error: "File too large (>50MB)",
|
|
33
|
+
};
|
|
34
|
+
}
|
|
35
|
+
let content = "";
|
|
36
|
+
// Handle different file types - read content directly
|
|
37
|
+
if (mimeType === "application/vnd.google-apps.document") {
|
|
38
|
+
// Google Docs - download as plain text
|
|
39
|
+
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media&format=txt`;
|
|
40
|
+
const downloadRes = yield axiosClient.get(downloadUrl, {
|
|
41
|
+
headers: {
|
|
42
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
43
|
+
},
|
|
44
|
+
responseType: 'text',
|
|
45
|
+
});
|
|
46
|
+
content = downloadRes.data;
|
|
47
|
+
}
|
|
48
|
+
else if (mimeType === "application/vnd.google-apps.spreadsheet") {
|
|
49
|
+
// Google Sheets - download as CSV
|
|
50
|
+
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media&format=csv`;
|
|
51
|
+
const downloadRes = yield axiosClient.get(downloadUrl, {
|
|
52
|
+
headers: {
|
|
53
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
54
|
+
},
|
|
55
|
+
responseType: 'text',
|
|
56
|
+
});
|
|
57
|
+
content = downloadRes.data;
|
|
58
|
+
}
|
|
59
|
+
else if (mimeType === "application/vnd.google-apps.presentation") {
|
|
60
|
+
// Google Slides - download as plain text
|
|
61
|
+
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media&format=txt`;
|
|
62
|
+
const downloadRes = yield axiosClient.get(downloadUrl, {
|
|
63
|
+
headers: {
|
|
64
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
65
|
+
},
|
|
66
|
+
responseType: 'text',
|
|
67
|
+
});
|
|
68
|
+
content = downloadRes.data;
|
|
69
|
+
}
|
|
70
|
+
else if (mimeType === "application/pdf") {
|
|
71
|
+
// PDF files - use pdf-parse
|
|
72
|
+
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media`;
|
|
73
|
+
const downloadRes = yield axiosClient.get(downloadUrl, {
|
|
74
|
+
headers: {
|
|
75
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
76
|
+
},
|
|
77
|
+
responseType: 'arraybuffer',
|
|
78
|
+
});
|
|
79
|
+
try {
|
|
80
|
+
const pdfData = yield pdf(downloadRes.data);
|
|
81
|
+
content = pdfData.text;
|
|
82
|
+
}
|
|
83
|
+
catch (pdfError) {
|
|
84
|
+
return {
|
|
85
|
+
success: false,
|
|
86
|
+
error: `Failed to parse PDF: ${pdfError instanceof Error ? pdfError.message : 'Unknown PDF error'}`,
|
|
87
|
+
};
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
else if (mimeType === "application/vnd.openxmlformats-officedocument.wordprocessingml.document" ||
|
|
91
|
+
mimeType === "application/msword") {
|
|
92
|
+
// Word documents (.docx or .doc) - download and extract text using mammoth
|
|
93
|
+
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media`;
|
|
94
|
+
const downloadRes = yield axiosClient.get(downloadUrl, {
|
|
95
|
+
headers: {
|
|
96
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
97
|
+
},
|
|
98
|
+
responseType: 'arraybuffer',
|
|
99
|
+
});
|
|
100
|
+
try {
|
|
101
|
+
// mammoth works with .docx files. It will ignore formatting and return raw text
|
|
102
|
+
const result = yield mammoth.extractRawText({ buffer: Buffer.from(downloadRes.data) });
|
|
103
|
+
content = result.value; // raw text
|
|
104
|
+
}
|
|
105
|
+
catch (wordError) {
|
|
106
|
+
return {
|
|
107
|
+
success: false,
|
|
108
|
+
error: `Failed to parse Word document: ${wordError instanceof Error ? wordError.message : 'Unknown Word error'}`,
|
|
109
|
+
};
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
else if (mimeType === "text/plain" ||
|
|
113
|
+
mimeType === "text/html" ||
|
|
114
|
+
mimeType === "application/rtf" ||
|
|
115
|
+
(mimeType === null || mimeType === void 0 ? void 0 : mimeType.startsWith("text/"))) {
|
|
116
|
+
// Text-based files
|
|
117
|
+
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media`;
|
|
118
|
+
const downloadRes = yield axiosClient.get(downloadUrl, {
|
|
119
|
+
headers: {
|
|
120
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
121
|
+
},
|
|
122
|
+
responseType: 'text',
|
|
123
|
+
});
|
|
124
|
+
content = downloadRes.data;
|
|
125
|
+
}
|
|
126
|
+
else if (mimeType === null || mimeType === void 0 ? void 0 : mimeType.startsWith("image/")) {
|
|
127
|
+
// Skip images
|
|
128
|
+
return {
|
|
129
|
+
success: false,
|
|
130
|
+
error: "Image files are not supported for text extraction",
|
|
131
|
+
};
|
|
132
|
+
}
|
|
133
|
+
else {
|
|
134
|
+
// Unsupported file type
|
|
135
|
+
return {
|
|
136
|
+
success: false,
|
|
137
|
+
error: `Unsupported file type: ${mimeType}`,
|
|
138
|
+
};
|
|
139
|
+
}
|
|
140
|
+
content = content.trim();
|
|
141
|
+
const originalLength = content.length;
|
|
142
|
+
// Naive way to truncate content
|
|
143
|
+
if (limit && content.length > limit) {
|
|
144
|
+
content = content.substring(0, limit);
|
|
145
|
+
}
|
|
146
|
+
return {
|
|
147
|
+
success: true,
|
|
148
|
+
content,
|
|
149
|
+
fileName,
|
|
150
|
+
fileLength: originalLength,
|
|
151
|
+
};
|
|
152
|
+
}
|
|
153
|
+
catch (error) {
|
|
154
|
+
console.error("Error getting Google Drive file content", error);
|
|
155
|
+
return {
|
|
156
|
+
success: false,
|
|
157
|
+
error: error instanceof Error ? error.message : "Unknown error",
|
|
158
|
+
};
|
|
159
|
+
}
|
|
160
|
+
});
|
|
161
|
+
export default getDriveFileContentByID;
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
+
});
|
|
9
|
+
};
|
|
10
|
+
import { axiosClient } from "../../util/axiosClient.js";
|
|
11
|
+
import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
|
|
12
|
+
const searchDriveByKeywords = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
|
|
13
|
+
var _b;
|
|
14
|
+
if (!authParams.authToken) {
|
|
15
|
+
return { success: false, error: MISSING_AUTH_TOKEN, files: [] };
|
|
16
|
+
}
|
|
17
|
+
const { keywords, limit } = params;
|
|
18
|
+
// Build the query: fullText contains 'keyword1' or fullText contains 'keyword2' ...
|
|
19
|
+
const query = keywords.map(kw => `fullText contains '${kw.replace(/'/g, "\\'")}'`).join(" or ");
|
|
20
|
+
const url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id,name,mimeType,webViewLink)&supportsAllDrives=true&includeItemsFromAllDrives=true`;
|
|
21
|
+
// 1. Get the file metadata from google drive search
|
|
22
|
+
let files = [];
|
|
23
|
+
try {
|
|
24
|
+
const res = yield axiosClient.get(url, {
|
|
25
|
+
headers: {
|
|
26
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
27
|
+
},
|
|
28
|
+
});
|
|
29
|
+
files =
|
|
30
|
+
((_b = res.data.files) === null || _b === void 0 ? void 0 : _b.map((file) => ({
|
|
31
|
+
id: file.id || "",
|
|
32
|
+
name: file.name || "",
|
|
33
|
+
mimeType: file.mimeType || "",
|
|
34
|
+
url: file.webViewLink || "",
|
|
35
|
+
}))) || [];
|
|
36
|
+
}
|
|
37
|
+
catch (error) {
|
|
38
|
+
console.error("Error searching Google Drive", error);
|
|
39
|
+
return {
|
|
40
|
+
success: false,
|
|
41
|
+
error: error instanceof Error ? error.message : "Unknown error",
|
|
42
|
+
files: [],
|
|
43
|
+
};
|
|
44
|
+
}
|
|
45
|
+
files = limit ? files.splice(0, limit) : files;
|
|
46
|
+
});
|
|
47
|
+
export default searchDriveByKeywords;
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
+
});
|
|
9
|
+
};
|
|
10
|
+
import { axiosClient } from "../../util/axiosClient.js";
|
|
11
|
+
import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
|
|
12
|
+
import extractContentFromDriveFileId from "./utils/extractContentFromDriveFileId.js";
|
|
13
|
+
import { normalizeText } from "../../../utils/string.js";
|
|
14
|
+
/** Intelligently selects a section of text around the median occurrence of keywords */
|
|
15
|
+
const intelligentSelectByMedianSection = (text, keywords, limit) => {
|
|
16
|
+
if (!text || text.length <= limit)
|
|
17
|
+
return text;
|
|
18
|
+
if (!(keywords === null || keywords === void 0 ? void 0 : keywords.length))
|
|
19
|
+
return text.substring(0, limit);
|
|
20
|
+
// Find all keyword positions (case-insensitive, limited to first 1000 matches)
|
|
21
|
+
const positions = [];
|
|
22
|
+
const normalizedText = normalizeText(text);
|
|
23
|
+
for (const keyword of keywords) {
|
|
24
|
+
if (keyword.length < 3)
|
|
25
|
+
continue; // Skip very short keywords
|
|
26
|
+
let pos = -1;
|
|
27
|
+
const normalizedKeyword = normalizeText(keyword);
|
|
28
|
+
while ((pos = normalizedText.indexOf(normalizedKeyword, pos + 1)) !== -1 && positions.length < 1000) {
|
|
29
|
+
positions.push(pos);
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
if (!positions.length)
|
|
33
|
+
return text.substring(0, limit);
|
|
34
|
+
// Find median position (using sort for simplicity, still fast for 1000 elements)
|
|
35
|
+
positions.sort((a, b) => a - b);
|
|
36
|
+
const medianPos = positions[Math.floor(positions.length / 2)];
|
|
37
|
+
// Return window around median
|
|
38
|
+
const half = Math.floor(limit / 2);
|
|
39
|
+
const start = Math.max(0, medianPos - half);
|
|
40
|
+
const end = Math.min(text.length, start + limit);
|
|
41
|
+
return text.substring(start, end);
|
|
42
|
+
};
|
|
43
|
+
const searchDriveAndGetContentByKeywords = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
|
|
44
|
+
var _b;
|
|
45
|
+
if (!authParams.authToken) {
|
|
46
|
+
return { success: false, error: MISSING_AUTH_TOKEN, files: [] };
|
|
47
|
+
}
|
|
48
|
+
const { keywords, fileLimit, fileSizeLimit } = params;
|
|
49
|
+
let files = [];
|
|
50
|
+
// 1. Search for files and get their metadata
|
|
51
|
+
// Build the query: fullText contains 'keyword1' or fullText contains 'keyword2' ...
|
|
52
|
+
const query = keywords.map(kw => `fullText contains '${kw.replace(/'/g, "\\'")}'`).join(" or ");
|
|
53
|
+
const url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id,name,mimeType,webViewLink)&supportsAllDrives=true&includeItemsFromAllDrives=true`;
|
|
54
|
+
try {
|
|
55
|
+
const res = yield axiosClient.get(url, {
|
|
56
|
+
headers: {
|
|
57
|
+
Authorization: `Bearer ${authParams.authToken}`,
|
|
58
|
+
},
|
|
59
|
+
});
|
|
60
|
+
files =
|
|
61
|
+
((_b = res.data.files) === null || _b === void 0 ? void 0 : _b.map((file) => ({
|
|
62
|
+
id: file.id,
|
|
63
|
+
name: file.name,
|
|
64
|
+
mimeType: file.mimeType,
|
|
65
|
+
url: file.webViewLink,
|
|
66
|
+
}))) || [];
|
|
67
|
+
}
|
|
68
|
+
catch (error) {
|
|
69
|
+
console.error("Error searching Google Drive", error);
|
|
70
|
+
return {
|
|
71
|
+
success: false,
|
|
72
|
+
error: error instanceof Error ? error.message : "Unknown error",
|
|
73
|
+
files: [],
|
|
74
|
+
};
|
|
75
|
+
}
|
|
76
|
+
files = fileLimit ? files.splice(0, fileLimit) : files;
|
|
77
|
+
// 2. Extract content from files and do some smart range selection
|
|
78
|
+
const processedFiles = yield Promise.all(files
|
|
79
|
+
.filter((file) => file.id && file.mimeType)
|
|
80
|
+
.map((file) => __awaiter(void 0, void 0, void 0, function* () {
|
|
81
|
+
const content = yield extractContentFromDriveFileId({
|
|
82
|
+
params: { fileId: file.id, mimeType: file.mimeType },
|
|
83
|
+
authParams,
|
|
84
|
+
});
|
|
85
|
+
if (content.success) {
|
|
86
|
+
let selectedContent = content.content;
|
|
87
|
+
if (fileSizeLimit && selectedContent && selectedContent.length > fileSizeLimit) {
|
|
88
|
+
selectedContent = intelligentSelectByMedianSection(selectedContent, keywords, fileSizeLimit);
|
|
89
|
+
}
|
|
90
|
+
return {
|
|
91
|
+
id: file.id || "",
|
|
92
|
+
name: file.name || "",
|
|
93
|
+
mimeType: file.mimeType || "",
|
|
94
|
+
url: file.url || "",
|
|
95
|
+
content: selectedContent,
|
|
96
|
+
};
|
|
97
|
+
}
|
|
98
|
+
else {
|
|
99
|
+
return {
|
|
100
|
+
id: file.id || "",
|
|
101
|
+
name: file.name || "",
|
|
102
|
+
mimeType: file.mimeType || "",
|
|
103
|
+
url: file.url || "",
|
|
104
|
+
error: content.error,
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
})));
|
|
108
|
+
return { success: true, files: processedFiles };
|
|
109
|
+
});
|
|
110
|
+
export default searchDriveAndGetContentByKeywords;
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
+
});
|
|
9
|
+
};
|
|
10
|
+
import { axiosClient } from "../../util/axiosClient.js";
|
|
11
|
+
import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
|
|
12
|
+
import extractContentFromDriveFileId from "./utils/extractContentFromDriveFileId.js";
|
|
13
|
+
/**
 * Searches Google Drive with a raw query string, then fetches the (optionally
 * truncated) text content of each matching file.
 *
 * @param params.query         Drive search query (sent as the `q` parameter).
 * @param params.fileLimit     Optional max number of files to process.
 * @param params.fileSizeLimit Optional max characters of content per file.
 * @param authParams.authToken OAuth bearer token for the Drive API.
 * @returns { success, files[, error] } — each file carries id/name/mimeType/url
 *          plus either `content` (per-file success) or `error` (per-file failure).
 */
const searchDriveAndGetContentByQuery = async ({ params, authParams, }) => {
    if (!authParams.authToken) {
        return { success: false, error: MISSING_AUTH_TOKEN, files: [] };
    }
    const { query, fileLimit, fileSizeLimit } = params;
    // 1. Search for files and get their metadata
    const url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id,name,mimeType,webViewLink)&supportsAllDrives=true&includeItemsFromAllDrives=true&corpora=allDrives`;
    let files = [];
    try {
        const res = await axiosClient.get(url, {
            headers: {
                Authorization: `Bearer ${authParams.authToken}`,
            },
        });
        files = (res.data.files || []).map((file) => ({
            id: file.id,
            name: file.name,
            mimeType: file.mimeType,
            url: file.webViewLink,
        }));
    }
    catch (error) {
        console.error("Error searching Google Drive", error);
        return {
            success: false,
            error: error instanceof Error ? error.message : "Unknown error",
            files: [],
        };
    }
    // slice() instead of splice(): same result here, but non-mutating.
    files = fileLimit ? files.slice(0, fileLimit) : files;
    // 2. Extract content from files and do some smart range selection
    const processedFiles = await Promise.all(files
        .filter((file) => file.id && file.mimeType)
        .map(async (file) => {
        const content = await extractContentFromDriveFileId({
            params: { fileId: file.id, mimeType: file.mimeType },
            authParams,
        });
        // Metadata shared by both the success and failure shapes.
        const base = {
            id: file.id || "",
            name: file.name || "",
            mimeType: file.mimeType || "",
            url: file.url || "",
        };
        if (!content.success) {
            return { ...base, error: content.error };
        }
        let selectedContent = content.content;
        if (fileSizeLimit && selectedContent && selectedContent.length > fileSizeLimit) {
            selectedContent = selectedContent.substring(0, fileSizeLimit);
        }
        return { ...base, content: selectedContent };
    }));
    return { success: true, files: processedFiles };
};
export default searchDriveAndGetContentByQuery;
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
// Type declarations for the Drive file-content extraction helper.
import type { AuthParamsType } from "../../../autogen/types.js";
/** Input for extractContentFromDriveFileId. */
export type getDriveFileContentParams = {
    /** Google Drive file id to fetch. */
    fileId: string;
    /** MIME type of the file; selects the extraction strategy. */
    mimeType: string;
};
/** Result of a content extraction attempt. */
export type getDriveFileContentOutput = {
    /** True when text was extracted successfully. */
    success: boolean;
    /** Extracted plain text (set when success is true). */
    content?: string;
    /** Failure description (set when success is false). */
    error?: string;
};
/**
 * Downloads a Google Drive file and extracts its plain-text content.
 * Requires authParams.authToken (OAuth bearer token for the Drive API).
 */
declare const extractContentFromDriveFileId: ({ params, authParams, }: {
    params: getDriveFileContentParams;
    authParams: AuthParamsType;
}) => Promise<getDriveFileContentOutput>;
export default extractContentFromDriveFileId;
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
// TypeScript-compiler-generated async helper (equivalent to tslib's __awaiter):
// drives a generator-based coroutine to completion, resolving the returned
// Promise with the generator's final value. Do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
10
|
+
import { axiosClient } from "../../../util/axiosClient.js";
|
|
11
|
+
import mammoth from "mammoth";
|
|
12
|
+
import { MISSING_AUTH_TOKEN } from "../../../util/missingAuthConstants.js";
|
|
13
|
+
/**
 * Downloads and extracts the plain-text content of a Google Drive file.
 *
 * Google-native formats (Docs/Sheets/Slides) go through the Drive export
 * endpoint; Word documents are parsed with mammoth; text-like files are
 * downloaded directly. PDFs, images and other binary formats are rejected
 * with a descriptive error.
 *
 * @param params.fileId        Drive file id.
 * @param params.mimeType      MIME type of the file (drives the extraction path).
 * @param authParams.authToken OAuth bearer token for the Drive API.
 * @returns { success: true, content } or { success: false, error }.
 */
const extractContentFromDriveFileId = async ({ params, authParams, }) => {
    if (!authParams.authToken) {
        return { success: false, error: MISSING_AUTH_TOKEN };
    }
    const { fileId, mimeType } = params;
    // Shared authorized GET against the Drive API (was copy-pasted per branch).
    const driveGet = (requestUrl, responseType) => axiosClient.get(requestUrl, {
        headers: {
            Authorization: `Bearer ${authParams.authToken}`,
        },
        responseType,
    });
    const exportUrl = (exportMimeType) => `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}/export?mimeType=${exportMimeType}`;
    const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media`;
    let content = "";
    try {
        // Handle different file types - read content directly
        if (mimeType === "application/vnd.google-apps.document") {
            // Google Docs - download as plain text
            const exportRes = await driveGet(exportUrl("text/plain"), "text");
            content = exportRes.data;
        }
        else if (mimeType === "application/vnd.google-apps.spreadsheet") {
            // Google Sheets - download as CSV, then clean up excessive commas
            // from empty columns. NOTE(review): collapsing comma runs also merges
            // genuinely empty cells, so column alignment is not preserved exactly.
            const exportRes = await driveGet(exportUrl("text/csv"), "text");
            content = exportRes.data
                .split("\n")
                .map((line) => line.replace(/,+$/, "")) // Remove trailing commas
                .map((line) => line.replace(/,{2,}/g, ",")) // Replace multiple commas with single comma
                .join("\n");
        }
        else if (mimeType === "application/vnd.google-apps.presentation") {
            // Google Slides - download as plain text
            const exportRes = await driveGet(exportUrl("text/plain"), "text");
            content = exportRes.data;
        }
        else if (mimeType === "application/pdf") {
            return {
                success: false,
                error: "PDF files are not supported for text extraction",
            };
        }
        else if (mimeType === "application/vnd.openxmlformats-officedocument.wordprocessingml.document" ||
            mimeType === "application/msword") {
            // Word documents (.docx or .doc) - download and extract text using mammoth
            const downloadRes = await driveGet(downloadUrl, "arraybuffer");
            try {
                // mammoth works with .docx files. It will ignore formatting and return raw text
                const result = await mammoth.extractRawText({ buffer: Buffer.from(downloadRes.data) });
                content = result.value; // raw text
            }
            catch (wordError) {
                return {
                    success: false,
                    error: `Failed to parse Word document: ${wordError instanceof Error ? wordError.message : "Unknown Word error"}`,
                };
            }
        }
        else if (mimeType === "text/plain" ||
            mimeType === "text/html" ||
            mimeType === "application/rtf" ||
            (mimeType && mimeType.startsWith("text/"))) {
            // Text-based files - download as-is
            const downloadRes = await driveGet(downloadUrl, "text");
            content = downloadRes.data;
        }
        else if (mimeType && mimeType.startsWith("image/")) {
            // Skip images
            return {
                success: false,
                error: "Image files are not supported for text extraction",
            };
        }
        else {
            // Unsupported file type
            return {
                success: false,
                error: `Unsupported file type: ${mimeType}`,
            };
        }
        return {
            success: true,
            content: content.trim(),
        };
    }
    catch (error) {
        console.error("Error getting Google Drive file content", error);
        return {
            success: false,
            error: error instanceof Error ? error.message : "Unknown error",
        };
    }
};
export default extractContentFromDriveFileId;
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
"use strict";
// TypeScript-compiler-generated async helper (equivalent to tslib's __awaiter):
// drives a generator-based coroutine to completion as a Promise. Do not edit.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-compiler-generated CommonJS interop helper: wraps non-ESM modules
// so that `.default` access works uniformly.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
const axios_1 = __importDefault(require("axios"));
|
|
16
|
+
const types_1 = require("../../autogen/types");
|
|
17
|
+
// Place types sent to the Places API searchNearby request (includedTypes):
// cultural / sightseeing venues plus food, drink, and entertainment categories.
const INCLUDED_TYPES = [
    "monument",
    "museum",
    "art_gallery",
    "sculpture",
    "cultural_landmark",
    "historical_place",
    "performing_arts_theater",
    "university",
    "aquarium",
    "botanical_garden",
    "comedy_club",
    "park",
    "movie_theater",
    "national_park",
    "garden",
    "night_club",
    "tourist_attraction",
    "water_park",
    "zoo",
    "bar",
    "restaurant",
    "food_court",
    "bakery",
    "cafe",
    "coffee_shop",
    "pub",
    "wine_bar",
    "spa",
    "beach",
    "market",
    "shopping_mall",
    "stadium",
];
|
|
51
|
+
/**
 * Searches the Google Places API (v1 places:searchNearby) for points of
 * interest within a 10 km radius of the given coordinates.
 *
 * @param params.latitude   Latitude of the search center.
 * @param params.longitude  Longitude of the search center.
 * @param authParams.apiKey Google Maps Platform API key.
 * @returns Results validated against googlemapsNearbysearchOutputSchema.
 */
const nearbysearch = async ({ params, authParams, }) => {
    const url = `https://places.googleapis.com/v1/places:searchNearby`;
    // Field mask is required by Places API v1; request only what we consume.
    const fieldMask = [
        "places.displayName",
        "places.formattedAddress",
        "places.priceLevel",
        "places.rating",
        "places.primaryTypeDisplayName",
        "places.editorialSummary",
        "places.regularOpeningHours",
    ].join(",");
    const response = await axios_1.default.post(url, {
        maxResultCount: 20,
        includedTypes: INCLUDED_TYPES,
        locationRestriction: {
            circle: {
                center: {
                    latitude: params.latitude,
                    longitude: params.longitude,
                },
                radius: 10000,
            },
        },
    }, {
        headers: {
            "X-Goog-Api-Key": authParams.apiKey,
            "X-Goog-FieldMask": fieldMask,
            "Content-Type": "application/json",
        },
    });
    // The API omits `places` entirely when there are no results, and several
    // per-place fields (primaryTypeDisplayName, editorialSummary,
    // regularOpeningHours) are optional — guard them all so one sparse place
    // does not crash the whole mapping.
    const places = response.data.places || [];
    return types_1.googlemapsNearbysearchOutputSchema.parse({
        results: places.map((place) => ({
            name: place.displayName && place.displayName.text,
            address: place.formattedAddress,
            priceLevel: place.priceLevel,
            rating: place.rating,
            primaryType: place.primaryTypeDisplayName && place.primaryTypeDisplayName.text,
            editorialSummary: (place.editorialSummary && place.editorialSummary.text) || "",
            openingHours: (place.regularOpeningHours &&
                place.regularOpeningHours.weekdayDescriptions &&
                place.regularOpeningHours.weekdayDescriptions.join("\n")) ||
                "",
        })),
    });
};
exports.default = nearbysearch;
|
|
@@ -10,7 +10,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
|
10
10
|
import { WebClient } from "@slack/web-api";
|
|
11
11
|
import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
|
|
12
12
|
import pLimit from "p-limit";
|
|
13
|
-
const HIT_ENRICH_POOL =
|
|
13
|
+
const HIT_ENRICH_POOL = 5;
|
|
14
14
|
const limitHit = pLimit(HIT_ENRICH_POOL);
|
|
15
15
|
class SlackUserCache {
|
|
16
16
|
constructor(client) {
|
|
@@ -47,16 +47,21 @@ function normalizeChannelOperand(ch) {
|
|
|
47
47
|
return s;
|
|
48
48
|
return s.replace(/^#/, "");
|
|
49
49
|
}
|
|
50
|
+
/**
 * Formats the date `n` days before now as YYYY-MM-DD.
 * NOTE(review): day arithmetic uses local time (setDate/getDate) while the
 * formatting uses UTC (toISOString), so near local midnight in non-UTC
 * timezones the result can be off by one day — confirm this is acceptable
 * for Slack's date filters.
 */
function fmtDaysAgo(n) {
    const when = new Date();
    when.setDate(when.getDate() - n);
    const [isoDate] = when.toISOString().split("T");
    return isoDate;
}
|
|
50
55
|
function timeFilter(range) {
|
|
51
56
|
switch (range) {
|
|
52
57
|
case "today":
|
|
53
|
-
return "
|
|
58
|
+
return "on:today";
|
|
54
59
|
case "yesterday":
|
|
55
|
-
return "
|
|
60
|
+
return "on:yesterday";
|
|
56
61
|
case "last_7d":
|
|
57
|
-
return
|
|
62
|
+
return `after:${fmtDaysAgo(7)}`;
|
|
58
63
|
case "last_30d":
|
|
59
|
-
return
|
|
64
|
+
return `after:${fmtDaysAgo(30)}`;
|
|
60
65
|
default:
|
|
61
66
|
return "";
|
|
62
67
|
}
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
+
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
const snowflake_sdk_1 = __importDefault(require("snowflake-sdk"));
|
|
16
|
+
const crypto_1 = __importDefault(require("crypto"));
|
|
17
|
+
const client_s3_1 = require("@aws-sdk/client-s3");
|
|
18
|
+
const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
|
|
19
|
+
const uuid_1 = require("uuid");
|
|
20
|
+
// Only log errors.
|
|
21
|
+
snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
|
|
22
|
+
/**
 * Runs a SQL query against Snowflake (key-pair / JWT auth), formats the rows
 * as JSON (default) or CSV, uploads the result to the given S3 bucket, and
 * returns a presigned URL valid for one hour.
 *
 * Required params: accountName, user, databaseName, warehouse, query,
 * s3BucketName (s3Region configures the S3 client); outputFormat defaults to
 * "json". Required authParams: apiKey (the Snowflake private key, PEM),
 * awsAccessKeyId, awsSecretAccessKey.
 *
 * Throws on missing inputs, connection failure, query failure, or upload
 * failure; the Snowflake connection is torn down on both paths.
 */
const runSnowflakeQueryWriteResultsToS3 = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
    const { databaseName, warehouse, query, user, accountName, s3BucketName, s3Region, outputFormat = "json" } = params;
    // authParams.apiKey carries the Snowflake private key (PEM-encoded).
    const { apiKey: privateKey, awsAccessKeyId, awsSecretAccessKey } = authParams;
    if (!privateKey) {
        throw new Error("Snowflake private key is required");
    }
    if (!awsAccessKeyId || !awsSecretAccessKey) {
        throw new Error("AWS credentials are required");
    }
    if (!accountName || !user || !databaseName || !warehouse || !query || !s3BucketName) {
        throw new Error("Missing required parameters for Snowflake query or S3 destination");
    }
    // Re-encodes the supplied PEM key as unencrypted PKCS#8 for the SDK.
    const getPrivateKeyCorrectFormat = (privateKey) => {
        const buffer = Buffer.from(privateKey);
        const privateKeyObject = crypto_1.default.createPrivateKey({
            key: buffer,
            format: "pem",
            // NOTE(review): passphrase is hardcoded — this assumes every supplied
            // key is encrypted with "password"; confirm against key issuance.
            passphrase: "password",
        });
        const privateKeyCorrectFormat = privateKeyObject.export({
            format: "pem",
            type: "pkcs8",
        });
        return privateKeyCorrectFormat.toString();
    };
    // Executes the query (closure over `connection`, created below) and renders
    // rows into the requested output format.
    const executeQueryAndFormatData = () => __awaiter(void 0, void 0, void 0, function* () {
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const queryResults = yield new Promise((resolve, reject) => {
            connection.execute({
                sqlText: query,
                complete: (err, stmt, rows) => {
                    if (err) {
                        return reject(err);
                    }
                    return resolve(rows || []);
                },
            });
        });
        // Format the results based on the output format
        let formattedData;
        if (outputFormat.toLowerCase() === "csv") {
            if (queryResults.length === 0) {
                formattedData = "";
            }
            else {
                // Header row comes from the first result row's keys.
                const headers = Object.keys(queryResults[0]).join(",");
                // NOTE(review): values are joined with bare commas — strings that
                // themselves contain commas/newlines are not quoted, so this is
                // not strict RFC 4180 CSV.
                const rows = queryResults.map(row => Object.values(row)
                    .map(value => (typeof value === "object" && value !== null ? JSON.stringify(value) : value))
                    .join(","));
                formattedData = [headers, ...rows].join("\n");
            }
        }
        else {
            // Default to JSON
            formattedData = JSON.stringify(queryResults, null, 2);
        }
        return { formattedData, resultsLength: queryResults.length };
    });
    // Uploads the formatted payload to S3 and returns a 1-hour presigned URL.
    const uploadToS3AndGetURL = (formattedData) => __awaiter(void 0, void 0, void 0, function* () {
        // Create S3 client
        const s3Client = new client_s3_1.S3Client({
            region: s3Region,
            credentials: {
                accessKeyId: awsAccessKeyId,
                secretAccessKey: awsSecretAccessKey,
            },
        });
        const contentType = outputFormat.toLowerCase() === "csv" ? "text/csv" : "application/json";
        const fileExtension = outputFormat.toLowerCase() === "csv" ? "csv" : "json";
        // Random object key under a per-database prefix to avoid collisions.
        const finalKey = `${databaseName}/${(0, uuid_1.v4)()}.${fileExtension}`;
        // Upload to S3 without ACL
        const uploadCommand = new client_s3_1.PutObjectCommand({
            Bucket: s3BucketName,
            Key: finalKey,
            Body: formattedData,
            ContentType: contentType,
        });
        yield s3Client.send(uploadCommand);
        // Generate a presigned URL (valid for an hour)
        const getObjectCommand = new client_s3_1.GetObjectCommand({
            Bucket: s3BucketName,
            Key: finalKey,
        });
        const presignedUrl = yield (0, s3_request_presigner_1.getSignedUrl)(s3Client, getObjectCommand, { expiresIn: 3600 });
        return presignedUrl;
    });
    // Process the private key
    const privateKeyCorrectFormatString = getPrivateKeyCorrectFormat(privateKey);
    // Set up a connection using snowflake-sdk
    const connection = snowflake_sdk_1.default.createConnection({
        account: accountName,
        username: user,
        privateKey: privateKeyCorrectFormatString,
        authenticator: "SNOWFLAKE_JWT",
        // NOTE(review): role is hardcoded to ACCOUNTADMIN — confirm a less
        // privileged role is not sufficient for these queries.
        role: "ACCOUNTADMIN",
        warehouse: warehouse,
        database: databaseName,
    });
    try {
        // Connect to Snowflake
        yield new Promise((resolve, reject) => {
            connection.connect((err, conn) => {
                if (err) {
                    console.error("Unable to connect to Snowflake:", err.message);
                    return reject(err);
                }
                resolve(conn);
            });
        });
        const { formattedData, resultsLength } = yield executeQueryAndFormatData();
        const presignedUrl = yield uploadToS3AndGetURL(formattedData);
        // Return fields to match schema definition
        // Best-effort disconnect: not awaited; errors are only logged.
        connection.destroy(err => {
            if (err) {
                console.log("Failed to disconnect from Snowflake:", err);
            }
        });
        return {
            bucketUrl: presignedUrl,
            message: `Query results successfully written to S3. URL valid for 1 hour.`,
            rowCount: resultsLength,
        };
    }
    catch (error) {
        // Tear the connection down on failure too, then surface the error.
        connection.destroy(err => {
            if (err) {
                console.log("Failed to disconnect from Snowflake:", err);
            }
        });
        throw Error(`An error occurred: ${error}`);
    }
});
exports.default = runSnowflakeQueryWriteResultsToS3;
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
+
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
const firecrawl_js_1 = __importDefault(require("@mendable/firecrawl-js"));
|
|
16
|
+
/**
 * Scrapes the text of a tweet by rewriting its twitter.com / x.com URL to the
 * nitter.net mirror and fetching that page through Firecrawl.
 * Throws on an invalid tweet URL, a missing API key, or a scrape failure.
 */
const scrapeTweetDataWithNitter = async ({ params, authParams, }) => {
    const tweetUrlPattern = /^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)\/([a-zA-Z0-9_]+)\/status\/(\d+)(?:\?.*)?$/;
    if (!tweetUrlPattern.test(params.tweetUrl)) {
        throw new Error("Invalid tweet URL. Expected format: https://twitter.com/username/status/id or https://x.com/username/status/id");
    }
    // Point the same tweet path at the Nitter mirror.
    const mirrorUrl = params.tweetUrl.replace(/^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)/i, "https://nitter.net");
    // Initialize Firecrawl
    if (!authParams.apiKey) {
        throw new Error("API key is required for X+Nitter+Firecrawl");
    }
    const crawler = new firecrawl_js_1.default({
        apiKey: authParams.apiKey,
    });
    try {
        // Scrape the Nitter URL
        const scraped = await crawler.scrapeUrl(mirrorUrl);
        if (!scraped.success) {
            throw new Error(`Failed to scrape tweet: ${scraped.error || "Unknown error"}`);
        }
        // Simple extraction: return the page markdown wholesale. In practice a
        // more robust parse of the Nitter HTML structure may be needed.
        return {
            text: scraped.markdown || "Error scraping with firecrawl",
        };
    }
    catch (error) {
        throw new Error(`Error scraping tweet: ${error instanceof Error ? error.message : error}`);
    }
};
exports.default = scrapeTweetDataWithNitter;
|
package/dist/utils/pdf.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export declare function extractTextFromPdf(
|
|
1
|
+
export declare function extractTextFromPdf(buffer: ArrayBuffer): Promise<string>;
|
package/dist/utils/pdf.js
CHANGED
|
@@ -7,23 +7,34 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
|
7
7
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
8
|
});
|
|
9
9
|
};
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
export function extractTextFromPdf(input) {
|
|
10
|
+
import PDFParser from "pdf2json";
|
|
11
|
+
/**
 * Extracts the text of every page in a PDF using pdf2json.
 * Text runs within a page are concatenated; pages are joined with newlines.
 * Logs and rethrows any parsing failure.
 */
export async function extractTextFromPdf(buffer) {
    try {
        return await new Promise((resolve, reject) => {
            const parser = new PDFParser();
            parser.on("pdfParser_dataError", (errData) => reject(errData.parserError || new Error("PDF parsing failed")));
            parser.on("pdfParser_dataReady", (pdfData) => {
                try {
                    // Handle cases where R array might be empty or have multiple runs
                    const pages = pdfData.Pages.map((page) => page.Texts
                        .map((textItem) => textItem.R.map((run) => decodeURIComponent(run.T)).join(""))
                        .join(""));
                    resolve(pages.join("\n"));
                }
                catch (error) {
                    reject(error);
                }
            });
            parser.parseBuffer(Buffer.from(buffer));
        });
    }
    catch (error) {
        console.error("Error extracting PDF text:", error);
        throw error;
    }
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@credal/actions",
|
|
3
|
-
"version": "0.2.
|
|
3
|
+
"version": "0.2.125",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "AI Actions by Credal AI",
|
|
6
6
|
"sideEffects": false,
|
|
@@ -33,7 +33,6 @@
|
|
|
33
33
|
"@types/jsonwebtoken": "^9.0.9",
|
|
34
34
|
"@types/node": "^22.10.1",
|
|
35
35
|
"@types/node-forge": "^1.3.11",
|
|
36
|
-
"@types/pdf-parse": "^1.1.5",
|
|
37
36
|
"@typescript-eslint/eslint-plugin": "^8.18.0",
|
|
38
37
|
"@typescript-eslint/parser": "^8.18.0",
|
|
39
38
|
"eslint": "^9.16.0",
|
|
@@ -69,7 +68,6 @@
|
|
|
69
68
|
"node-forge": "^1.3.1",
|
|
70
69
|
"p-limit": "^7.1.1",
|
|
71
70
|
"pdf2json": "^3.1.6",
|
|
72
|
-
"pdfjs-dist": "^5.4.149",
|
|
73
71
|
"resend": "^4.7.0",
|
|
74
72
|
"snowflake-sdk": "^2.0.2",
|
|
75
73
|
"ts-node": "^10.9.2",
|