@credal/actions 0.2.41 → 0.2.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/actions/autogen/templates.js +5 -1
- package/dist/actions/autogen/types.d.ts +3 -0
- package/dist/actions/autogen/types.js +1 -0
- package/dist/actions/providers/confluence/updatePage.d.ts +3 -0
- package/dist/actions/providers/confluence/updatePage.js +46 -0
- package/dist/actions/providers/generic/fillTemplateAction.d.ts +7 -0
- package/dist/actions/providers/generic/fillTemplateAction.js +18 -0
- package/dist/actions/providers/generic/genericApiCall.d.ts +3 -0
- package/dist/actions/providers/generic/genericApiCall.js +38 -0
- package/dist/actions/providers/github/searchRepository.js +1 -5
- package/dist/actions/providers/google-oauth/getDriveContentById.d.ts +3 -0
- package/dist/actions/providers/google-oauth/getDriveContentById.js +161 -0
- package/dist/actions/providers/google-oauth/getDriveFileContentById.js +10 -8
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.js +47 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.js +110 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.js +78 -0
- package/dist/actions/providers/google-oauth/searchDriveByQuery.js +105 -15
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.d.ts +15 -0
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.js +129 -0
- package/dist/actions/providers/googlemaps/nearbysearch.d.ts +3 -0
- package/dist/actions/providers/googlemaps/nearbysearch.js +96 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.d.ts +3 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js +154 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.d.ts +3 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.js +45 -0
- package/package.json +1 -1
- package/dist/actions/providers/google-oauth/listGmailThreads.d.ts +0 -3
- package/dist/actions/providers/google-oauth/listGmailThreads.js +0 -98
- package/dist/actions/providers/google-oauth/searchGmailMessages.d.ts +0 -3
- package/dist/actions/providers/google-oauth/searchGmailMessages.js +0 -91
- package/dist/actions/providers/jamf/getComputerInventory.d.ts +0 -3
- package/dist/actions/providers/jamf/getComputerInventory.js +0 -45
- package/dist/actions/providers/jamf/getFileVaultRecoveryKey.d.ts +0 -3
- package/dist/actions/providers/jamf/getFileVaultRecoveryKey.js +0 -40
- package/dist/actions/providers/jamf/restartJamfComputerById.d.ts +0 -3
- package/dist/actions/providers/jamf/restartJamfComputerById.js +0 -37
- package/dist/actions/providers/jamf/types.d.ts +0 -8
- package/dist/actions/providers/jamf/types.js +0 -7
package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.js
@@ -0,0 +1,78 @@
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+import { axiosClient } from "../../util/axiosClient.js";
+import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
+import extractContentFromDriveFileId from "./utils/extractContentFromDriveFileId.js";
+const searchDriveAndGetContentByQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    var _b;
+    if (!authParams.authToken) {
+        return { success: false, error: MISSING_AUTH_TOKEN, files: [] };
+    }
+    const { query, fileLimit, fileSizeLimit } = params;
+    let files = [];
+    // 1. Search for files and get their metadata
+    const url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id,name,mimeType,webViewLink)&supportsAllDrives=true&includeItemsFromAllDrives=true&corpora=allDrives`;
+    try {
+        const res = yield axiosClient.get(url, {
+            headers: {
+                Authorization: `Bearer ${authParams.authToken}`,
+            },
+        });
+        files =
+            ((_b = res.data.files) === null || _b === void 0 ? void 0 : _b.map((file) => ({
+                id: file.id,
+                name: file.name,
+                mimeType: file.mimeType,
+                url: file.webViewLink,
+            }))) || [];
+    }
+    catch (error) {
+        console.error("Error searching Google Drive", error);
+        return {
+            success: false,
+            error: error instanceof Error ? error.message : "Unknown error",
+            files: [],
+        };
+    }
+    files = fileLimit ? files.splice(0, fileLimit) : files;
+    // 2. Extract content from files and do some smart range selection
+    const processedFiles = yield Promise.all(files
+        .filter((file) => file.id && file.mimeType)
+        .map((file) => __awaiter(void 0, void 0, void 0, function* () {
+        const content = yield extractContentFromDriveFileId({
+            params: { fileId: file.id, mimeType: file.mimeType },
+            authParams,
+        });
+        if (content.success) {
+            let selectedContent = content.content;
+            if (fileSizeLimit && selectedContent && selectedContent.length > fileSizeLimit) {
+                selectedContent = selectedContent.substring(0, fileSizeLimit);
+            }
+            return {
+                id: file.id || "",
+                name: file.name || "",
+                mimeType: file.mimeType || "",
+                url: file.url || "",
+                content: selectedContent,
+            };
+        }
+        else {
+            return {
+                id: file.id || "",
+                name: file.name || "",
+                mimeType: file.mimeType || "",
+                url: file.url || "",
+                error: content.error,
+            };
+        }
+    })));
+    return { success: true, files: processedFiles };
+});
+export default searchDriveAndGetContentByQuery;
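For orientation, a minimal usage sketch of the new action in TypeScript. The dist import path, the example query, the limits, and the environment-variable name are assumptions for illustration; the parameter and result shapes come from the compiled code above.

// Sketch only: the dist import path and all values are assumptions, not confirmed by this diff.
import searchDriveAndGetContentByQuery from "@credal/actions/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.js";

const result = await searchDriveAndGetContentByQuery({
  params: {
    query: "name contains 'roadmap'", // Drive v3 "q" expression; the action URL-encodes it
    fileLimit: 5,                      // keep at most the first 5 matches
    fileSizeLimit: 20000,              // truncate each file's extracted content to 20k characters
  },
  authParams: { authToken: process.env.GOOGLE_OAUTH_TOKEN }, // placeholder env var
});

// On success: { success: true, files: [{ id, name, mimeType, url, content? or error? }] }
if (result.success) {
  for (const file of result.files) {
    console.log(file.name, file.content ? file.content.length : file.error);
  }
}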
package/dist/actions/providers/google-oauth/searchDriveByQuery.js
@@ -10,25 +10,17 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
 import { axiosClient } from "../../util/axiosClient.js";
 import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
 const searchDriveByQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    var _b;
     if (!authParams.authToken) {
         return { success: false, error: MISSING_AUTH_TOKEN, files: [] };
     }
-    const { query, limit } = params;
-    const url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id,name,mimeType,webViewLink)&supportsAllDrives=true&includeItemsFromAllDrives=true&corpora=allDrives`;
+    const { query, limit, searchDriveByDrive } = params;
     try {
-        const res = yield axiosClient.get(url, {
-            headers: {
-                Authorization: `Bearer ${authParams.authToken}`,
-            },
-        });
-        const files = ((_b = res.data.files) === null || _b === void 0 ? void 0 : _b.map((file) => ({
-            id: file.id || "",
-            name: file.name || "",
-            mimeType: file.mimeType || "",
-            url: file.webViewLink || "",
-        }))) || [];
-        return { success: true, files: limit ? files.splice(0, limit) : files };
+        if (searchDriveByDrive) {
+            return yield searchAllDrivesIndividually(query, authParams.authToken, limit);
+        }
+        else {
+            return yield searchAllDrivesAtOnce(query, authParams.authToken, limit);
+        }
     }
     catch (error) {
         console.error("Error searching Google Drive", error);
@@ -39,4 +31,102 @@ const searchDriveByQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
         };
     }
 });
+// Original search method - search all drives at once
+const searchAllDrivesAtOnce = (query, authToken, limit) => __awaiter(void 0, void 0, void 0, function* () {
+    var _a;
+    const url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id,name,mimeType,webViewLink)&supportsAllDrives=true&includeItemsFromAllDrives=true&corpora=allDrives&pageSize=1000`;
+    const res = yield axiosClient.get(url, {
+        headers: {
+            Authorization: `Bearer ${authToken}`,
+        },
+    });
+    const files = ((_a = res.data.files) === null || _a === void 0 ? void 0 : _a.map((file) => ({
+        id: file.id || "",
+        name: file.name || "",
+        mimeType: file.mimeType || "",
+        url: file.webViewLink || "",
+    }))) || [];
+    return {
+        success: true,
+        files: limit ? files.slice(0, limit) : files,
+    };
+});
+// New search method - search each drive individually and aggregate results
+const searchAllDrivesIndividually = (query, authToken, limit) => __awaiter(void 0, void 0, void 0, function* () {
+    const drives = yield getAllDrives(authToken);
+    let allFiles = [];
+    // Search each drive individually
+    for (const drive of drives) {
+        try {
+            const driveFiles = yield searchSingleDrive(query, drive.id, authToken);
+            allFiles = allFiles.concat(driveFiles);
+            // If we have a limit and we've reached it, break early
+            if (limit && allFiles.length >= limit) {
+                break;
+            }
+        }
+        catch (error) {
+            console.error(`Error searching drive ${drive.name} (${drive.id}):`, error);
+        }
+    }
+    return {
+        success: true,
+        files: limit ? allFiles.slice(0, limit) : allFiles,
+    };
+});
+// Get all drives (shared drives + user's drive)
+const getAllDrives = (authToken) => __awaiter(void 0, void 0, void 0, function* () {
+    var _a;
+    const drives = [];
+    // Add user's personal drive (My Drive)
+    drives.push({ id: "root", name: "My Drive" });
+    // Get all shared drives
+    let nextPageToken;
+    do {
+        const url = `https://www.googleapis.com/drive/v3/drives?pageSize=100${nextPageToken ? `&pageToken=${nextPageToken}` : ""}`;
+        const res = yield axiosClient.get(url, {
+            headers: {
+                Authorization: `Bearer ${authToken}`,
+            },
+        });
+        const sharedDrives = ((_a = res.data.drives) === null || _a === void 0 ? void 0 : _a.map((drive) => ({
+            id: drive.id || "",
+            name: drive.name || "",
+        }))) || [];
+        drives.push(...sharedDrives);
+        nextPageToken = res.data.nextPageToken;
+    } while (nextPageToken);
+    return drives;
+});
+// Search a single drive
+const searchSingleDrive = (query, driveId, authToken) => __awaiter(void 0, void 0, void 0, function* () {
+    var _a;
+    const files = [];
+    let nextPageToken;
+    do {
+        let url;
+        if (driveId === "root") {
+            // Search in user's personal drive
+            url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id,name,mimeType,webViewLink),nextPageToken&pageSize=1000${nextPageToken ? `&pageToken=${nextPageToken}` : ""}`;
+        }
+        else {
+            // Search in specific shared drive
+            url = `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(`${query} and parents in '${driveId}'`)}&fields=files(id,name,mimeType,webViewLink),nextPageToken&supportsAllDrives=true&includeItemsFromAllDrives=true&corpora=drive&driveId=${driveId}&pageSize=1000${nextPageToken ? `&pageToken=${nextPageToken}` : ""}`;
        }
+        const res = yield axiosClient.get(url, {
+            headers: {
+                Authorization: `Bearer ${authToken}`,
+            },
+        });
+        const driveFiles = ((_a = res.data.files) === null || _a === void 0 ? void 0 : _a.map((file) => ({
+            id: file.id || "",
+            name: file.name || "",
+            mimeType: file.mimeType || "",
+            url: file.webViewLink || "",
+        }))) || [];
+        files.push(...driveFiles);
+        nextPageToken = res.data.nextPageToken;
+    } while (nextPageToken);
+    return files;
+});
 export default searchDriveByQuery;
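The practical effect of the new searchDriveByDrive flag: left false (or omitted) the action keeps the old single corpora=allDrives request (now capped at pageSize=1000), while true enumerates My Drive plus every shared drive and searches each one in turn, paginating with nextPageToken and stopping once the limit is reached. A hedged sketch of calling it both ways; the import path, query, and env var name are assumptions.

// Sketch only: the dist import path is an assumption.
import searchDriveByQuery from "@credal/actions/dist/actions/providers/google-oauth/searchDriveByQuery.js";

const authParams = { authToken: process.env.GOOGLE_OAUTH_TOKEN }; // placeholder env var
const query = "fullText contains 'incident review'";              // Drive v3 "q" expression

// Old behaviour: one request against corpora=allDrives.
const fast = await searchDriveByQuery({ params: { query, limit: 20 }, authParams });

// New behaviour: enumerate My Drive + each shared drive and search them one by one.
const thorough = await searchDriveByQuery({
  params: { query, limit: 20, searchDriveByDrive: true },
  authParams,
});

console.log(fast.files.length, thorough.files.length);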
package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.d.ts
@@ -0,0 +1,15 @@
+import type { AuthParamsType } from "../../../autogen/types.js";
+export type getDriveFileContentParams = {
+    fileId: string;
+    mimeType: string;
+};
+export type getDriveFileContentOutput = {
+    success: boolean;
+    content?: string;
+    error?: string;
+};
+declare const extractContentFromDriveFileId: ({ params, authParams, }: {
+    params: getDriveFileContentParams;
+    authParams: AuthParamsType;
+}) => Promise<getDriveFileContentOutput>;
+export default extractContentFromDriveFileId;
package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.js
@@ -0,0 +1,129 @@
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+import { axiosClient } from "../../../util/axiosClient.js";
+import mammoth from "mammoth";
+import { MISSING_AUTH_TOKEN } from "../../../util/missingAuthConstants.js";
+const extractContentFromDriveFileId = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    if (!authParams.authToken) {
+        return { success: false, error: MISSING_AUTH_TOKEN };
+    }
+    const { fileId, mimeType } = params;
+    let content = "";
+    try {
+        // Handle different file types - read content directly
+        if (mimeType === "application/vnd.google-apps.document") {
+            // Google Docs - download as plain text
+            const exportUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}/export?mimeType=text/plain`;
+            const exportRes = yield axiosClient.get(exportUrl, {
+                headers: {
+                    Authorization: `Bearer ${authParams.authToken}`,
+                },
+                responseType: "text",
+            });
+            content = exportRes.data;
+        }
+        else if (mimeType === "application/vnd.google-apps.spreadsheet") {
+            // Google Sheets - download as CSV
+            const exportUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}/export?mimeType=text/csv`;
+            const exportRes = yield axiosClient.get(exportUrl, {
+                headers: {
+                    Authorization: `Bearer ${authParams.authToken}`,
+                },
+                responseType: "text",
+            });
+            // Clean up excessive commas from empty columns
+            content = exportRes.data
+                .split("\n")
+                .map((line) => line.replace(/,+$/, "")) // Remove trailing commas
+                .map((line) => line.replace(/,{2,}/g, ",")) // Replace multiple commas with single comma
+                .join("\n");
+        }
+        else if (mimeType === "application/vnd.google-apps.presentation") {
+            // Google Slides - download as plain text
+            const exportUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}/export?mimeType=text/plain`;
+            const exportRes = yield axiosClient.get(exportUrl, {
+                headers: {
+                    Authorization: `Bearer ${authParams.authToken}`,
+                },
+                responseType: "text",
+            });
+            content = exportRes.data;
+        }
+        else if (mimeType === "application/pdf") {
+            return {
+                success: false,
+                error: "PDF files are not supported for text extraction",
+            };
+        }
+        else if (mimeType === "application/vnd.openxmlformats-officedocument.wordprocessingml.document" ||
+            mimeType === "application/msword") {
+            // Word documents (.docx or .doc) - download and extract text using mammoth
+            const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media`;
+            const downloadRes = yield axiosClient.get(downloadUrl, {
+                headers: {
+                    Authorization: `Bearer ${authParams.authToken}`,
+                },
+                responseType: "arraybuffer",
+            });
+            try {
+                // mammoth works with .docx files. It will ignore formatting and return raw text
+                const result = yield mammoth.extractRawText({ buffer: Buffer.from(downloadRes.data) });
+                content = result.value; // raw text
+            }
+            catch (wordError) {
+                return {
+                    success: false,
+                    error: `Failed to parse Word document: ${wordError instanceof Error ? wordError.message : "Unknown Word error"}`,
+                };
+            }
+        }
+        else if (mimeType === "text/plain" ||
+            mimeType === "text/html" ||
+            mimeType === "application/rtf" ||
+            (mimeType === null || mimeType === void 0 ? void 0 : mimeType.startsWith("text/"))) {
+            // Text-based files
+            const downloadUrl = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media`;
+            const downloadRes = yield axiosClient.get(downloadUrl, {
+                headers: {
+                    Authorization: `Bearer ${authParams.authToken}`,
+                },
+                responseType: "text",
+            });
+            content = downloadRes.data;
+        }
+        else if (mimeType === null || mimeType === void 0 ? void 0 : mimeType.startsWith("image/")) {
+            // Skip images
+            return {
+                success: false,
+                error: "Image files are not supported for text extraction",
+            };
+        }
+        else {
+            // Unsupported file type
+            return {
+                success: false,
+                error: `Unsupported file type: ${mimeType}`,
+            };
+        }
+        content = content.trim();
+        return {
+            success: true,
+            content,
+        };
+    }
+    catch (error) {
+        console.error("Error getting Google Drive file content", error);
+        return {
+            success: false,
+            error: error instanceof Error ? error.message : "Unknown error",
+        };
+    }
+});
+export default extractContentFromDriveFileId;
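This helper is what the new search-and-get actions delegate to per file. A small sketch of calling it directly for a Google Doc; the import path, file ID, and env var name are placeholders, while the { fileId, mimeType } params and { success, content?, error? } result match the declaration shown earlier.

// Sketch only: the dist import path and the file ID are assumptions.
import extractContentFromDriveFileId from "@credal/actions/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.js";

const extracted = await extractContentFromDriveFileId({
  params: {
    fileId: "YOUR_DRIVE_FILE_ID",                     // placeholder
    mimeType: "application/vnd.google-apps.document", // exported as text/plain by the helper
  },
  authParams: { authToken: process.env.GOOGLE_OAUTH_TOKEN }, // placeholder env var
});

if (extracted.success) {
  console.log(extracted.content);
} else {
  console.error(extracted.error); // e.g. PDFs and images are rejected as unsupported
}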
package/dist/actions/providers/googlemaps/nearbysearch.js
@@ -0,0 +1,96 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const axios_1 = __importDefault(require("axios"));
+const types_1 = require("../../autogen/types");
+const INCLUDED_TYPES = [
+    "monument",
+    "museum",
+    "art_gallery",
+    "sculpture",
+    "cultural_landmark",
+    "historical_place",
+    "performing_arts_theater",
+    "university",
+    "aquarium",
+    "botanical_garden",
+    "comedy_club",
+    "park",
+    "movie_theater",
+    "national_park",
+    "garden",
+    "night_club",
+    "tourist_attraction",
+    "water_park",
+    "zoo",
+    "bar",
+    "restaurant",
+    "food_court",
+    "bakery",
+    "cafe",
+    "coffee_shop",
+    "pub",
+    "wine_bar",
+    "spa",
+    "beach",
+    "market",
+    "shopping_mall",
+    "stadium",
+];
+const nearbysearch = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const url = `https://places.googleapis.com/v1/places:searchNearby`;
+    const fieldMask = [
+        "places.displayName",
+        "places.formattedAddress",
+        "places.priceLevel",
+        "places.rating",
+        "places.primaryTypeDisplayName",
+        "places.editorialSummary",
+        "places.regularOpeningHours",
+    ].join(",");
+    const response = yield axios_1.default.post(url, {
+        maxResultCount: 20,
+        includedTypes: INCLUDED_TYPES,
+        locationRestriction: {
+            circle: {
+                center: {
+                    latitude: params.latitude,
+                    longitude: params.longitude,
+                },
+                radius: 10000,
+            },
+        },
+    }, {
+        headers: {
+            "X-Goog-Api-Key": authParams.apiKey,
+            "X-Goog-FieldMask": fieldMask,
+            "Content-Type": "application/json",
+        },
+    });
+    return types_1.googlemapsNearbysearchOutputSchema.parse({
+        results: response.data.places.map((place) => {
+            var _a, _b;
+            return ({
+                name: place.displayName.text,
+                address: place.formattedAddress,
+                priceLevel: place.priceLevel,
+                rating: place.rating,
+                primaryType: place.primaryTypeDisplayName.text,
+                editorialSummary: ((_a = place.editorialSummary) === null || _a === void 0 ? void 0 : _a.text) || "",
+                openingHours: ((_b = place.regularOpeningHours) === null || _b === void 0 ? void 0 : _b.weekdayDescriptions.join("\n")) || "",
+            });
+        }),
+    });
+});
+exports.default = nearbysearch;
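A sketch of invoking the new Google Maps nearby-search action. The import path, coordinates, and env var name are assumptions; the latitude/longitude params, the apiKey auth param, and the parsed result fields come from the code above (the radius is fixed at 10 km and results are capped at 20).

// Sketch only: the dist import path and the coordinates are assumptions.
import nearbysearch from "@credal/actions/dist/actions/providers/googlemaps/nearbysearch.js";

const { results } = await nearbysearch({
  params: { latitude: 40.7128, longitude: -74.006 },          // example coordinates
  authParams: { apiKey: process.env.GOOGLE_MAPS_API_KEY },    // placeholder env var
});

for (const place of results) {
  console.log(place.name, place.rating, place.primaryType);
}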
package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js
@@ -0,0 +1,154 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const snowflake_sdk_1 = __importDefault(require("snowflake-sdk"));
+const crypto_1 = __importDefault(require("crypto"));
+const client_s3_1 = require("@aws-sdk/client-s3");
+const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
+const uuid_1 = require("uuid");
+// Only log errors.
+snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
+const runSnowflakeQueryWriteResultsToS3 = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const { databaseName, warehouse, query, user, accountName, s3BucketName, s3Region, outputFormat = "json" } = params;
+    const { apiKey: privateKey, awsAccessKeyId, awsSecretAccessKey } = authParams;
+    if (!privateKey) {
+        throw new Error("Snowflake private key is required");
+    }
+    if (!awsAccessKeyId || !awsSecretAccessKey) {
+        throw new Error("AWS credentials are required");
+    }
+    if (!accountName || !user || !databaseName || !warehouse || !query || !s3BucketName) {
+        throw new Error("Missing required parameters for Snowflake query or S3 destination");
+    }
+    const getPrivateKeyCorrectFormat = (privateKey) => {
+        const buffer = Buffer.from(privateKey);
+        const privateKeyObject = crypto_1.default.createPrivateKey({
+            key: buffer,
+            format: "pem",
+            passphrase: "password",
+        });
+        const privateKeyCorrectFormat = privateKeyObject.export({
+            format: "pem",
+            type: "pkcs8",
+        });
+        return privateKeyCorrectFormat.toString();
+    };
+    const executeQueryAndFormatData = () => __awaiter(void 0, void 0, void 0, function* () {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const queryResults = yield new Promise((resolve, reject) => {
+            connection.execute({
+                sqlText: query,
+                complete: (err, stmt, rows) => {
+                    if (err) {
+                        return reject(err);
+                    }
+                    return resolve(rows || []);
+                },
+            });
+        });
+        // Format the results based on the output format
+        let formattedData;
+        if (outputFormat.toLowerCase() === "csv") {
+            if (queryResults.length === 0) {
+                formattedData = "";
+            }
+            else {
+                const headers = Object.keys(queryResults[0]).join(",");
+                const rows = queryResults.map(row => Object.values(row)
+                    .map(value => (typeof value === "object" && value !== null ? JSON.stringify(value) : value))
+                    .join(","));
+                formattedData = [headers, ...rows].join("\n");
+            }
+        }
+        else {
+            // Default to JSON
+            formattedData = JSON.stringify(queryResults, null, 2);
+        }
+        return { formattedData, resultsLength: queryResults.length };
+    });
+    const uploadToS3AndGetURL = (formattedData) => __awaiter(void 0, void 0, void 0, function* () {
+        // Create S3 client
+        const s3Client = new client_s3_1.S3Client({
+            region: s3Region,
+            credentials: {
+                accessKeyId: awsAccessKeyId,
+                secretAccessKey: awsSecretAccessKey,
+            },
+        });
+        const contentType = outputFormat.toLowerCase() === "csv" ? "text/csv" : "application/json";
+        const fileExtension = outputFormat.toLowerCase() === "csv" ? "csv" : "json";
+        const finalKey = `${databaseName}/${(0, uuid_1.v4)()}.${fileExtension}`;
+        // Upload to S3 without ACL
+        const uploadCommand = new client_s3_1.PutObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+            Body: formattedData,
+            ContentType: contentType,
+        });
+        yield s3Client.send(uploadCommand);
+        // Generate a presigned URL (valid for an hour)
+        const getObjectCommand = new client_s3_1.GetObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+        });
+        const presignedUrl = yield (0, s3_request_presigner_1.getSignedUrl)(s3Client, getObjectCommand, { expiresIn: 3600 });
+        return presignedUrl;
+    });
+    // Process the private key
+    const privateKeyCorrectFormatString = getPrivateKeyCorrectFormat(privateKey);
+    // Set up a connection using snowflake-sdk
+    const connection = snowflake_sdk_1.default.createConnection({
+        account: accountName,
+        username: user,
+        privateKey: privateKeyCorrectFormatString,
+        authenticator: "SNOWFLAKE_JWT",
+        role: "ACCOUNTADMIN",
+        warehouse: warehouse,
+        database: databaseName,
+    });
+    try {
+        // Connect to Snowflake
+        yield new Promise((resolve, reject) => {
+            connection.connect((err, conn) => {
+                if (err) {
+                    console.error("Unable to connect to Snowflake:", err.message);
+                    return reject(err);
+                }
+                resolve(conn);
+            });
+        });
+        const { formattedData, resultsLength } = yield executeQueryAndFormatData();
+        const presignedUrl = yield uploadToS3AndGetURL(formattedData);
+        // Return fields to match schema definition
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        return {
+            bucketUrl: presignedUrl,
+            message: `Query results successfully written to S3. URL valid for 1 hour.`,
+            rowCount: resultsLength,
+        };
+    }
+    catch (error) {
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        throw Error(`An error occurred: ${error}`);
+    }
+});
+exports.default = runSnowflakeQueryWriteResultsToS3;
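A sketch of the Snowflake-to-S3 action's call shape. Every identifier below (account, warehouse, bucket, region, env var names) is a placeholder; the parameter names, the key-pair auth via authParams.apiKey, and the { bucketUrl, message, rowCount } result follow the code above. Note the private key is expected as an encrypted PEM whose passphrase is hard-coded to "password", and the connection uses the ACCOUNTADMIN role.

// Sketch only: every value below is a placeholder.
import runSnowflakeQueryWriteResultsToS3 from "@credal/actions/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js";

const { bucketUrl, rowCount } = await runSnowflakeQueryWriteResultsToS3({
  params: {
    accountName: "myorg-myaccount",
    user: "REPORTING_USER",
    databaseName: "ANALYTICS",
    warehouse: "REPORTING_WH",
    query: "SELECT * FROM ORDERS LIMIT 100",
    s3BucketName: "my-results-bucket",
    s3Region: "us-east-1",
    outputFormat: "csv", // or "json" (the default)
  },
  authParams: {
    apiKey: process.env.SNOWFLAKE_PRIVATE_KEY,          // encrypted PEM key, passphrase "password"
    awsAccessKeyId: process.env.AWS_ACCESS_KEY_ID,
    awsSecretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
});

console.log(`${rowCount} rows written; presigned URL (valid 1 hour):`, bucketUrl);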
package/dist/actions/providers/x/scrapeTweetDataWithNitter.js
@@ -0,0 +1,45 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const firecrawl_js_1 = __importDefault(require("@mendable/firecrawl-js"));
+const scrapeTweetDataWithNitter = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const tweetUrlRegex = /^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)\/([a-zA-Z0-9_]+)\/status\/(\d+)(?:\?.*)?$/;
+    if (!tweetUrlRegex.test(params.tweetUrl)) {
+        throw new Error("Invalid tweet URL. Expected format: https://twitter.com/username/status/id or https://x.com/username/status/id");
+    }
+    const nitterUrl = params.tweetUrl.replace(/^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)/i, "https://nitter.net");
+    // Initialize Firecrawl
+    if (!authParams.apiKey) {
+        throw new Error("API key is required for X+Nitter+Firecrawl");
+    }
+    const firecrawl = new firecrawl_js_1.default({
+        apiKey: authParams.apiKey,
+    });
+    try {
+        // Scrape the Nitter URL
+        const result = yield firecrawl.scrapeUrl(nitterUrl);
+        if (!result.success) {
+            throw new Error(`Failed to scrape tweet: ${result.error || "Unknown error"}`);
+        }
+        // Extract the tweet text from the scraped content - simple approach - in practice, you might need more robust parsing based on nitter html structure
+        const tweetContent = result.markdown;
+        return {
+            text: tweetContent || "Error scraping with firecrawl",
+        };
+    }
+    catch (error) {
+        throw new Error(`Error scraping tweet: ${error instanceof Error ? error.message : error}`);
+    }
+});
+exports.default = scrapeTweetDataWithNitter;
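Finally, a sketch of the tweet-scraping action. The import path, tweet URL, and env var name are placeholders; per the code above, the action rewrites twitter.com/x.com URLs to nitter.net, scrapes the page with Firecrawl using authParams.apiKey, and returns { text } containing the scraped markdown.

// Sketch only: the dist import path and the tweet URL are examples.
import scrapeTweetDataWithNitter from "@credal/actions/dist/actions/providers/x/scrapeTweetDataWithNitter.js";

const { text } = await scrapeTweetDataWithNitter({
  params: { tweetUrl: "https://x.com/someuser/status/1234567890123456789" }, // placeholder status URL
  authParams: { apiKey: process.env.FIRECRAWL_API_KEY },                     // placeholder env var
});

console.log(text);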