@credal/actions 0.1.46 → 0.1.48
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/actions/autogen/templates.js +1 -9
- package/dist/actions/autogen/types.d.ts +0 -6
- package/dist/actions/autogen/types.js +0 -2
- package/dist/actions/providers/confluence/updatePage.d.ts +3 -0
- package/dist/actions/providers/confluence/updatePage.js +46 -0
- package/dist/actions/providers/generic/fillTemplateAction.d.ts +7 -0
- package/dist/actions/providers/generic/fillTemplateAction.js +18 -0
- package/dist/actions/providers/generic/genericApiCall.d.ts +3 -0
- package/dist/actions/providers/generic/genericApiCall.js +38 -0
- package/dist/actions/providers/googlemaps/nearbysearch.d.ts +3 -0
- package/dist/actions/providers/googlemaps/nearbysearch.js +96 -0
- package/dist/actions/providers/snowflake/getRowByFieldValue.js +3 -3
- package/dist/actions/providers/snowflake/runSnowflakeQuery.js +5 -4
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.d.ts +3 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js +154 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.d.ts +3 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.js +45 -0
- package/package.json +1 -1
- package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.d.ts +0 -3
- package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.js +0 -43

package/dist/actions/autogen/templates.js

@@ -1346,10 +1346,6 @@ exports.snowflakeGetRowByFieldValueDefinition = {
             type: "string",
             description: "The name of the Snowflake account",
         },
-        user: {
-            type: "string",
-            description: "The user to authenticate with",
-        },
         warehouse: {
             type: "string",
             description: "The warehouse to use",
@@ -1384,7 +1380,7 @@ exports.snowflakeRunSnowflakeQueryDefinition = {
     scopes: [],
     parameters: {
         type: "object",
-        required: ["databaseName", "warehouse", "query", "
+        required: ["databaseName", "warehouse", "query", "accountName"],
         properties: {
             databaseName: {
                 type: "string",
@@ -1398,10 +1394,6 @@ exports.snowflakeRunSnowflakeQueryDefinition = {
                 type: "string",
                 description: "The SQL query to execute",
             },
-            user: {
-                type: "string",
-                description: "The username to authenticate with",
-            },
             accountName: {
                 type: "string",
                 description: "The name of the Snowflake account",

package/dist/actions/autogen/types.d.ts

@@ -1181,13 +1181,11 @@ export declare const snowflakeGetRowByFieldValueParamsSchema: z.ZodObject<{
     fieldName: z.ZodString;
     fieldValue: z.ZodString;
     accountName: z.ZodOptional<z.ZodString>;
-    user: z.ZodOptional<z.ZodString>;
     warehouse: z.ZodOptional<z.ZodString>;
 }, "strip", z.ZodTypeAny, {
     tableName: string;
     fieldName: string;
     fieldValue: string;
-    user?: string | undefined;
     databaseName?: string | undefined;
     accountName?: string | undefined;
     warehouse?: string | undefined;
@@ -1195,7 +1193,6 @@ export declare const snowflakeGetRowByFieldValueParamsSchema: z.ZodObject<{
     tableName: string;
     fieldName: string;
     fieldValue: string;
-    user?: string | undefined;
     databaseName?: string | undefined;
     accountName?: string | undefined;
     warehouse?: string | undefined;
@@ -1229,18 +1226,15 @@ export declare const snowflakeRunSnowflakeQueryParamsSchema: z.ZodObject<{
     databaseName: z.ZodString;
     warehouse: z.ZodString;
     query: z.ZodString;
-    user: z.ZodString;
     accountName: z.ZodString;
     outputFormat: z.ZodOptional<z.ZodEnum<["json", "csv"]>>;
 }, "strip", z.ZodTypeAny, {
-    user: string;
     query: string;
     databaseName: string;
     accountName: string;
     warehouse: string;
     outputFormat?: "json" | "csv" | undefined;
 }, {
-    user: string;
     query: string;
     databaseName: string;
     accountName: string;

package/dist/actions/autogen/types.js

@@ -410,7 +410,6 @@ exports.snowflakeGetRowByFieldValueParamsSchema = zod_1.z.object({
     fieldName: zod_1.z.string().describe("The name of the field to query"),
     fieldValue: zod_1.z.string().describe("The value of the field to query"),
     accountName: zod_1.z.string().describe("The name of the Snowflake account").optional(),
-    user: zod_1.z.string().describe("The user to authenticate with").optional(),
     warehouse: zod_1.z.string().describe("The warehouse to use").optional(),
 });
 exports.snowflakeGetRowByFieldValueOutputSchema = zod_1.z.object({
@@ -425,7 +424,6 @@ exports.snowflakeRunSnowflakeQueryParamsSchema = zod_1.z.object({
     databaseName: zod_1.z.string().describe("The name of the database to query"),
     warehouse: zod_1.z.string().describe("The warehouse to use for executing the query"),
     query: zod_1.z.string().describe("The SQL query to execute"),
-    user: zod_1.z.string().describe("The username to authenticate with"),
     accountName: zod_1.z.string().describe("The name of the Snowflake account"),
     outputFormat: zod_1.z.enum(["json", "csv"]).describe("The format of the output").optional(),
 });
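
The net effect of the three autogen changes above is that the Snowflake actions no longer take a "user" request parameter. A minimal validation sketch against the updated schema, assuming the compiled dist path shown in this diff can be deep-required; all values are placeholders:

const { snowflakeRunSnowflakeQueryParamsSchema } = require("@credal/actions/dist/actions/autogen/types");

// "user" is no longer part of the schema, so it is not required here
// (and an extra "user" key would simply be dropped by the "strip" object mode).
const params = snowflakeRunSnowflakeQueryParamsSchema.parse({
  databaseName: "ANALYTICS_DB",
  warehouse: "COMPUTE_WH",
  query: "SELECT 1",
  accountName: "my-account",
  outputFormat: "json", // optional: "json" | "csv"
});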

package/dist/actions/providers/confluence/updatePage.js

@@ -0,0 +1,46 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const axiosClient_1 = require("../../util/axiosClient");
+function getConfluenceRequestConfig(baseUrl, username, apiToken) {
+    return {
+        baseURL: baseUrl,
+        headers: {
+            Accept: "application/json",
+            Authorization: `Basic ${Buffer.from(`${username}:${apiToken}`).toString("base64")}`,
+        },
+    };
+}
+const confluenceUpdatePage = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const { pageId, content, title } = params;
+    const { baseUrl, authToken, username } = authParams;
+    if (!baseUrl || !authToken || !username) {
+        throw new Error("Missing required authentication information");
+    }
+    const config = getConfluenceRequestConfig(baseUrl, username, authToken);
+    // Get current version number
+    const response = yield axiosClient_1.axiosClient.get(`/api/v2/pages/${pageId}`, config);
+    const currVersion = response.data.version.number;
+    const payload = {
+        id: pageId,
+        status: "current",
+        title,
+        body: {
+            representation: "storage",
+            value: content,
+        },
+        version: {
+            number: currVersion + 1,
+        },
+    };
+    yield axiosClient_1.axiosClient.put(`/api/v2/pages/${pageId}`, payload, config);
+});
+exports.default = confluenceUpdatePage;
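
A hedged usage sketch for the new Confluence update-page action: it reads the page's current version via /api/v2/pages/{pageId} and then writes the new body with an incremented version number. The deep require path mirrors the dist location above; the page and credential values are placeholders.

const confluenceUpdatePage = require("@credal/actions/dist/actions/providers/confluence/updatePage").default;

confluenceUpdatePage({
  params: {
    pageId: "123456",                      // placeholder page id
    title: "Release notes",
    content: "<p>Updated page body</p>",   // Confluence "storage" representation
  },
  authParams: {
    baseUrl: "https://example.atlassian.net/wiki", // placeholder site URL
    username: "user@example.com",
    authToken: "<confluence-api-token>",
  },
}).catch(err => console.error(err));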

package/dist/actions/providers/generic/fillTemplateAction.js

@@ -0,0 +1,18 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const fillTemplateAction = (_a) => __awaiter(void 0, [_a], void 0, function* ({ template }) {
+    // Simply return the template without any modification
+    return {
+        result: template,
+    };
+});
+exports.default = fillTemplateAction;
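
The fill-template action is intentionally a pass-through for now: it returns the template unchanged. A one-call sketch, with the require path assumed from the dist layout above:

const fillTemplateAction = require("@credal/actions/dist/actions/providers/generic/fillTemplateAction").default;

fillTemplateAction({ template: "Hello, {{name}}!" }).then(({ result }) => {
  console.log(result); // prints "Hello, {{name}}!" verbatim; no interpolation happens here
});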

package/dist/actions/providers/generic/genericApiCall.js

@@ -0,0 +1,38 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const axios_1 = __importDefault(require("axios"));
+const genericApiCall = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, }) {
+    try {
+        const { endpoint, method, headers, body } = params;
+        const response = yield (0, axios_1.default)({
+            url: endpoint,
+            method,
+            headers,
+            data: method !== "GET" ? body : undefined,
+        });
+        return {
+            statusCode: response.status,
+            headers: response.headers,
+            data: response.data,
+        };
+    }
+    catch (error) {
+        if (axios_1.default.isAxiosError(error)) {
+            throw Error("Axios Error: " + (error.message || "Failed to make API call"));
+        }
+        throw Error("Error: " + (error || "Failed to make API call"));
+    }
+});
+exports.default = genericApiCall;
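
A hedged sketch of calling the new generic API call action, which forwards endpoint, method, headers, and body straight to axios and returns the status code, response headers, and data. The endpoint and payload below are placeholders:

const genericApiCall = require("@credal/actions/dist/actions/providers/generic/genericApiCall").default;

genericApiCall({
  params: {
    endpoint: "https://api.example.com/v1/items", // placeholder URL
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: { name: "example" },                    // sent because method !== "GET"
  },
}).then(({ statusCode, data }) => console.log(statusCode, data));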

package/dist/actions/providers/googlemaps/nearbysearch.js

@@ -0,0 +1,96 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const axios_1 = __importDefault(require("axios"));
+const types_1 = require("../../autogen/types");
+const INCLUDED_TYPES = [
+    "monument",
+    "museum",
+    "art_gallery",
+    "sculpture",
+    "cultural_landmark",
+    "historical_place",
+    "performing_arts_theater",
+    "university",
+    "aquarium",
+    "botanical_garden",
+    "comedy_club",
+    "park",
+    "movie_theater",
+    "national_park",
+    "garden",
+    "night_club",
+    "tourist_attraction",
+    "water_park",
+    "zoo",
+    "bar",
+    "restaurant",
+    "food_court",
+    "bakery",
+    "cafe",
+    "coffee_shop",
+    "pub",
+    "wine_bar",
+    "spa",
+    "beach",
+    "market",
+    "shopping_mall",
+    "stadium",
+];
+const nearbysearch = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const url = `https://places.googleapis.com/v1/places:searchNearby`;
+    const fieldMask = [
+        "places.displayName",
+        "places.formattedAddress",
+        "places.priceLevel",
+        "places.rating",
+        "places.primaryTypeDisplayName",
+        "places.editorialSummary",
+        "places.regularOpeningHours",
+    ].join(",");
+    const response = yield axios_1.default.post(url, {
+        maxResultCount: 20,
+        includedTypes: INCLUDED_TYPES,
+        locationRestriction: {
+            circle: {
+                center: {
+                    latitude: params.latitude,
+                    longitude: params.longitude,
+                },
+                radius: 10000,
+            },
+        },
+    }, {
+        headers: {
+            "X-Goog-Api-Key": authParams.apiKey,
+            "X-Goog-FieldMask": fieldMask,
+            "Content-Type": "application/json",
+        },
+    });
+    return types_1.googlemapsNearbysearchOutputSchema.parse({
+        results: response.data.places.map((place) => {
+            var _a, _b;
+            return ({
+                name: place.displayName.text,
+                address: place.formattedAddress,
+                priceLevel: place.priceLevel,
+                rating: place.rating,
+                primaryType: place.primaryTypeDisplayName.text,
+                editorialSummary: ((_a = place.editorialSummary) === null || _a === void 0 ? void 0 : _a.text) || "",
+                openingHours: ((_b = place.regularOpeningHours) === null || _b === void 0 ? void 0 : _b.weekdayDescriptions.join("\n")) || "",
+            });
+        }),
+    });
+});
+exports.default = nearbysearch;
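
A hedged sketch of the new Google Maps nearby-search action. The coordinates and API key are placeholders; the action itself pins the search to a 10 km radius, at most 20 results, and the INCLUDED_TYPES list above, and validates its output with googlemapsNearbysearchOutputSchema.

const nearbysearch = require("@credal/actions/dist/actions/providers/googlemaps/nearbysearch").default;

nearbysearch({
  params: { latitude: 40.7484, longitude: -73.9857 },      // placeholder coordinates
  authParams: { apiKey: process.env.GOOGLE_MAPS_API_KEY }, // placeholder key source
}).then(({ results }) => {
  for (const place of results) {
    console.log(`${place.name} (${place.primaryType}): ${place.address}`);
  }
});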

package/dist/actions/providers/snowflake/getRowByFieldValue.js

@@ -11,14 +11,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 Object.defineProperty(exports, "__esModule", { value: true });
 const getSnowflakeConnection_1 = require("./auth/getSnowflakeConnection");
 const getRowByFieldValue = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    const { databaseName, tableName, fieldName, warehouse, fieldValue,
-    if (!accountName || !
+    const { databaseName, tableName, fieldName, warehouse, fieldValue, accountName } = params;
+    if (!accountName || !databaseName || !warehouse || !tableName || !fieldName || !fieldValue) {
         throw new Error("Account name and user are required");
     }
     // Set up a connection using snowflake-sdk
     const connection = (0, getSnowflakeConnection_1.getSnowflakeConnection)({
         account: accountName,
-        username:
+        username: authParams.username || "CREDAL_USER",
         warehouse: warehouse,
         database: databaseName,
     }, {

package/dist/actions/providers/snowflake/runSnowflakeQuery.js

@@ -17,15 +17,16 @@ const getSnowflakeConnection_1 = require("./auth/getSnowflakeConnection");
 const formatDataForCodeInterpreter_1 = require("../../util/formatDataForCodeInterpreter");
 snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
 const runSnowflakeQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    const { databaseName, warehouse, query,
-    if (!accountName || !
+    const { databaseName, warehouse, query, accountName, outputFormat = "json" } = params;
+    if (!accountName || !databaseName || !warehouse || !query) {
         throw new Error("Missing required parameters for Snowflake query");
     }
     const executeQueryAndFormatData = () => __awaiter(void 0, void 0, void 0, function* () {
+        const formattedQuery = query.trim().replace(/\s+/g, " "); // Normalize all whitespace to single spaces
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         const queryResults = yield new Promise((resolve, reject) => {
             connection.execute({
-                sqlText:
+                sqlText: formattedQuery,
                 complete: (err, stmt, rows) => {
                     if (err) {
                         return reject(err);
@@ -41,7 +42,7 @@ const runSnowflakeQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ p
     // Set up a connection using snowflake-sdk
     const connection = (0, getSnowflakeConnection_1.getSnowflakeConnection)({
         account: accountName,
-        username:
+        username: authParams.username || "CREDAL_USER",
         warehouse: warehouse,
         database: databaseName,
     }, { authToken: authParams.authToken, apiKey: authParams.apiKey });
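
Both Snowflake query edits above drop the "user" request parameter and instead read the username from authParams, defaulting to "CREDAL_USER". A hedged calling sketch under that assumption; import path and values are placeholders:

const runSnowflakeQuery = require("@credal/actions/dist/actions/providers/snowflake/runSnowflakeQuery").default;

runSnowflakeQuery({
  params: {
    databaseName: "ANALYTICS_DB",
    warehouse: "COMPUTE_WH",
    query: "SELECT CURRENT_DATE",
    accountName: "my-account",
    outputFormat: "json",          // note: no "user" field anymore
  },
  authParams: {
    username: "REPORTING_USER",    // falls back to "CREDAL_USER" if omitted
    authToken: "<auth-token>",     // passed through to getSnowflakeConnection
    apiKey: "<api-key>",
  },
}).then(result => console.log(result));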

package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js

@@ -0,0 +1,154 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const snowflake_sdk_1 = __importDefault(require("snowflake-sdk"));
+const crypto_1 = __importDefault(require("crypto"));
+const client_s3_1 = require("@aws-sdk/client-s3");
+const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
+const uuid_1 = require("uuid");
+// Only log errors.
+snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
+const runSnowflakeQueryWriteResultsToS3 = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const { databaseName, warehouse, query, user, accountName, s3BucketName, s3Region, outputFormat = "json" } = params;
+    const { apiKey: privateKey, awsAccessKeyId, awsSecretAccessKey } = authParams;
+    if (!privateKey) {
+        throw new Error("Snowflake private key is required");
+    }
+    if (!awsAccessKeyId || !awsSecretAccessKey) {
+        throw new Error("AWS credentials are required");
+    }
+    if (!accountName || !user || !databaseName || !warehouse || !query || !s3BucketName) {
+        throw new Error("Missing required parameters for Snowflake query or S3 destination");
+    }
+    const getPrivateKeyCorrectFormat = (privateKey) => {
+        const buffer = Buffer.from(privateKey);
+        const privateKeyObject = crypto_1.default.createPrivateKey({
+            key: buffer,
+            format: "pem",
+            passphrase: "password",
+        });
+        const privateKeyCorrectFormat = privateKeyObject.export({
+            format: "pem",
+            type: "pkcs8",
+        });
+        return privateKeyCorrectFormat.toString();
+    };
+    const executeQueryAndFormatData = () => __awaiter(void 0, void 0, void 0, function* () {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const queryResults = yield new Promise((resolve, reject) => {
+            connection.execute({
+                sqlText: query,
+                complete: (err, stmt, rows) => {
+                    if (err) {
+                        return reject(err);
+                    }
+                    return resolve(rows || []);
+                },
+            });
+        });
+        // Format the results based on the output format
+        let formattedData;
+        if (outputFormat.toLowerCase() === "csv") {
+            if (queryResults.length === 0) {
+                formattedData = "";
+            }
+            else {
+                const headers = Object.keys(queryResults[0]).join(",");
+                const rows = queryResults.map(row => Object.values(row)
+                    .map(value => (typeof value === "object" && value !== null ? JSON.stringify(value) : value))
+                    .join(","));
+                formattedData = [headers, ...rows].join("\n");
+            }
+        }
+        else {
+            // Default to JSON
+            formattedData = JSON.stringify(queryResults, null, 2);
+        }
+        return { formattedData, resultsLength: queryResults.length };
+    });
+    const uploadToS3AndGetURL = (formattedData) => __awaiter(void 0, void 0, void 0, function* () {
+        // Create S3 client
+        const s3Client = new client_s3_1.S3Client({
+            region: s3Region,
+            credentials: {
+                accessKeyId: awsAccessKeyId,
+                secretAccessKey: awsSecretAccessKey,
+            },
+        });
+        const contentType = outputFormat.toLowerCase() === "csv" ? "text/csv" : "application/json";
+        const fileExtension = outputFormat.toLowerCase() === "csv" ? "csv" : "json";
+        const finalKey = `${databaseName}/${(0, uuid_1.v4)()}.${fileExtension}`;
+        // Upload to S3 without ACL
+        const uploadCommand = new client_s3_1.PutObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+            Body: formattedData,
+            ContentType: contentType,
+        });
+        yield s3Client.send(uploadCommand);
+        // Generate a presigned URL (valid for an hour)
+        const getObjectCommand = new client_s3_1.GetObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+        });
+        const presignedUrl = yield (0, s3_request_presigner_1.getSignedUrl)(s3Client, getObjectCommand, { expiresIn: 3600 });
+        return presignedUrl;
+    });
+    // Process the private key
+    const privateKeyCorrectFormatString = getPrivateKeyCorrectFormat(privateKey);
+    // Set up a connection using snowflake-sdk
+    const connection = snowflake_sdk_1.default.createConnection({
+        account: accountName,
+        username: user,
+        privateKey: privateKeyCorrectFormatString,
+        authenticator: "SNOWFLAKE_JWT",
+        role: "ACCOUNTADMIN",
+        warehouse: warehouse,
+        database: databaseName,
+    });
+    try {
+        // Connect to Snowflake
+        yield new Promise((resolve, reject) => {
+            connection.connect((err, conn) => {
+                if (err) {
+                    console.error("Unable to connect to Snowflake:", err.message);
+                    return reject(err);
+                }
+                resolve(conn);
+            });
+        });
+        const { formattedData, resultsLength } = yield executeQueryAndFormatData();
+        const presignedUrl = yield uploadToS3AndGetURL(formattedData);
+        // Return fields to match schema definition
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        return {
+            bucketUrl: presignedUrl,
+            message: `Query results successfully written to S3. URL valid for 1 hour.`,
+            rowCount: resultsLength,
+        };
+    }
+    catch (error) {
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        throw Error(`An error occurred: ${error}`);
+    }
+});
+exports.default = runSnowflakeQueryWriteResultsToS3;
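
A hedged sketch of invoking the new run-query-and-write-to-S3 action. Every value is a placeholder, and the assumptions come straight from the implementation above: authParams.apiKey is expected to be a passphrase-protected PEM private key (decrypted with the hard-coded passphrase "password"), the connection uses key-pair JWT auth with the ACCOUNTADMIN role, and the returned bucketUrl is a presigned link that expires after one hour.

const runSnowflakeQueryWriteResultsToS3 = require("@credal/actions/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3").default;

runSnowflakeQueryWriteResultsToS3({
  params: {
    databaseName: "ANALYTICS_DB",
    warehouse: "COMPUTE_WH",
    query: "SELECT * FROM ORDERS LIMIT 100",
    user: "REPORTING_USER",
    accountName: "my-account",
    s3BucketName: "my-query-results",
    s3Region: "us-east-1",
    outputFormat: "csv",
  },
  authParams: {
    apiKey: "<passphrase-protected PEM private key>",
    awsAccessKeyId: "<aws-access-key-id>",
    awsSecretAccessKey: "<aws-secret-access-key>",
  },
}).then(({ bucketUrl, rowCount, message }) => console.log(rowCount, message, bucketUrl));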

package/dist/actions/providers/x/scrapeTweetDataWithNitter.js

@@ -0,0 +1,45 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const firecrawl_js_1 = __importDefault(require("@mendable/firecrawl-js"));
+const scrapeTweetDataWithNitter = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const tweetUrlRegex = /^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)\/([a-zA-Z0-9_]+)\/status\/(\d+)(?:\?.*)?$/;
+    if (!tweetUrlRegex.test(params.tweetUrl)) {
+        throw new Error("Invalid tweet URL. Expected format: https://twitter.com/username/status/id or https://x.com/username/status/id");
+    }
+    const nitterUrl = params.tweetUrl.replace(/^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)/i, "https://nitter.net");
+    // Initialize Firecrawl
+    if (!authParams.apiKey) {
+        throw new Error("API key is required for X+Nitter+Firecrawl");
+    }
+    const firecrawl = new firecrawl_js_1.default({
+        apiKey: authParams.apiKey,
+    });
+    try {
+        // Scrape the Nitter URL
+        const result = yield firecrawl.scrapeUrl(nitterUrl);
+        if (!result.success) {
+            throw new Error(`Failed to scrape tweet: ${result.error || "Unknown error"}`);
+        }
+        // Extract the tweet text from the scraped content - simple approach - in practice, you might need more robust parsing based on nitter html structure
+        const tweetContent = result.markdown;
+        return {
+            text: tweetContent || "Error scraping with firecrawl",
+        };
+    }
+    catch (error) {
+        throw new Error(`Error scraping tweet: ${error instanceof Error ? error.message : error}`);
+    }
+});
+exports.default = scrapeTweetDataWithNitter;
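
A hedged sketch of the new tweet-scraping action: it validates the twitter.com/x.com URL, rewrites it to nitter.net, scrapes the page with Firecrawl, and returns the page markdown as text. The URL and API key below are placeholders.

const scrapeTweetDataWithNitter = require("@credal/actions/dist/actions/providers/x/scrapeTweetDataWithNitter").default;

scrapeTweetDataWithNitter({
  params: { tweetUrl: "https://x.com/someuser/status/1234567890123456789" }, // placeholder tweet URL
  authParams: { apiKey: process.env.FIRECRAWL_API_KEY },                     // placeholder key source
}).then(({ text }) => console.log(text));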

package/package.json
CHANGED

package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.js

@@ -1,43 +0,0 @@
-"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const axiosClient_1 = require("../../util/axiosClient");
-const getSalesforceRecordByQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    const { authToken, baseUrl } = authParams;
-    const { query, limit } = params;
-    if (!authToken || !baseUrl) {
-        return {
-            success: false,
-            error: "authToken and baseUrl are required for Salesforce API",
-        };
-    }
-    // The API limits the maximum number of records returned to 2000, the limit lets the user set a smaller custom limit
-    const url = `${baseUrl}/services/data/v56.0/query/?q=${encodeURIComponent(query + " LIMIT " + (limit != undefined && limit <= 2000 ? limit : 2000))}`;
-    try {
-        const response = yield axiosClient_1.axiosClient.get(url, {
-            headers: {
-                Authorization: `Bearer ${authToken}`,
-            },
-        });
-        return {
-            success: true,
-            records: response.data,
-        };
-    }
-    catch (error) {
-        console.error("Error retrieving Salesforce record:", error);
-        return {
-            success: false,
-            error: error instanceof Error ? error.message : "An unknown error occurred",
-        };
-    }
-});
-exports.default = getSalesforceRecordByQuery;