@credal/actions 0.1.56 → 0.1.58
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/actions/autogen/templates.js +4 -0
- package/dist/actions/autogen/types.d.ts +3 -0
- package/dist/actions/autogen/types.js +1 -0
- package/dist/actions/providers/confluence/fetchPageContent.js +8 -13
- package/dist/actions/providers/confluence/helpers.d.ts +2 -0
- package/dist/actions/providers/confluence/helpers.js +12 -0
- package/dist/actions/providers/confluence/overwritePage.js +9 -14
- package/dist/actions/providers/confluence/updatePage.js +14 -15
- package/dist/actions/providers/generic/fillTemplateAction.d.ts +7 -0
- package/dist/actions/providers/generic/fillTemplateAction.js +18 -0
- package/dist/actions/providers/generic/genericApiCall.d.ts +3 -0
- package/dist/actions/providers/generic/genericApiCall.js +38 -0
- package/dist/actions/providers/googlemaps/nearbysearch.d.ts +3 -0
- package/dist/actions/providers/googlemaps/nearbysearch.js +96 -0
- package/dist/actions/providers/salesforce/createRecord.js +5 -1
- package/dist/actions/providers/snowflake/runSnowflakeQuery.js +4 -1
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.d.ts +3 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js +154 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.d.ts +3 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.js +45 -0
- package/package.json +1 -1
- package/dist/actions/providers/workday/requestTimeOff.d.ts +0 -23
- package/dist/actions/providers/workday/requestTimeOff.js +0 -88
package/README.md
CHANGED
@@ -6,6 +6,8 @@ Easily add custom actions for your Credal Copilots. Read more about Credal's Age
 
 ## Adding or updating actions
 
+We strongly encourage you to develop actions that rely on oauth based credentials. This is to ensure we respect the permissions of the underlying systems the actions library interacts with. In some situations, oauth is not a valid option and so API keys are a good fallback.
+
 1. Add or update the action in `src/actions/schema.yaml`
 2. Run `npm run generate:types` to generate the new types
 3. Run `npm run prettier-format` to format the new files
@@ -25,7 +27,7 @@ const result = await runAction(
   "listConversations",
   "slack",
   { authToken: "xoxb-..." },
-  {}
+  {}
 );
 ```
 
package/dist/actions/autogen/templates.js
CHANGED
@@ -1462,6 +1462,10 @@ exports.snowflakeRunSnowflakeQueryDefinition = {
                description: "The format of the output",
                enum: ["json", "csv"],
            },
+           limit: {
+               type: "number",
+               description: "A limit on the number of rows to return",
+           },
        },
    },
    output: {
package/dist/actions/autogen/types.d.ts
CHANGED
@@ -1263,17 +1263,20 @@ export declare const snowflakeRunSnowflakeQueryParamsSchema: z.ZodObject<{
    query: z.ZodString;
    accountName: z.ZodString;
    outputFormat: z.ZodOptional<z.ZodEnum<["json", "csv"]>>;
+   limit: z.ZodOptional<z.ZodNumber>;
}, "strip", z.ZodTypeAny, {
    query: string;
    databaseName: string;
    accountName: string;
    warehouse: string;
+   limit?: number | undefined;
    outputFormat?: "json" | "csv" | undefined;
}, {
    query: string;
    databaseName: string;
    accountName: string;
    warehouse: string;
+   limit?: number | undefined;
    outputFormat?: "json" | "csv" | undefined;
}>;
 export type snowflakeRunSnowflakeQueryParamsType = z.infer<typeof snowflakeRunSnowflakeQueryParamsSchema>;
package/dist/actions/autogen/types.js
CHANGED
@@ -438,6 +438,7 @@ exports.snowflakeRunSnowflakeQueryParamsSchema = zod_1.z.object({
    query: zod_1.z.string().describe("The SQL query to execute"),
    accountName: zod_1.z.string().describe("The name of the Snowflake account"),
    outputFormat: zod_1.z.enum(["json", "csv"]).describe("The format of the output").optional(),
+   limit: zod_1.z.number().describe("A limit on the number of rows to return").optional(),
});
 exports.snowflakeRunSnowflakeQueryOutputSchema = zod_1.z.object({
    format: zod_1.z.enum(["json", "csv"]).describe("The format of the output"),
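The three hunks above add an optional `limit` field to the Snowflake query action. A minimal sketch of validating parameters against the updated zod schema follows; the deep import path is an assumption, while the schema name and fields come from the hunks above.

```typescript
// Hypothetical sketch: validate Snowflake query params with the updated schema.
// Import path is assumed; schema name and fields are taken from the diff above.
import { snowflakeRunSnowflakeQueryParamsSchema } from "@credal/actions/dist/actions/autogen/types";

const params = snowflakeRunSnowflakeQueryParamsSchema.parse({
  databaseName: "ANALYTICS",
  warehouse: "COMPUTE_WH",
  accountName: "myorg-myaccount",
  query: "SELECT * FROM ORDERS",
  outputFormat: "csv",
  limit: 100, // new optional field in 0.1.58
});
```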
package/dist/actions/providers/confluence/fetchPageContent.js
CHANGED
@@ -9,26 +9,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
    });
};
 Object.defineProperty(exports, "__esModule", { value: true });
+const helpers_1 = require("./helpers");
 const axiosClient_1 = require("../../util/axiosClient");
-function getConfluenceRequestConfig(baseUrl, username, apiToken) {
-    return {
-        baseURL: baseUrl,
-        headers: {
-            Accept: "application/json",
-            Authorization: `Basic ${Buffer.from(`${username}:${apiToken}`).toString("base64")}`,
-        },
-    };
-}
 const confluenceFetchPageContent = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
    var _b, _c;
    const { pageId } = params;
-    const {
-    if (!
+    const { authToken } = authParams;
+    if (!authToken) {
        throw new Error("Missing required authentication parameters");
    }
-    const
+    const cloudDetails = yield axiosClient_1.axiosClient.get("https://api.atlassian.com/oauth/token/accessible-resources");
+    const cloudId = cloudDetails.data[0].id;
+    const baseUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/api/v2`;
+    const config = (0, helpers_1.getConfluenceRequestConfig)(baseUrl, authToken);
    // Get page content and metadata
-    const response = yield axiosClient_1.axiosClient.get(`/
+    const response = yield axiosClient_1.axiosClient.get(`/pages/${pageId}?body-format=storage`, config);
    // Extract needed data from response
    const title = response.data.title;
    const content = ((_c = (_b = response.data.body) === null || _b === void 0 ? void 0 : _b.storage) === null || _c === void 0 ? void 0 : _c.value) || "";
package/dist/actions/providers/confluence/helpers.js
ADDED
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getConfluenceRequestConfig = getConfluenceRequestConfig;
+function getConfluenceRequestConfig(baseUrl, authToken) {
+    return {
+        baseURL: baseUrl,
+        headers: {
+            Accept: "application/json",
+            Authorization: `Bearer ${authToken}`,
+        },
+    };
+}
package/dist/actions/providers/confluence/overwritePage.js
CHANGED
@@ -10,24 +10,19 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
 Object.defineProperty(exports, "__esModule", { value: true });
 const axiosClient_1 = require("../../util/axiosClient");
-
-    return {
-        baseURL: baseUrl,
-        headers: {
-            Accept: "application/json",
-            Authorization: `Basic ${Buffer.from(`${username}:${apiToken}`).toString("base64")}`,
-        },
-    };
-}
+const helpers_1 = require("./helpers");
 const confluenceOverwritePage = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
    const { pageId, content, title } = params;
-    const {
-    if (!
+    const { authToken } = authParams;
+    if (!authToken) {
        throw new Error("Missing required authentication parameters");
    }
-    const
+    const cloudDetails = yield axiosClient_1.axiosClient.get("https://api.atlassian.com/oauth/token/accessible-resources");
+    const cloudId = cloudDetails.data[0].id;
+    const baseUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/api/v2`;
+    const config = (0, helpers_1.getConfluenceRequestConfig)(baseUrl, authToken);
    // Get current page content and version number
-    const response = yield axiosClient_1.axiosClient.get(`/
+    const response = yield axiosClient_1.axiosClient.get(`/pages/${pageId}?body-format=storage`, config);
    const currVersion = response.data.version.number;
    const payload = {
        id: pageId,
@@ -41,6 +36,6 @@ const confluenceOverwritePage = (_a) => __awaiter(void 0, [_a], void 0, function
            number: currVersion + 1,
        },
    };
-    yield axiosClient_1.axiosClient.put(`/
+    yield axiosClient_1.axiosClient.put(`/pages/${pageId}`, payload, config);
});
 exports.default = confluenceOverwritePage;
package/dist/actions/providers/confluence/updatePage.js
CHANGED
@@ -8,30 +8,28 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-const
-function
-
+const axiosClient_1 = require("../../util/axiosClient");
+function getConfluenceRequestConfig(baseUrl, username, apiToken) {
+    return {
        baseURL: baseUrl,
        headers: {
            Accept: "application/json",
-            // Tokens are associated with a specific user.
            Authorization: `Basic ${Buffer.from(`${username}:${apiToken}`).toString("base64")}`,
        },
-    }
-    return api;
+    };
}
 const confluenceUpdatePage = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    const { pageId,
-    const { baseUrl, authToken } = authParams;
-
+    const { pageId, content, title } = params;
+    const { baseUrl, authToken, username } = authParams;
+    if (!baseUrl || !authToken || !username) {
+        throw new Error("Missing required authentication information");
+    }
+    const config = getConfluenceRequestConfig(baseUrl, username, authToken);
    // Get current version number
-    const response = yield
+    const response = yield axiosClient_1.axiosClient.get(`/api/v2/pages/${pageId}`, config);
    const currVersion = response.data.version.number;
-
+    const payload = {
        id: pageId,
        status: "current",
        title,
@@ -42,6 +40,7 @@ const confluenceUpdatePage = (_a) => __awaiter(void 0, [_a], void 0, function* (
        version: {
            number: currVersion + 1,
        },
-    }
+    };
+    yield axiosClient_1.axiosClient.put(`/api/v2/pages/${pageId}`, payload, config);
});
 exports.default = confluenceUpdatePage;
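Unlike the two actions above, updatePage still authenticates with Basic auth against a caller-supplied base URL. A hedged sketch follows; the action and provider names and the example base URL are assumptions, while the parameter names come from the diff above.

```typescript
// Hypothetical sketch: the updatePage handler expects baseUrl, username, and authToken
// (used as a Confluence API token for Basic auth). Names marked below are assumptions.
import { runAction } from "@credal/actions";

await runAction(
  "updatePage",    // assumed action name
  "confluence",    // assumed provider name
  {
    baseUrl: "https://your-site.atlassian.net/wiki", // example value
    username: "you@example.com",
    authToken: "<confluence-api-token>",
  },
  { pageId: "123456", title: "Release notes", content: "<p>Updated body</p>" }
);
```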
package/dist/actions/providers/generic/fillTemplateAction.js
ADDED
@@ -0,0 +1,18 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const fillTemplateAction = (_a) => __awaiter(void 0, [_a], void 0, function* ({ template }) {
+    // Simply return the template without any modification
+    return {
+        result: template,
+    };
+});
+exports.default = fillTemplateAction;
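The new handler simply echoes the template back as `result`, leaving any substitution to the caller. A minimal direct-call sketch, assuming a hypothetical deep import path:

```typescript
// Minimal usage sketch (hypothetical import path); behavior taken from the handler above.
import fillTemplateAction from "@credal/actions/dist/actions/providers/generic/fillTemplateAction";

const { result } = await fillTemplateAction({ template: "Summarize {{document}} in three bullets." });
// result === "Summarize {{document}} in three bullets."
```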
package/dist/actions/providers/generic/genericApiCall.js
ADDED
@@ -0,0 +1,38 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const axios_1 = __importDefault(require("axios"));
+const genericApiCall = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, }) {
+    try {
+        const { endpoint, method, headers, body } = params;
+        const response = yield (0, axios_1.default)({
+            url: endpoint,
+            method,
+            headers,
+            data: method !== "GET" ? body : undefined,
+        });
+        return {
+            statusCode: response.status,
+            headers: response.headers,
+            data: response.data,
+        };
+    }
+    catch (error) {
+        if (axios_1.default.isAxiosError(error)) {
+            throw Error("Axios Error: " + (error.message || "Failed to make API call"));
+        }
+        throw Error("Error: " + (error || "Failed to make API call"));
+    }
+});
+exports.default = genericApiCall;
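This action is a thin axios wrapper that returns status, headers, and body, and drops the request body for GET requests. A hedged direct-call sketch, with a hypothetical import path and a placeholder endpoint:

```typescript
// Hypothetical usage sketch; parameter and return field names come from the handler above.
import genericApiCall from "@credal/actions/dist/actions/providers/generic/genericApiCall";

const { statusCode, data } = await genericApiCall({
  params: {
    endpoint: "https://api.example.com/v1/items", // placeholder endpoint
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: { name: "example" },
  },
});
```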
package/dist/actions/providers/googlemaps/nearbysearch.js
ADDED
@@ -0,0 +1,96 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const axios_1 = __importDefault(require("axios"));
+const types_1 = require("../../autogen/types");
+const INCLUDED_TYPES = [
+    "monument",
+    "museum",
+    "art_gallery",
+    "sculpture",
+    "cultural_landmark",
+    "historical_place",
+    "performing_arts_theater",
+    "university",
+    "aquarium",
+    "botanical_garden",
+    "comedy_club",
+    "park",
+    "movie_theater",
+    "national_park",
+    "garden",
+    "night_club",
+    "tourist_attraction",
+    "water_park",
+    "zoo",
+    "bar",
+    "restaurant",
+    "food_court",
+    "bakery",
+    "cafe",
+    "coffee_shop",
+    "pub",
+    "wine_bar",
+    "spa",
+    "beach",
+    "market",
+    "shopping_mall",
+    "stadium",
+];
+const nearbysearch = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const url = `https://places.googleapis.com/v1/places:searchNearby`;
+    const fieldMask = [
+        "places.displayName",
+        "places.formattedAddress",
+        "places.priceLevel",
+        "places.rating",
+        "places.primaryTypeDisplayName",
+        "places.editorialSummary",
+        "places.regularOpeningHours",
+    ].join(",");
+    const response = yield axios_1.default.post(url, {
+        maxResultCount: 20,
+        includedTypes: INCLUDED_TYPES,
+        locationRestriction: {
+            circle: {
+                center: {
+                    latitude: params.latitude,
+                    longitude: params.longitude,
+                },
+                radius: 10000,
+            },
+        },
+    }, {
+        headers: {
+            "X-Goog-Api-Key": authParams.apiKey,
+            "X-Goog-FieldMask": fieldMask,
+            "Content-Type": "application/json",
+        },
+    });
+    return types_1.googlemapsNearbysearchOutputSchema.parse({
+        results: response.data.places.map((place) => {
+            var _a, _b;
+            return ({
+                name: place.displayName.text,
+                address: place.formattedAddress,
+                priceLevel: place.priceLevel,
+                rating: place.rating,
+                primaryType: place.primaryTypeDisplayName.text,
+                editorialSummary: ((_a = place.editorialSummary) === null || _a === void 0 ? void 0 : _a.text) || "",
+                openingHours: ((_b = place.regularOpeningHours) === null || _b === void 0 ? void 0 : _b.weekdayDescriptions.join("\n")) || "",
+            });
+        }),
+    });
+});
+exports.default = nearbysearch;
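The handler searches a fixed 10 km radius around the given coordinates with the Places API searchNearby endpoint, using an API-key header. A hedged direct-call sketch (hypothetical import path; only the fields the handler actually reads are shown):

```typescript
// Hypothetical usage sketch; params and return shape come from the handler above.
import nearbysearch from "@credal/actions/dist/actions/providers/googlemaps/nearbysearch";

const { results } = await nearbysearch({
  params: { latitude: 40.7484, longitude: -73.9857 },
  authParams: { apiKey: process.env.GOOGLE_MAPS_API_KEY! },
});
// Each result carries name, address, priceLevel, rating, primaryType, editorialSummary, openingHours.
```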
package/dist/actions/providers/salesforce/createRecord.js
CHANGED
@@ -36,7 +36,11 @@ const createRecord = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params
        console.error("Error creating Salesforce object:", error);
        return {
            success: false,
-            error: error instanceof
+            error: error instanceof axiosClient_1.ApiError
+                ? error.data.length > 0
+                    ? error.data[0].message
+                    : error.message
+                : "An unknown error occurred",
        };
    }
});
package/dist/actions/providers/snowflake/runSnowflakeQuery.js
CHANGED
@@ -17,7 +17,7 @@ const getSnowflakeConnection_1 = require("./auth/getSnowflakeConnection");
 const formatDataForCodeInterpreter_1 = require("../../util/formatDataForCodeInterpreter");
 snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
 const runSnowflakeQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    const { databaseName, warehouse, query, accountName, outputFormat = "json" } = params;
+    const { databaseName, warehouse, query, accountName, outputFormat = "json", limit } = params;
    if (!accountName || !databaseName || !warehouse || !query) {
        throw new Error("Missing required parameters for Snowflake query");
    }
@@ -36,6 +36,9 @@ const runSnowflakeQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ p
        });
    });
    // Format the results based on the output format
+    if (limit && queryResults.length - 1 > limit) {
+        queryResults.splice(limit + 1); // Include header
+    }
    const { formattedData, resultsLength } = (0, formatDataForCodeInterpreter_1.formatDataForCodeInterpreter)(queryResults, outputFormat);
    return { formattedData, resultsLength };
});
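A hedged sketch of exercising the new `limit` parameter through `runAction`, following the README calling convention. The action and provider names are inferred from the generated schema naming (snowflakeRunSnowflakeQuery...) and are not confirmed by this diff; the auth shape depends on your Snowflake connection setup.

```typescript
// Hypothetical sketch; only the params fields are confirmed by the diff above.
import { runAction } from "@credal/actions";

const result = await runAction(
  "runSnowflakeQuery",   // assumed action name
  "snowflake",           // assumed provider name
  { /* Snowflake credentials expected by getSnowflakeConnection */ },
  {
    accountName: "myorg-myaccount",
    databaseName: "ANALYTICS",
    warehouse: "COMPUTE_WH",
    query: "SELECT * FROM ORDERS",
    outputFormat: "csv",
    limit: 50, // new in 0.1.58: extra rows are spliced off, keeping the CSV header row
  }
);
```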
package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js
ADDED
@@ -0,0 +1,154 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const snowflake_sdk_1 = __importDefault(require("snowflake-sdk"));
+const crypto_1 = __importDefault(require("crypto"));
+const client_s3_1 = require("@aws-sdk/client-s3");
+const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
+const uuid_1 = require("uuid");
+// Only log errors.
+snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
+const runSnowflakeQueryWriteResultsToS3 = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const { databaseName, warehouse, query, user, accountName, s3BucketName, s3Region, outputFormat = "json" } = params;
+    const { apiKey: privateKey, awsAccessKeyId, awsSecretAccessKey } = authParams;
+    if (!privateKey) {
+        throw new Error("Snowflake private key is required");
+    }
+    if (!awsAccessKeyId || !awsSecretAccessKey) {
+        throw new Error("AWS credentials are required");
+    }
+    if (!accountName || !user || !databaseName || !warehouse || !query || !s3BucketName) {
+        throw new Error("Missing required parameters for Snowflake query or S3 destination");
+    }
+    const getPrivateKeyCorrectFormat = (privateKey) => {
+        const buffer = Buffer.from(privateKey);
+        const privateKeyObject = crypto_1.default.createPrivateKey({
+            key: buffer,
+            format: "pem",
+            passphrase: "password",
+        });
+        const privateKeyCorrectFormat = privateKeyObject.export({
+            format: "pem",
+            type: "pkcs8",
+        });
+        return privateKeyCorrectFormat.toString();
+    };
+    const executeQueryAndFormatData = () => __awaiter(void 0, void 0, void 0, function* () {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const queryResults = yield new Promise((resolve, reject) => {
+            connection.execute({
+                sqlText: query,
+                complete: (err, stmt, rows) => {
+                    if (err) {
+                        return reject(err);
+                    }
+                    return resolve(rows || []);
+                },
+            });
+        });
+        // Format the results based on the output format
+        let formattedData;
+        if (outputFormat.toLowerCase() === "csv") {
+            if (queryResults.length === 0) {
+                formattedData = "";
+            }
+            else {
+                const headers = Object.keys(queryResults[0]).join(",");
+                const rows = queryResults.map(row => Object.values(row)
+                    .map(value => (typeof value === "object" && value !== null ? JSON.stringify(value) : value))
+                    .join(","));
+                formattedData = [headers, ...rows].join("\n");
+            }
+        }
+        else {
+            // Default to JSON
+            formattedData = JSON.stringify(queryResults, null, 2);
+        }
+        return { formattedData, resultsLength: queryResults.length };
+    });
+    const uploadToS3AndGetURL = (formattedData) => __awaiter(void 0, void 0, void 0, function* () {
+        // Create S3 client
+        const s3Client = new client_s3_1.S3Client({
+            region: s3Region,
+            credentials: {
+                accessKeyId: awsAccessKeyId,
+                secretAccessKey: awsSecretAccessKey,
+            },
+        });
+        const contentType = outputFormat.toLowerCase() === "csv" ? "text/csv" : "application/json";
+        const fileExtension = outputFormat.toLowerCase() === "csv" ? "csv" : "json";
+        const finalKey = `${databaseName}/${(0, uuid_1.v4)()}.${fileExtension}`;
+        // Upload to S3 without ACL
+        const uploadCommand = new client_s3_1.PutObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+            Body: formattedData,
+            ContentType: contentType,
+        });
+        yield s3Client.send(uploadCommand);
+        // Generate a presigned URL (valid for an hour)
+        const getObjectCommand = new client_s3_1.GetObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+        });
+        const presignedUrl = yield (0, s3_request_presigner_1.getSignedUrl)(s3Client, getObjectCommand, { expiresIn: 3600 });
+        return presignedUrl;
+    });
+    // Process the private key
+    const privateKeyCorrectFormatString = getPrivateKeyCorrectFormat(privateKey);
+    // Set up a connection using snowflake-sdk
+    const connection = snowflake_sdk_1.default.createConnection({
+        account: accountName,
+        username: user,
+        privateKey: privateKeyCorrectFormatString,
+        authenticator: "SNOWFLAKE_JWT",
+        role: "ACCOUNTADMIN",
+        warehouse: warehouse,
+        database: databaseName,
+    });
+    try {
+        // Connect to Snowflake
+        yield new Promise((resolve, reject) => {
+            connection.connect((err, conn) => {
+                if (err) {
+                    console.error("Unable to connect to Snowflake:", err.message);
+                    return reject(err);
+                }
+                resolve(conn);
+            });
+        });
+        const { formattedData, resultsLength } = yield executeQueryAndFormatData();
+        const presignedUrl = yield uploadToS3AndGetURL(formattedData);
+        // Return fields to match schema definition
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        return {
+            bucketUrl: presignedUrl,
+            message: `Query results successfully written to S3. URL valid for 1 hour.`,
+            rowCount: resultsLength,
+        };
+    }
+    catch (error) {
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        throw Error(`An error occurred: ${error}`);
+    }
+});
+exports.default = runSnowflakeQueryWriteResultsToS3;
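This action runs the query with key-pair (JWT) auth, writes the formatted results to S3, and returns a presigned URL valid for one hour. Note that, per the code above, the PEM private key is decrypted with the fixed passphrase "password" and the connection uses the ACCOUNTADMIN role. A hedged direct-call sketch (hypothetical import path, placeholder values):

```typescript
// Hypothetical usage sketch; parameter and return field names come from the handler above.
import runSnowflakeQueryWriteResultsToS3 from "@credal/actions/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3";

const { bucketUrl, rowCount } = await runSnowflakeQueryWriteResultsToS3({
  params: {
    accountName: "myorg-myaccount",
    user: "REPORTING_USER",
    databaseName: "ANALYTICS",
    warehouse: "COMPUTE_WH",
    query: "SELECT * FROM ORDERS",
    s3BucketName: "my-results-bucket",
    s3Region: "us-east-1",
    outputFormat: "csv",
  },
  authParams: {
    apiKey: process.env.SNOWFLAKE_PRIVATE_KEY!, // encrypted PEM private key
    awsAccessKeyId: process.env.AWS_ACCESS_KEY_ID!,
    awsSecretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  },
});
// bucketUrl is a presigned GET URL; rowCount is the number of rows returned.
```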
package/dist/actions/providers/x/scrapeTweetDataWithNitter.js
ADDED
@@ -0,0 +1,45 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const firecrawl_js_1 = __importDefault(require("@mendable/firecrawl-js"));
+const scrapeTweetDataWithNitter = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const tweetUrlRegex = /^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)\/([a-zA-Z0-9_]+)\/status\/(\d+)(?:\?.*)?$/;
+    if (!tweetUrlRegex.test(params.tweetUrl)) {
+        throw new Error("Invalid tweet URL. Expected format: https://twitter.com/username/status/id or https://x.com/username/status/id");
+    }
+    const nitterUrl = params.tweetUrl.replace(/^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)/i, "https://nitter.net");
+    // Initialize Firecrawl
+    if (!authParams.apiKey) {
+        throw new Error("API key is required for X+Nitter+Firecrawl");
+    }
+    const firecrawl = new firecrawl_js_1.default({
+        apiKey: authParams.apiKey,
+    });
+    try {
+        // Scrape the Nitter URL
+        const result = yield firecrawl.scrapeUrl(nitterUrl);
+        if (!result.success) {
+            throw new Error(`Failed to scrape tweet: ${result.error || "Unknown error"}`);
+        }
+        // Extract the tweet text from the scraped content - simple approach - in practice, you might need more robust parsing based on nitter html structure
+        const tweetContent = result.markdown;
+        return {
+            text: tweetContent || "Error scraping with firecrawl",
+        };
+    }
+    catch (error) {
+        throw new Error(`Error scraping tweet: ${error instanceof Error ? error.message : error}`);
+    }
+});
+exports.default = scrapeTweetDataWithNitter;
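The handler rewrites the tweet URL to nitter.net and scrapes the resulting page with Firecrawl, so the apiKey is a Firecrawl API key. A hedged direct-call sketch (hypothetical import path, example tweet URL):

```typescript
// Hypothetical usage sketch; parameter names and return shape come from the handler above.
import scrapeTweetDataWithNitter from "@credal/actions/dist/actions/providers/x/scrapeTweetDataWithNitter";

const { text } = await scrapeTweetDataWithNitter({
  params: { tweetUrl: "https://x.com/someuser/status/1234567890123456789" },
  authParams: { apiKey: process.env.FIRECRAWL_API_KEY! },
});
```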
package/package.json
CHANGED

package/dist/actions/providers/workday/requestTimeOff.d.ts
REMOVED
@@ -1,23 +0,0 @@
-declare const axios: any;
-declare const WORKDAY_BASE_URL = "https://your-workday-url/ccx/service/YOUR_TENANT/Absence_Management/v43.2";
-declare const TOKEN_URL = "https://your-workday-url/oauth2/YOUR_TENANT/token";
-declare const CLIENT_ID = "your-client-id";
-declare const CLIENT_SECRET = "your-client-secret";
-/**
- * Fetches an OAuth 2.0 access token from Workday.
- */
-declare function getAccessToken(): Promise<any>;
-/**
- * Submits a time-off request to Workday.
- * @param {Object} params - Time-off details.
- * @param {string} params.workerId - Worker's ID in Workday.
- * @param {string} params.startDate - Start date (YYYY-MM-DD).
- * @param {string} params.endDate - End date (YYYY-MM-DD).
- * @param {string} params.timeOffType - Time-off type (e.g., "SICK_LEAVE").
- */
-declare function submitTimeOff({ workerId, startDate, endDate, timeOffType }: {
-    workerId: any;
-    startDate: any;
-    endDate: any;
-    timeOffType: any;
-}): Promise<any>;

package/dist/actions/providers/workday/requestTimeOff.js
REMOVED
@@ -1,88 +0,0 @@
-"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-const axios = require("axios");
-const WORKDAY_BASE_URL = "https://your-workday-url/ccx/service/YOUR_TENANT/Absence_Management/v43.2";
-const TOKEN_URL = "https://your-workday-url/oauth2/YOUR_TENANT/token"; // OAuth token endpoint
-const CLIENT_ID = "your-client-id";
-const CLIENT_SECRET = "your-client-secret";
-/**
- * Fetches an OAuth 2.0 access token from Workday.
- */
-function getAccessToken() {
-    return __awaiter(this, void 0, void 0, function* () {
-        var _a;
-        try {
-            const response = yield axios.post(TOKEN_URL, new URLSearchParams({ grant_type: "client_credentials" }), {
-                auth: {
-                    username: CLIENT_ID,
-                    password: CLIENT_SECRET
-                },
-                headers: { "Content-Type": "application/x-www-form-urlencoded" }
-            });
-            return response.data.access_token;
-        }
-        catch (error) {
-            console.error("Error fetching access token:", ((_a = error.response) === null || _a === void 0 ? void 0 : _a.data) || error.message);
-            throw error;
-        }
-    });
-}
-/**
- * Submits a time-off request to Workday.
- * @param {Object} params - Time-off details.
- * @param {string} params.workerId - Worker's ID in Workday.
- * @param {string} params.startDate - Start date (YYYY-MM-DD).
- * @param {string} params.endDate - End date (YYYY-MM-DD).
- * @param {string} params.timeOffType - Time-off type (e.g., "SICK_LEAVE").
- */
-function submitTimeOff(_a) {
-    return __awaiter(this, arguments, void 0, function* ({ workerId, startDate, endDate, timeOffType }) {
-        var _b;
-        try {
-            const token = yield getAccessToken(); // Get OAuth token
-            const requestBody = {
-                "wd:Enter_Time_Off_Request": {
-                    "wd:Worker_Reference": {
-                        "wd:ID": [{ "_": workerId, "$": { "wd:type": "WID" } }]
-                    },
-                    "wd:Time_Off_Entries": [
-                        {
-                            "wd:Start_Date": startDate,
-                            "wd:End_Date": endDate,
-                            "wd:Time_Off_Type_Reference": {
-                                "wd:ID": [{ "_": timeOffType, "$": { "wd:type": "Time_Off_Type_ID" } }]
-                            }
-                        }
-                    ]
-                }
-            };
-            const response = yield axios.post(`${WORKDAY_BASE_URL}/Enter_Time_Off`, requestBody, {
-                headers: {
-                    "Authorization": `Bearer ${token}`,
-                    "Content-Type": "application/json"
-                }
-            });
-            console.log("Time-off request submitted successfully:", response.data);
-            return response.data;
-        }
-        catch (error) {
-            console.error("Error submitting time-off request:", ((_b = error.response) === null || _b === void 0 ? void 0 : _b.data) || error.message);
-            throw error;
-        }
-    });
-}
-// Example Usage:
-submitTimeOff({
-    workerId: "12345",
-    startDate: "2025-03-10",
-    endDate: "2025-03-12",
-    timeOffType: "SICK_LEAVE"
-}).then(console.log).catch(console.error);