@credal/actions 0.2.153 → 0.2.155
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/actions/autogen/templates.js +29 -0
- package/dist/actions/autogen/types.d.ts +25 -4
- package/dist/actions/autogen/types.js +28 -0
- package/dist/actions/groups.js +14 -1
- package/dist/actions/providers/confluence/updatePage.d.ts +3 -0
- package/dist/actions/providers/confluence/updatePage.js +46 -0
- package/dist/actions/providers/generic/fillTemplateAction.d.ts +7 -0
- package/dist/actions/providers/generic/fillTemplateAction.js +18 -0
- package/dist/actions/providers/generic/genericApiCall.d.ts +3 -0
- package/dist/actions/providers/generic/genericApiCall.js +38 -0
- package/dist/actions/providers/google-oauth/getDriveContentById.d.ts +3 -0
- package/dist/actions/providers/google-oauth/getDriveContentById.js +161 -0
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.js +47 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.js +110 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.js +78 -0
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.d.ts +15 -0
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.js +129 -0
- package/dist/actions/providers/googlemaps/nearbysearch.d.ts +3 -0
- package/dist/actions/providers/googlemaps/nearbysearch.js +96 -0
- package/dist/actions/providers/slack/archiveChannel.js +9 -2
- package/dist/actions/providers/slack/getChannelMessages.js +98 -2
- package/dist/actions/providers/slack/messageTransformers.d.ts +53 -0
- package/dist/actions/providers/slack/messageTransformers.js +267 -0
- package/dist/actions/providers/slackUser/getSlackMessagesInTimeRange.d.ts +3 -0
- package/dist/actions/providers/slackUser/getSlackMessagesInTimeRange.js +81 -0
- package/dist/actions/providers/slackUser/searchSlack.d.ts +0 -15
- package/dist/actions/providers/slackUser/searchSlack.js +70 -193
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.d.ts +3 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js +154 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.d.ts +3 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.js +45 -0
- package/package.json +2 -2
- package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.d.ts +0 -3
- package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.js +0 -43
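The headline change in this range is to the slackUser searchSlack action: its message-parsing helpers (extractMessageText, transformToSlackMessage, simplifyFile) move out into the shared slack/messageTransformers module, and the action gains a fetchAdjacentMessages parameter (defaulting to true) that controls whether non-thread hits are expanded with a surrounding context window. Below is a minimal invocation sketch based on the ({ params, authParams }) signature visible in the diff; the import path, the export form, and the concrete values are illustrative assumptions, not documented API.

// Hypothetical usage sketch (import path and values assumed, parameter names taken from the diff)
import searchSlack from "@credal/actions/dist/actions/providers/slackUser/searchSlack.js";

const results = await searchSlack({
  params: {
    topic: "quarterly planning",     // free-text topic; other fields in the diff: emails, timeRange, channel
    limit: 20,                       // defaults to 20 in the diff
    fetchAdjacentMessages: false,    // new in this diff; skips context-window expansion for non-thread hits
  },
  authParams: { authToken: "xoxp-..." }, // a missing token throws MISSING_AUTH_TOKEN
});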
package/dist/actions/providers/slackUser/searchSlack.js
CHANGED

@@ -10,6 +10,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 import { WebClient } from "@slack/web-api";
 import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
 import pLimit from "p-limit";
+import { extractMessageText, transformToSlackMessage, simplifyFile, } from "../slack/messageTransformers.js";
 /* ===================== Constants ===================== */
 const HIT_ENRICH_POOL = 2; // keep concurrency conservative to avoid 429s
 const limitHit = pLimit(HIT_ENRICH_POOL);
@@ -63,164 +64,6 @@ class SlackUserCache {
     }
 }
 /* ===================== Helpers ===================== */
-/* ===================== Helpers ===================== */
-/**
- * Extracts all visible text from a Slack message
- */
-export function extractMessageText(m) {
-    var _a, _b, _c, _d;
-    if (!m)
-        return undefined;
-    const pieces = [];
-    // ---- Rich text helpers ----
-    const walkRichTextInline = (el) => {
-        var _a;
-        const blockPieces = [];
-        switch (el.type) {
-            case "text":
-                blockPieces.push(el.text);
-                break;
-            case "link":
-                blockPieces.push(el.text || el.url);
-                break;
-            case "user":
-                blockPieces.push(`<@${el.user_id}>`);
-                break;
-            case "channel":
-                blockPieces.push(`<#${el.channel_id}>`);
-                break;
-            case "emoji":
-                blockPieces.push(`:${el.name}:`);
-                break;
-            case "broadcast":
-                blockPieces.push(`@${el.range}`);
-                break;
-            case "date":
-                blockPieces.push((_a = el.fallback) !== null && _a !== void 0 ? _a : `<date:${el.timestamp}>`);
-                break;
-            case "team":
-                blockPieces.push(`<team:${el.team_id}>`);
-                break;
-            case "usergroup":
-                blockPieces.push(`<usergroup:${el.usergroup_id}>`);
-                break;
-            case "color":
-                // Usually formatting only, skip
-                break;
-        }
-        return blockPieces;
-    };
-    const walkRichTextElement = (el) => {
-        const result = [];
-        switch (el.type) {
-            case "rich_text_section":
-            case "rich_text_quote":
-                result.push(el.elements.map(walkRichTextInline).join("\n"));
-                break;
-            case "rich_text_list":
-                result.push(el.elements.map(section => section.elements.map(walkRichTextInline).join("\n")).join("\n"));
-                break;
-            case "rich_text_preformatted":
-                result.push(el.elements.map(walkRichTextInline).join("\n"));
-                break;
-        }
-        return result;
-    };
-    // ---- Block helpers ----
-    const walkBlock = (block) => {
-        var _a, _b, _c, _d, _e, _f, _g;
-        const blockPieces = [];
-        switch (block.type) {
-            case "section":
-                if ((_a = block.text) === null || _a === void 0 ? void 0 : _a.text)
-                    blockPieces.push(block.text.text);
-                if (block.fields) {
-                    for (const f of block.fields)
-                        if (f.text)
-                            blockPieces.push(f.text);
-                }
-                if (block.accessory && "text" in block.accessory && block.accessory.text) {
-                    blockPieces.push(block.accessory.text.text);
-                }
-                break;
-            case "context":
-                if (Array.isArray(block.elements)) {
-                    block.elements.forEach(el => {
-                        if ("text" in el && el.text)
-                            blockPieces.push(el.text);
-                    });
-                }
-                break;
-            case "header":
-                if ((_b = block.text) === null || _b === void 0 ? void 0 : _b.text)
-                    blockPieces.push(block.text.text);
-                break;
-            case "rich_text":
-                blockPieces.push(block.elements.map(walkRichTextElement).join("\n"));
-                break;
-            case "markdown":
-                if (block.text)
-                    blockPieces.push(block.text);
-                break;
-            case "video":
-                if ((_c = block.title) === null || _c === void 0 ? void 0 : _c.text)
-                    blockPieces.push(block.title.text);
-                if ((_d = block.description) === null || _d === void 0 ? void 0 : _d.text)
-                    blockPieces.push(block.description.text);
-                break;
-            case "image":
-                if ((_e = block.title) === null || _e === void 0 ? void 0 : _e.text)
-                    blockPieces.push(block.title.text);
-                break;
-            case "input":
-                if ((_f = block.label) === null || _f === void 0 ? void 0 : _f.text)
-                    blockPieces.push(block.label.text);
-                if ((_g = block.hint) === null || _g === void 0 ? void 0 : _g.text)
-                    blockPieces.push(block.hint.text);
-                break;
-            // divider, file, actions, input don’t contribute visible text
-            case "divider":
-            case "file":
-            case "actions":
-                break;
-        }
-        return blockPieces;
-    };
-    let blockText = "";
-    if (Array.isArray(m.blocks)) {
-        const blockPieces = m.blocks.map(b => walkBlock(b));
-        blockText = blockPieces.join("\n");
-    }
-    if (blockText) {
-        pieces.push(blockText);
-    }
-    else if (m.text) {
-        pieces.push(m.text);
-    }
-    // 3. Attachments
-    if (m.attachments) {
-        for (const att of m.attachments) {
-            if (att.pretext)
-                pieces.push(att.pretext);
-            if (att.title)
-                pieces.push(att.title);
-            if (att.text)
-                pieces.push(att.text);
-            if (att.fields) {
-                for (const f of att.fields) {
-                    const title = (_b = (_a = f.title) === null || _a === void 0 ? void 0 : _a.trim()) !== null && _b !== void 0 ? _b : "";
-                    const value = (_d = (_c = f.value) === null || _c === void 0 ? void 0 : _c.trim()) !== null && _d !== void 0 ? _d : "";
-                    if (title || value) {
-                        pieces.push(title && value ? `${title}: ${value}` : title || value);
-                    }
-                }
-            }
-        }
-    }
-    // Deduplicate and join
-    const out = Array.from(new Set(pieces.map(s => s.trim()).filter(Boolean))).join("\n");
-    return out || undefined;
-}
 function normalizeChannelOperand(ch) {
     const s = ch.trim();
     if (/^[CGD][A-Z0-9]/i.test(s))
@@ -297,17 +140,6 @@ function getPermalink(client, channel, ts) {
         }
     });
 }
-function transformToSlackMessage(message) {
-    return {
-        ts: message.ts,
-        text: message.text,
-        user: message.user,
-        username: message.username,
-        thread_ts: message.thread_ts,
-        blocks: message.blocks,
-        attachments: message.attachments,
-    };
-}
 function fetchOneMessage(client, channel, ts) {
     return __awaiter(this, void 0, void 0, function* () {
         var _a;
@@ -397,17 +229,45 @@ function searchByTopic(input) {
         return (_b = (_a = searchRes.messages) === null || _a === void 0 ? void 0 : _a.matches) !== null && _b !== void 0 ? _b : [];
     });
 }
+/**
+ * Deduplicates and merges Slack threads.
+ * When multiple search hits point to the same thread (same thread_ts),
+ * we merge them into a single result with all unique messages in context.
+ */
 function dedupeAndSort(results) {
-
-
-
-
-
-
-
+    var _a, _b, _c, _d, _e, _f, _g;
+    // Group by thread: channelId + ts (where ts is the root thread_ts)
+    const threadMap = new Map();
+    for (const result of results) {
+        const threadKey = `${result.channelId}-${result.ts}`;
+        const existing = threadMap.get(threadKey);
+        if (!existing) {
+            // First time seeing this thread
+            threadMap.set(threadKey, result);
+        }
+        else {
+            // Merge: dedupe context messages by ts
+            const existingTsSet = new Set((_b = (_a = existing.context) === null || _a === void 0 ? void 0 : _a.map(m => m.ts)) !== null && _b !== void 0 ? _b : []);
+            const newMessages = ((_c = result.context) !== null && _c !== void 0 ? _c : []).filter(m => !existingTsSet.has(m.ts));
+            if (newMessages.length > 0) {
+                existing.context = [...((_d = existing.context) !== null && _d !== void 0 ? _d : []), ...newMessages].sort((a, b) => Number(a.ts) - Number(b.ts));
+            }
+            // Update permalink if missing
+            if (!existing.permalink && result.permalink) {
+                existing.permalink = result.permalink;
+            }
+            // Merge members if needed (for DMs/MPIMs)
+            if (result.members && result.members.length > 0) {
+                const existingMemberIds = new Set((_f = (_e = existing.members) === null || _e === void 0 ? void 0 : _e.map(m => m.userId)) !== null && _f !== void 0 ? _f : []);
+                const newMembers = result.members.filter(m => !existingMemberIds.has(m.userId));
+                if (newMembers.length > 0) {
+                    existing.members = [...((_g = existing.members) !== null && _g !== void 0 ? _g : []), ...newMembers];
+                }
+            }
         }
     }
-
+    // Sort by timestamp descending (most recent first)
+    return Array.from(threadMap.values()).sort((a, b) => Number(b.ts) - Number(a.ts));
 }
 /* ===================== MAIN EXPORT ===================== */
 const searchSlack = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
@@ -415,7 +275,7 @@ const searchSlack = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params,
         throw new Error(MISSING_AUTH_TOKEN);
     const client = new WebClient(authParams.authToken);
     const cache = new SlackUserCache(client);
-    const { emails, topic, timeRange, limit = 20, channel } = params;
+    const { emails, topic, timeRange, limit = 20, channel, fetchAdjacentMessages = true } = params;
    const { user_id: myUserId } = yield client.auth.test();
    if (!myUserId)
        throw new Error("Failed to get my user ID.");
@@ -466,7 +326,7 @@ const searchSlack = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params,
     searchResults.forEach(matches => allMatches.push(...matches));
     const channelInfoCache = new Map();
     const expanded = yield Promise.all(allMatches.map(m => limitHit(() => __awaiter(void 0, void 0, void 0, function* () {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _o;
         if (!m.ts || !((_a = m.channel) === null || _a === void 0 ? void 0 : _a.id))
             return null;
         const anchor = yield fetchOneMessage(client, m.channel.id, m.ts);
@@ -491,10 +351,12 @@ const searchSlack = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params,
                 yield fetchThread(client, m.channel.id, rootTs),
                 (_g = m.permalink) !== null && _g !== void 0 ? _g : (yield getPermalink(client, m.channel.id, rootTs)),
             ]
-            :
-
-
-
+            : fetchAdjacentMessages
+                ? [
+                    yield fetchContextWindow(client, m.channel.id, m.ts),
+                    (_h = m.permalink) !== null && _h !== void 0 ? _h : (yield getPermalink(client, m.channel.id, m.ts)),
+                ]
+                : [[], (_j = m.permalink) !== null && _j !== void 0 ? _j : (yield getPermalink(client, m.channel.id, m.ts))];
         // filter logic
         let passesFilter = false;
         if (channelInfo.isIm || channelInfo.isMpim) {
@@ -506,17 +368,32 @@ const searchSlack = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params,
         }
         if (filteredTargetIds.length && !passesFilter)
             return null;
-        const
+        const allContext = yield Promise.all(contextMsgs.map((t) => __awaiter(void 0, void 0, void 0, function* () {
            var _a;
            const u = t.user ? yield cache.get(t.user) : undefined;
            const rawText = extractMessageText(t);
-
-
-
-
-
-
+            // Simplify files to only include highest quality thumbnail
+            if (t.files && Array.isArray(t.files)) {
+                t.files = t.files.map((file) => simplifyFile(file));
+            }
+            // Build interaction description
+            const interactions = [];
+            if (t.reactions && t.reactions.length > 0) {
+                interactions.push(`Reactions: ${t.reactions.map(r => `:${r.name}: (${r.count})`).join(", ")}`);
+            }
+            if (t.files && t.files.length > 0) {
+                interactions.push(`Files: ${t.files.map(f => f.title || f.name || "Untitled").join(", ")}`);
+            }
+            return Object.assign({ ts: t.ts, text: rawText ? yield expandSlackEntities(cache, rawText) : undefined, userEmail: u === null || u === void 0 ? void 0 : u.email, userName: (_a = u === null || u === void 0 ? void 0 : u.name) !== null && _a !== void 0 ? _a : t.username }, (interactions.length > 0 ? { interactions: interactions.join(" | ") } : {}));
         })));
+        // Deduplicate by timestamp - appears the the context array returned can have duplicates
+        const seenTs = new Set();
+        const context = allContext.filter(msg => {
+            if (seenTs.has(msg.ts))
+                return false;
+            seenTs.add(msg.ts);
+            return true;
+        });
         const anchorUser = (anchor === null || anchor === void 0 ? void 0 : anchor.user) ? yield cache.get(anchor.user) : undefined;
         const anchorText = extractMessageText(anchor);
         return {
@@ -524,10 +401,10 @@ const searchSlack = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params,
             ts: rootTs,
             text: anchorText ? yield expandSlackEntities(cache, anchorText) : undefined,
             userEmail: anchorUser === null || anchorUser === void 0 ? void 0 : anchorUser.email,
-            userName: (
+            userName: (_k = anchorUser === null || anchorUser === void 0 ? void 0 : anchorUser.name) !== null && _k !== void 0 ? _k : anchor === null || anchor === void 0 ? void 0 : anchor.username,
             context,
-            permalink: (
-            members: ((
+            permalink: (_l = m.permalink) !== null && _l !== void 0 ? _l : permalink,
+            members: ((_o = channelInfo.members) !== null && _o !== void 0 ? _o : []).map(uid => {
                const u = cache.getSync(uid);
                return { userId: uid, userEmail: u === null || u === void 0 ? void 0 : u.email, userName: u === null || u === void 0 ? void 0 : u.name };
            }),
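For context-window messages, the new code also attaches an optional interactions string built from reactions and file names. A small sketch of the format it produces, using the template literals from the diff above with invented sample data:

// Sketch (sample data invented): reproduces the interactions string format used in the diff above
const reactions = [{ name: "thumbsup", count: 3 }, { name: "eyes", count: 1 }];
const files: { title?: string; name?: string }[] = [{ title: "Q3 Report" }];
const interactions = [
  `Reactions: ${reactions.map(r => `:${r.name}: (${r.count})`).join(", ")}`,
  `Files: ${files.map(f => f.title || f.name || "Untitled").join(", ")}`,
].join(" | ");
// => "Reactions: :thumbsup: (3), :eyes: (1) | Files: Q3 Report"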
package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js
ADDED

@@ -0,0 +1,154 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const snowflake_sdk_1 = __importDefault(require("snowflake-sdk"));
+const crypto_1 = __importDefault(require("crypto"));
+const client_s3_1 = require("@aws-sdk/client-s3");
+const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
+const uuid_1 = require("uuid");
+// Only log errors.
+snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
+const runSnowflakeQueryWriteResultsToS3 = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const { databaseName, warehouse, query, user, accountName, s3BucketName, s3Region, outputFormat = "json" } = params;
+    const { apiKey: privateKey, awsAccessKeyId, awsSecretAccessKey } = authParams;
+    if (!privateKey) {
+        throw new Error("Snowflake private key is required");
+    }
+    if (!awsAccessKeyId || !awsSecretAccessKey) {
+        throw new Error("AWS credentials are required");
+    }
+    if (!accountName || !user || !databaseName || !warehouse || !query || !s3BucketName) {
+        throw new Error("Missing required parameters for Snowflake query or S3 destination");
+    }
+    const getPrivateKeyCorrectFormat = (privateKey) => {
+        const buffer = Buffer.from(privateKey);
+        const privateKeyObject = crypto_1.default.createPrivateKey({
+            key: buffer,
+            format: "pem",
+            passphrase: "password",
+        });
+        const privateKeyCorrectFormat = privateKeyObject.export({
+            format: "pem",
+            type: "pkcs8",
+        });
+        return privateKeyCorrectFormat.toString();
+    };
+    const executeQueryAndFormatData = () => __awaiter(void 0, void 0, void 0, function* () {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const queryResults = yield new Promise((resolve, reject) => {
+            connection.execute({
+                sqlText: query,
+                complete: (err, stmt, rows) => {
+                    if (err) {
+                        return reject(err);
+                    }
+                    return resolve(rows || []);
+                },
+            });
+        });
+        // Format the results based on the output format
+        let formattedData;
+        if (outputFormat.toLowerCase() === "csv") {
+            if (queryResults.length === 0) {
+                formattedData = "";
+            }
+            else {
+                const headers = Object.keys(queryResults[0]).join(",");
+                const rows = queryResults.map(row => Object.values(row)
+                    .map(value => (typeof value === "object" && value !== null ? JSON.stringify(value) : value))
+                    .join(","));
+                formattedData = [headers, ...rows].join("\n");
+            }
+        }
+        else {
+            // Default to JSON
+            formattedData = JSON.stringify(queryResults, null, 2);
+        }
+        return { formattedData, resultsLength: queryResults.length };
+    });
+    const uploadToS3AndGetURL = (formattedData) => __awaiter(void 0, void 0, void 0, function* () {
+        // Create S3 client
+        const s3Client = new client_s3_1.S3Client({
+            region: s3Region,
+            credentials: {
+                accessKeyId: awsAccessKeyId,
+                secretAccessKey: awsSecretAccessKey,
+            },
+        });
+        const contentType = outputFormat.toLowerCase() === "csv" ? "text/csv" : "application/json";
+        const fileExtension = outputFormat.toLowerCase() === "csv" ? "csv" : "json";
+        const finalKey = `${databaseName}/${(0, uuid_1.v4)()}.${fileExtension}`;
+        // Upload to S3 without ACL
+        const uploadCommand = new client_s3_1.PutObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+            Body: formattedData,
+            ContentType: contentType,
+        });
+        yield s3Client.send(uploadCommand);
+        // Generate a presigned URL (valid for an hour)
+        const getObjectCommand = new client_s3_1.GetObjectCommand({
+            Bucket: s3BucketName,
+            Key: finalKey,
+        });
+        const presignedUrl = yield (0, s3_request_presigner_1.getSignedUrl)(s3Client, getObjectCommand, { expiresIn: 3600 });
+        return presignedUrl;
+    });
+    // Process the private key
+    const privateKeyCorrectFormatString = getPrivateKeyCorrectFormat(privateKey);
+    // Set up a connection using snowflake-sdk
+    const connection = snowflake_sdk_1.default.createConnection({
+        account: accountName,
+        username: user,
+        privateKey: privateKeyCorrectFormatString,
+        authenticator: "SNOWFLAKE_JWT",
+        role: "ACCOUNTADMIN",
+        warehouse: warehouse,
+        database: databaseName,
+    });
+    try {
+        // Connect to Snowflake
+        yield new Promise((resolve, reject) => {
+            connection.connect((err, conn) => {
+                if (err) {
+                    console.error("Unable to connect to Snowflake:", err.message);
+                    return reject(err);
+                }
+                resolve(conn);
+            });
+        });
+        const { formattedData, resultsLength } = yield executeQueryAndFormatData();
+        const presignedUrl = yield uploadToS3AndGetURL(formattedData);
+        // Return fields to match schema definition
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        return {
+            bucketUrl: presignedUrl,
+            message: `Query results successfully written to S3. URL valid for 1 hour.`,
+            rowCount: resultsLength,
+        };
+    }
+    catch (error) {
+        connection.destroy(err => {
+            if (err) {
+                console.log("Failed to disconnect from Snowflake:", err);
+            }
+        });
+        throw Error(`An error occurred: ${error}`);
+    }
+});
+exports.default = runSnowflakeQueryWriteResultsToS3;
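The destructuring at the top of the new Snowflake action implies the following input shape; the field names and the { bucketUrl, message, rowCount } return come straight from the diff, while the interface names themselves are illustrative and not exported by the package:

// Sketch of the implied input shape (interface names are assumptions)
interface RunSnowflakeQueryWriteResultsToS3Params {
  accountName: string;            // Snowflake account identifier
  user: string;                   // Snowflake username (key-pair / JWT auth)
  databaseName: string;
  warehouse: string;
  query: string;                  // SQL to execute
  s3BucketName: string;
  s3Region: string;
  outputFormat?: "json" | "csv";  // defaults to "json" in the diff
}

interface RunSnowflakeQueryWriteResultsToS3AuthParams {
  apiKey: string;                 // PEM-encoded Snowflake private key (decrypted with passphrase "password" in the diff)
  awsAccessKeyId: string;
  awsSecretAccessKey: string;
}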
package/dist/actions/providers/x/scrapeTweetDataWithNitter.js
ADDED

@@ -0,0 +1,45 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const firecrawl_js_1 = __importDefault(require("@mendable/firecrawl-js"));
+const scrapeTweetDataWithNitter = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    const tweetUrlRegex = /^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)\/([a-zA-Z0-9_]+)\/status\/(\d+)(?:\?.*)?$/;
+    if (!tweetUrlRegex.test(params.tweetUrl)) {
+        throw new Error("Invalid tweet URL. Expected format: https://twitter.com/username/status/id or https://x.com/username/status/id");
+    }
+    const nitterUrl = params.tweetUrl.replace(/^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)/i, "https://nitter.net");
+    // Initialize Firecrawl
+    if (!authParams.apiKey) {
+        throw new Error("API key is required for X+Nitter+Firecrawl");
+    }
+    const firecrawl = new firecrawl_js_1.default({
+        apiKey: authParams.apiKey,
+    });
+    try {
+        // Scrape the Nitter URL
+        const result = yield firecrawl.scrapeUrl(nitterUrl);
+        if (!result.success) {
+            throw new Error(`Failed to scrape tweet: ${result.error || "Unknown error"}`);
+        }
+        // Extract the tweet text from the scraped content - simple approach - in practice, you might need more robust parsing based on nitter html structure
+        const tweetContent = result.markdown;
+        return {
+            text: tweetContent || "Error scraping with firecrawl",
+        };
+    }
+    catch (error) {
+        throw new Error(`Error scraping tweet: ${error instanceof Error ? error.message : error}`);
+    }
+});
+exports.default = scrapeTweetDataWithNitter;
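Before scraping, the Nitter action only rewrites the tweet URL's host; a quick worked example of the replacement above, with an invented URL for illustration:

// Illustrative host rewrite (example URL invented); the regex is copied from the diff above
const nitterUrl = "https://x.com/someuser/status/1234567890".replace(
  /^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)/i,
  "https://nitter.net"
);
// => "https://nitter.net/someuser/status/1234567890"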
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@credal/actions",
-  "version": "0.2.153",
+  "version": "0.2.155",
   "type": "module",
   "description": "AI Actions by Credal AI",
   "sideEffects": false,
@@ -64,7 +64,7 @@
     "json-schema-to-zod": "^2.5.0",
     "jsonwebtoken": "^9.0.2",
     "limiter": "^3.0.0",
-    "mammoth": "^1.
+    "mammoth": "^1.11.00",
     "mongodb": "^6.13.1",
     "node-forge": "^1.3.1",
     "officeparser": "^5.2.0",
package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.js
DELETED

@@ -1,43 +0,0 @@
-"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const axiosClient_1 = require("../../util/axiosClient");
-const getSalesforceRecordByQuery = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    const { authToken, baseUrl } = authParams;
-    const { query, limit } = params;
-    if (!authToken || !baseUrl) {
-        return {
-            success: false,
-            error: "authToken and baseUrl are required for Salesforce API",
-        };
-    }
-    // The API limits the maximum number of records returned to 2000, the limit lets the user set a smaller custom limit
-    const url = `${baseUrl}/services/data/v56.0/query/?q=${encodeURIComponent(query + " LIMIT " + (limit != undefined && limit <= 2000 ? limit : 2000))}`;
-    try {
-        const response = yield axiosClient_1.axiosClient.get(url, {
-            headers: {
-                Authorization: `Bearer ${authToken}`,
-            },
-        });
-        return {
-            success: true,
-            records: response.data,
-        };
-    }
-    catch (error) {
-        console.error("Error retrieving Salesforce record:", error);
-        return {
-            success: false,
-            error: error instanceof Error ? error.message : "An unknown error occurred",
-        };
-    }
-});
-exports.default = getSalesforceRecordByQuery;