@credal/actions 0.2.48 → 0.2.50
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/actions/actionMapper.js +39 -1
- package/dist/actions/autogen/templates.d.ts +6 -0
- package/dist/actions/autogen/templates.js +640 -0
- package/dist/actions/autogen/types.d.ts +803 -1
- package/dist/actions/autogen/types.js +222 -0
- package/dist/actions/groups.js +11 -1
- package/dist/actions/providers/confluence/updatePage.js +14 -15
- package/dist/actions/providers/generic/fillTemplateAction.d.ts +7 -0
- package/dist/actions/providers/generic/fillTemplateAction.js +18 -0
- package/dist/actions/providers/generic/genericApiCall.d.ts +3 -0
- package/dist/actions/providers/generic/genericApiCall.js +38 -0
- package/dist/actions/providers/github/searchRepository.js +3 -2
- package/dist/actions/providers/google-oauth/getDriveContentById.d.ts +3 -0
- package/dist/actions/providers/google-oauth/getDriveContentById.js +161 -0
- package/dist/actions/providers/google-oauth/getDriveFileContentById.js +74 -54
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchAndGetDriveContentByKeywords.js +47 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByKeywords.js +110 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.d.ts +3 -0
- package/dist/actions/providers/google-oauth/searchDriveAndGetContentByQuery.js +78 -0
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.d.ts +15 -0
- package/dist/actions/providers/google-oauth/utils/extractContentFromDriveFileId.js +129 -0
- package/dist/actions/providers/googlemaps/nearbysearch.d.ts +3 -0
- package/dist/actions/providers/googlemaps/nearbysearch.js +96 -0
- package/dist/actions/providers/linear/getIssueDetails.d.ts +3 -0
- package/dist/actions/providers/linear/getIssueDetails.js +127 -0
- package/dist/actions/providers/linear/getIssues.d.ts +3 -0
- package/dist/actions/providers/linear/getIssues.js +160 -0
- package/dist/actions/providers/linear/getProjectDetails.d.ts +3 -0
- package/dist/actions/providers/linear/getProjectDetails.js +129 -0
- package/dist/actions/providers/linear/getProjects.d.ts +3 -0
- package/dist/actions/providers/linear/getProjects.js +96 -0
- package/dist/actions/providers/linear/getTeamDetails.d.ts +3 -0
- package/dist/actions/providers/linear/getTeamDetails.js +84 -0
- package/dist/actions/providers/linear/getTeams.d.ts +3 -0
- package/dist/actions/providers/linear/getTeams.js +68 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.d.ts +3 -0
- package/dist/actions/providers/snowflake/runSnowflakeQueryWriteResultsToS3.js +154 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.d.ts +3 -0
- package/dist/actions/providers/x/scrapeTweetDataWithNitter.js +45 -0
- package/dist/utils/google.d.ts +4 -0
- package/dist/utils/google.js +170 -0
- package/package.json +2 -1
- package/dist/actions/providers/jamf/types.d.ts +0 -8
- package/dist/actions/providers/jamf/types.js +0 -7
@@ -0,0 +1,84 @@
/**
 * Fetches a single Linear team's details via the Linear GraphQL API.
 *
 * @param {{ params: { teamId: string }, authParams: { authToken?: string } }} args
 * @returns {Promise<{ success: true, team: object } | { success: false, error: string }>}
 *   On success, `team` carries `id`, `name`, `identifier` (the team key) and
 *   a `members` array of `{ id, name, email }`.
 * @throws {Error} when no auth token is provided.
 */
const getTeamDetails = async ({ params, authParams }) => {
    const { authToken } = authParams;
    const { teamId } = params;
    if (!authToken) {
        throw new Error("Valid auth token is required to get Linear team details");
    }
    const query = `
    query GetTeam($id: String!) {
      team(id: $id) {
        id
        name
        key
        members {
          nodes {
            id
            name
            email
          }
        }
      }
    }
  `;
    try {
        const response = await fetch("https://api.linear.app/graphql", {
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                Authorization: `Bearer ${authToken}`,
            },
            body: JSON.stringify({ query, variables: { id: teamId } }),
        });
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`HTTP error, status: ${response.status} body: ${errorText}`);
        }
        const data = await response.json();
        if (data.errors) {
            throw new Error(`GraphQL errors: ${JSON.stringify(data.errors)}`);
        }
        if (!data.data?.team) {
            return {
                success: false,
                error: "Team not found",
            };
        }
        const { id, name, key, members } = data.data.team;
        // Guard against a missing/odd members payload from the API.
        const memberList = Array.isArray(members?.nodes)
            ? members.nodes.map(({ id, name, email }) => ({ id, name, email }))
            : [];
        return {
            success: true,
            team: {
                id,
                name,
                identifier: key,
                members: memberList,
            },
        };
    }
    catch (error) {
        console.error("Error retrieving Linear team details: ", error);
        return {
            success: false,
            error: error instanceof Error ? error.message : "Unknown error",
        };
    }
};
export default getTeamDetails;
@@ -0,0 +1,68 @@
/**
 * Lists all Linear teams visible to the token via the Linear GraphQL API.
 *
 * @param {{ authParams: { authToken?: string } }} args
 * @returns {Promise<{ success: true, teams: Array<{ id: string, name: string }> } | { success: false, error: string }>}
 * @throws {Error} when no auth token is provided.
 */
const getTeams = async ({ authParams }) => {
    const { authToken } = authParams;
    if (!authToken) {
        throw new Error("Valid auth token is required to get Linear teams");
    }
    const query = `
    query GetTeams {
      teams {
        nodes {
          id
          name
        }
      }
    }
  `;
    try {
        const response = await fetch("https://api.linear.app/graphql", {
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                Authorization: `Bearer ${authToken}`,
            },
            body: JSON.stringify({ query }),
        });
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        if (data.errors) {
            throw new Error(`GraphQL errors: ${JSON.stringify(data.errors)}`);
        }
        if (!data.data?.teams) {
            return {
                success: false,
                error: "No teams found",
            };
        }
        const { nodes } = data.data.teams;
        // Tolerate an unexpected non-array payload by returning no teams.
        const teamList = Array.isArray(nodes) ? nodes : [];
        return {
            success: true,
            teams: teamList.map(({ id, name }) => ({ id, name })),
        };
    }
    catch (error) {
        console.error("Error retrieving Linear teams: ", error);
        return {
            success: false,
            error: error instanceof Error ? error.message : "Unknown error",
        };
    }
};
export default getTeams;
@@ -0,0 +1,154 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const snowflake_sdk_1 = __importDefault(require("snowflake-sdk"));
const crypto_1 = __importDefault(require("crypto"));
const client_s3_1 = require("@aws-sdk/client-s3");
const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
const uuid_1 = require("uuid");
// Only log errors.
snowflake_sdk_1.default.configure({ logLevel: "ERROR" });
/**
 * Runs a SQL query against Snowflake (key-pair / JWT auth) and writes the
 * results to S3 as JSON (default) or CSV, returning a presigned GET URL
 * valid for one hour.
 *
 * @param {{ params: object, authParams: object }} args
 *   params: databaseName, warehouse, query, user, accountName, s3BucketName,
 *           s3Region, outputFormat ("json" | "csv", default "json").
 *   authParams: apiKey (Snowflake private key PEM), awsAccessKeyId,
 *               awsSecretAccessKey.
 * @returns {Promise<{ bucketUrl: string, message: string, rowCount: number }>}
 * @throws {Error} on missing credentials/parameters, or any connection,
 *   query, or upload failure (connection is destroyed before rethrowing).
 */
const runSnowflakeQueryWriteResultsToS3 = async ({ params, authParams }) => {
    const { databaseName, warehouse, query, user, accountName, s3BucketName, s3Region, outputFormat = "json" } = params;
    const { apiKey: privateKey, awsAccessKeyId, awsSecretAccessKey } = authParams;
    if (!privateKey) {
        throw new Error("Snowflake private key is required");
    }
    if (!awsAccessKeyId || !awsSecretAccessKey) {
        throw new Error("AWS credentials are required");
    }
    if (!accountName || !user || !databaseName || !warehouse || !query || !s3BucketName) {
        throw new Error("Missing required parameters for Snowflake query or S3 destination");
    }
    // Re-export the PEM key as unencrypted PKCS#8, the format snowflake-sdk
    // expects for SNOWFLAKE_JWT auth.
    // NOTE(review): assumes the incoming key is encrypted with the literal
    // passphrase "password" — confirm against how keys are provisioned.
    const getPrivateKeyCorrectFormat = (privateKey) => {
        const privateKeyObject = crypto_1.default.createPrivateKey({
            key: Buffer.from(privateKey),
            format: "pem",
            passphrase: "password",
        });
        return privateKeyObject.export({ format: "pem", type: "pkcs8" }).toString();
    };
    // RFC 4180-style CSV escaping: quote any field containing a comma, quote,
    // or newline and double embedded quotes. (Previously raw values were
    // joined with "," — JSON-stringified objects and values containing commas
    // produced corrupt CSV rows.)
    const toCsvField = (value) => {
        const text = typeof value === "object" && value !== null
            ? JSON.stringify(value)
            : value == null
                ? "" // match Array.prototype.join's treatment of null/undefined
                : String(value);
        return /[",\n\r]/.test(text) ? `"${text.replace(/"/g, '""')}"` : text;
    };
    // Executes the query and renders the rows as CSV or pretty-printed JSON.
    const executeQueryAndFormatData = async () => {
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const queryResults = await new Promise((resolve, reject) => {
            connection.execute({
                sqlText: query,
                complete: (err, stmt, rows) => {
                    if (err) {
                        return reject(err);
                    }
                    return resolve(rows || []);
                },
            });
        });
        let formattedData;
        if (outputFormat.toLowerCase() === "csv") {
            if (queryResults.length === 0) {
                formattedData = "";
            }
            else {
                // Column order is taken from the first row.
                const headers = Object.keys(queryResults[0]).map(toCsvField).join(",");
                const rows = queryResults.map(row => Object.values(row).map(toCsvField).join(","));
                formattedData = [headers, ...rows].join("\n");
            }
        }
        else {
            // Default to JSON.
            formattedData = JSON.stringify(queryResults, null, 2);
        }
        return { formattedData, resultsLength: queryResults.length };
    };
    // Uploads the formatted results and returns a presigned GET URL.
    const uploadToS3AndGetURL = async (formattedData) => {
        const s3Client = new client_s3_1.S3Client({
            region: s3Region,
            credentials: {
                accessKeyId: awsAccessKeyId,
                secretAccessKey: awsSecretAccessKey,
            },
        });
        const isCsv = outputFormat.toLowerCase() === "csv";
        const finalKey = `${databaseName}/${(0, uuid_1.v4)()}.${isCsv ? "csv" : "json"}`;
        // Upload without an ACL; access is granted only via the presigned URL.
        const uploadCommand = new client_s3_1.PutObjectCommand({
            Bucket: s3BucketName,
            Key: finalKey,
            Body: formattedData,
            ContentType: isCsv ? "text/csv" : "application/json",
        });
        await s3Client.send(uploadCommand);
        // Generate a presigned URL (valid for an hour).
        const getObjectCommand = new client_s3_1.GetObjectCommand({
            Bucket: s3BucketName,
            Key: finalKey,
        });
        return (0, s3_request_presigner_1.getSignedUrl)(s3Client, getObjectCommand, { expiresIn: 3600 });
    };
    // Best-effort disconnect used on both the success and failure paths.
    const disconnect = () => {
        connection.destroy(err => {
            if (err) {
                console.log("Failed to disconnect from Snowflake:", err);
            }
        });
    };
    // Process the private key.
    const privateKeyCorrectFormatString = getPrivateKeyCorrectFormat(privateKey);
    // Set up a connection using snowflake-sdk.
    const connection = snowflake_sdk_1.default.createConnection({
        account: accountName,
        username: user,
        privateKey: privateKeyCorrectFormatString,
        authenticator: "SNOWFLAKE_JWT",
        // NOTE(review): role is hard-coded — confirm ACCOUNTADMIN is intended.
        role: "ACCOUNTADMIN",
        warehouse: warehouse,
        database: databaseName,
    });
    try {
        // Connect to Snowflake.
        await new Promise((resolve, reject) => {
            connection.connect((err, conn) => {
                if (err) {
                    console.error("Unable to connect to Snowflake:", err.message);
                    return reject(err);
                }
                resolve(conn);
            });
        });
        const { formattedData, resultsLength } = await executeQueryAndFormatData();
        const presignedUrl = await uploadToS3AndGetURL(formattedData);
        disconnect();
        // Return fields to match schema definition.
        return {
            bucketUrl: presignedUrl,
            message: `Query results successfully written to S3. URL valid for 1 hour.`,
            rowCount: resultsLength,
        };
    }
    catch (error) {
        disconnect();
        throw Error(`An error occurred: ${error}`);
    }
};
exports.default = runSnowflakeQueryWriteResultsToS3;
@@ -0,0 +1,45 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const firecrawl_js_1 = __importDefault(require("@mendable/firecrawl-js"));
/**
 * Scrapes a tweet's text by rewriting its twitter.com/x.com URL to the
 * nitter.net mirror and fetching that page through Firecrawl.
 *
 * @param {{ params: { tweetUrl: string }, authParams: { apiKey?: string } }} args
 * @returns {Promise<{ text: string }>} the scraped page markdown.
 * @throws {Error} on an invalid tweet URL, a missing API key, or any
 *   scraping failure.
 */
const scrapeTweetDataWithNitter = async ({ params, authParams }) => {
    const tweetUrlRegex = /^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)\/([a-zA-Z0-9_]+)\/status\/(\d+)(?:\?.*)?$/;
    if (!tweetUrlRegex.test(params.tweetUrl)) {
        throw new Error("Invalid tweet URL. Expected format: https://twitter.com/username/status/id or https://x.com/username/status/id");
    }
    // Point the same tweet path at the Nitter mirror.
    const nitterUrl = params.tweetUrl.replace(/^(?:https?:\/\/)?(?:www\.)?(?:twitter\.com|x\.com)/i, "https://nitter.net");
    // Initialize Firecrawl.
    if (!authParams.apiKey) {
        throw new Error("API key is required for X+Nitter+Firecrawl");
    }
    const firecrawl = new firecrawl_js_1.default({ apiKey: authParams.apiKey });
    try {
        // Scrape the Nitter URL.
        const result = await firecrawl.scrapeUrl(nitterUrl);
        if (!result.success) {
            throw new Error(`Failed to scrape tweet: ${result.error || "Unknown error"}`);
        }
        // Simple extraction: return the page markdown as-is; robust parsing of
        // the Nitter HTML structure would be needed for anything fancier.
        return {
            text: result.markdown || "Error scraping with firecrawl",
        };
    }
    catch (error) {
        throw new Error(`Error scraping tweet: ${error instanceof Error ? error.message : error}`);
    }
};
exports.default = scrapeTweetDataWithNitter;
@@ -0,0 +1,4 @@
import type { docs_v1, sheets_v4, slides_v1 } from "googleapis";
/** Flattens a Google Docs document snapshot (paragraphs, bullets, tables, table of contents) into plain text; heading-delimited sections are joined with newlines. */
export declare function parseGoogleDocFromRawContentToPlainText(snapshotRawContent: docs_v1.Schema$Document): string;
/** Flattens a Google Sheets spreadsheet snapshot into plain text: one "Sheet: <title>" block per non-empty sheet, rows rendered as "a | b | c", sheets separated by blank lines. */
export declare function parseGoogleSheetsFromRawContentToPlainText(snapshotRawContent: sheets_v4.Schema$Spreadsheet): string;
/** Flattens a Google Slides presentation snapshot into plain text: each slide's shape text joined with spaces, slides separated by blank lines. */
export declare function parseGoogleSlidesFromRawContentToPlainText(snapshotRawContent: slides_v1.Schema$Presentation): string;
@@ -0,0 +1,170 @@
/**
 * Flattens a Google Docs document snapshot into plain text.
 *
 * Paragraph text runs are concatenated; bulleted paragraphs are indented by
 * nesting level and prefixed with "•"; table rows render as "a | b | c";
 * table-of-contents text is included. Headings start new sections, and
 * sections are joined with newlines.
 *
 * @param snapshotRawContent raw docs_v1.Schema$Document payload.
 * @returns the document's plain-text content ("" when there is no body).
 */
export function parseGoogleDocFromRawContentToPlainText(snapshotRawContent) {
    const sections = [{ heading: undefined, paragraphs: [] }];
    if (!snapshotRawContent.body?.content)
        return "";
    // Concatenates the text runs of a paragraph's elements.
    const textOfElements = (elements) => elements
        .map(element => element.textRun?.content)
        .filter(content => Boolean(content))
        .join("");
    const currentSection = () => sections[sections.length - 1];
    for (const content of snapshotRawContent.body.content) {
        if (!content)
            continue;
        // Paragraphs (and headings, which open a fresh section).
        if (content.paragraph) {
            const paragraph = content.paragraph;
            if (paragraph.paragraphStyle?.headingId) {
                sections.push({
                    heading: {
                        id: paragraph.paragraphStyle.headingId,
                        type: paragraph.paragraphStyle.namedStyleType || "",
                    },
                    paragraphs: [],
                });
            }
            if (paragraph?.elements) {
                const text = textOfElements(paragraph.elements);
                // undefined = not a bullet; otherwise nesting level (default 0).
                const nesting = paragraph.bullet === undefined ? undefined : (paragraph.bullet?.nestingLevel ?? 0);
                const rendered = nesting === undefined ? text : "\t".repeat(nesting) + " • " + text;
                currentSection().paragraphs.push(rendered);
            }
        }
        // Tables: each row becomes "cell | cell | …"; blank rows are dropped.
        if (content.table) {
            const renderedRows = [];
            if (content.table.tableRows) {
                for (const row of content.table.tableRows) {
                    if (!row.tableCells)
                        continue;
                    const cellTexts = [];
                    for (const cell of row.tableCells) {
                        if (!cell.content)
                            continue;
                        const cellParts = [];
                        for (const cellContent of cell.content) {
                            if (cellContent.paragraph?.elements) {
                                const cellParagraphText = textOfElements(cellContent.paragraph.elements).trim();
                                if (cellParagraphText) {
                                    cellParts.push(cellParagraphText);
                                }
                            }
                        }
                        cellTexts.push(cellParts.join(" "));
                    }
                    if (cellTexts.some(text => text.trim())) {
                        renderedRows.push(cellTexts.join(" | "));
                    }
                }
            }
            if (renderedRows.length > 0) {
                currentSection().paragraphs.push(renderedRows.join("\n"));
            }
        }
        // Section breaks carry no text; skip before the TOC check, matching
        // the original element-handling order.
        if (content.sectionBreak) {
            continue;
        }
        // Table of contents: extract any paragraph text it contains.
        if (content.tableOfContents?.content) {
            const tocLines = [];
            for (const tocContent of content.tableOfContents.content) {
                if (tocContent.paragraph?.elements) {
                    const tocParagraphText = textOfElements(tocContent.paragraph.elements).trim();
                    if (tocParagraphText) {
                        tocLines.push(tocParagraphText);
                    }
                }
            }
            if (tocLines.length > 0) {
                currentSection().paragraphs.push(tocLines.join("\n"));
            }
        }
    }
    const nonEmptySections = sections.filter(section => section.heading || section.paragraphs.length > 0);
    return nonEmptySections.map(section => section.paragraphs.join(" ")).join("\n");
}
/**
 * Flattens a Google Sheets spreadsheet snapshot into plain text.
 *
 * Each non-empty sheet becomes a "Sheet: <title>" block; each row renders as
 * "a | b | c" (empty cells dropped); sheets are separated by blank lines.
 *
 * Fix: the previous version used truthiness checks on userEnteredValue
 * fields, which silently dropped numeric 0 and boolean false cells; explicit
 * null checks now preserve them.
 *
 * @param snapshotRawContent raw sheets_v4.Schema$Spreadsheet payload.
 * @returns the spreadsheet's plain-text content ("" when there are no sheets).
 */
export function parseGoogleSheetsFromRawContentToPlainText(snapshotRawContent) {
    if (!snapshotRawContent.sheets)
        return "";
    const sheetContents = [];
    for (const sheet of snapshotRawContent.sheets) {
        if (!sheet.data || !sheet.properties?.title)
            continue;
        const sheetRows = [`Sheet: ${sheet.properties.title}`];
        for (const gridData of sheet.data) {
            if (!gridData.rowData)
                continue;
            for (const rowData of gridData.rowData) {
                if (!rowData.values)
                    continue;
                const cellValues = rowData.values
                    .map(cell => {
                        if (cell.formattedValue)
                            return cell.formattedValue;
                        const entered = cell.userEnteredValue;
                        if (entered?.stringValue)
                            return entered.stringValue;
                        // != null so 0 and false are kept (truthiness dropped them).
                        if (entered?.numberValue != null)
                            return entered.numberValue.toString();
                        if (entered?.boolValue != null)
                            return entered.boolValue.toString();
                        return "";
                    })
                    .filter(value => value !== "");
                if (cellValues.length > 0) {
                    sheetRows.push(cellValues.join(" | "));
                }
            }
        }
        // Only emit sheets that produced at least one data row.
        if (sheetRows.length > 1) {
            sheetContents.push(sheetRows.join("\n"));
        }
    }
    return sheetContents.join("\n\n");
}
/**
 * Flattens a Google Slides presentation snapshot into plain text.
 *
 * Each slide's shape text runs are trimmed and joined with spaces; slides
 * with no text are omitted; slides are separated by blank lines.
 *
 * @param snapshotRawContent raw slides_v1.Schema$Presentation payload.
 * @returns the presentation's plain-text content ("" when there are no slides).
 */
export function parseGoogleSlidesFromRawContentToPlainText(snapshotRawContent) {
    if (!snapshotRawContent.slides)
        return "";
    const renderedSlides = [];
    for (const slide of snapshotRawContent.slides) {
        if (!slide.pageElements)
            continue;
        const textFragments = [];
        for (const pageElement of slide.pageElements) {
            const textElements = pageElement.shape?.text?.textElements;
            if (!textElements)
                continue;
            for (const textElement of textElements) {
                const content = textElement.textRun?.content;
                if (content) {
                    textFragments.push(content.trim());
                }
            }
        }
        if (textFragments.length > 0) {
            renderedSlides.push(textFragments.join(" "));
        }
    }
    return renderedSlides.join("\n\n");
}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
|
|
1
1
|
{
|
2
2
|
"name": "@credal/actions",
|
3
|
-
"version": "0.2.
|
3
|
+
"version": "0.2.50",
|
4
4
|
"type": "module",
|
5
5
|
"description": "AI Actions by Credal AI",
|
6
6
|
"sideEffects": false,
|
@@ -55,6 +55,7 @@
|
|
55
55
|
"date-fns": "^4.1.0",
|
56
56
|
"docx": "^9.3.0",
|
57
57
|
"dotenv": "^16.4.7",
|
58
|
+
"googleapis": "^148.0.0",
|
58
59
|
"json-schema-to-zod": "^2.5.0",
|
59
60
|
"jsonwebtoken": "^9.0.2",
|
60
61
|
"mammoth": "^1.4.27",
|