@credal/actions 0.2.126 → 0.2.128
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/actions/autogen/templates.js +903 -581
- package/dist/actions/autogen/types.d.ts +860 -944
- package/dist/actions/autogen/types.js +362 -212
- package/dist/actions/providers/firecrawl/scrapeUrl.js +11 -4
- package/dist/actions/providers/github/getFileContent.js +13 -5
- package/dist/actions/providers/github/listDirectory.js +12 -11
- package/dist/actions/providers/github/listPullRequests.js +58 -30
- package/dist/actions/providers/github/searchOrganization.d.ts +1 -1
- package/dist/actions/providers/github/searchOrganization.js +26 -4
- package/dist/actions/providers/gitlab/getFileContent.js +15 -6
- package/dist/actions/providers/gitlab/listDirectory.js +10 -6
- package/dist/actions/providers/gitlab/searchGroup.js +84 -55
- package/dist/actions/providers/google-oauth/getDriveFileContentById.js +10 -1
- package/dist/actions/providers/google-oauth/searchDriveByKeywordsAndGetFileContent.js +12 -4
- package/dist/actions/providers/google-oauth/searchDriveByQueryAndGetFileContent.js +2 -1
- package/dist/actions/providers/jira/createJiraTicket.js +11 -3
- package/dist/actions/providers/jira/getJiraIssuesByQuery.js +9 -7
- package/dist/actions/providers/jira/getJiraTicketDetails.js +8 -1
- package/dist/actions/providers/jira/updateJiraTicketDetails.js +11 -2
- package/dist/actions/providers/jira/updateServiceDeskRequest.d.ts +3 -0
- package/dist/actions/providers/jira/updateServiceDeskRequest.js +72 -0
- package/dist/actions/providers/jira/utils.d.ts +1 -0
- package/dist/actions/providers/jira/utils.js +40 -0
- package/dist/actions/providers/salesforce/getSalesforceRecordsByQuery.js +10 -3
- package/dist/actions/providers/salesforce/searchSalesforceRecords.js +9 -7
- package/dist/actions/providers/slackUser/searchSlack.js +8 -1
- package/package.json +1 -1
- package/dist/actions/groups.d.ts +0 -6
- package/dist/actions/groups.js +0 -248
- package/dist/actions/providers/credal/callCopilot.d.ts +0 -3
- package/dist/actions/providers/credal/callCopilot.js +0 -36
- package/dist/actions/providers/math/index.d.ts +0 -1
- package/dist/actions/providers/math/index.js +0 -37
- package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.d.ts +0 -3
- package/dist/actions/providers/salesforce/getSalesforceRecordByQuery.js +0 -43
- package/dist/actions/providers/slack/archiveChannel.d.ts +0 -3
- package/dist/actions/providers/slack/archiveChannel.js +0 -35
- package/dist/actions/providers/slack/index.d.ts +0 -1
- package/dist/actions/providers/slack/index.js +0 -37
- package/dist/actions/providers/slack/listConversations.d.ts +0 -3
- package/dist/actions/providers/slack/listConversations.js +0 -41
package/dist/actions/providers/firecrawl/scrapeUrl.js

@@ -8,8 +8,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 });
 };
 import FirecrawlApp from "@mendable/firecrawl-js";
-import { firecrawlScrapeUrlOutputSchema } from "../../autogen/types.js";
 const scrapeUrl = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
+    var _b, _c;
     const firecrawl = new FirecrawlApp({
         apiKey: authParams.apiKey,
     });
@@ -62,8 +62,15 @@ const scrapeUrl = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, a
         // Default to markdown if no formats specified
         content = result.markdown || "";
     }
-    return
-
-
+    return {
+        success: true,
+        results: [
+            {
+                name: (_c = (_b = result.metadata) === null || _b === void 0 ? void 0 : _b.title) !== null && _c !== void 0 ? _c : "Untitled",
+                url: params.url,
+                contents: content,
+            },
+        ],
+    };
 });
 export default scrapeUrl;
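Every hunk in this release moves an action's output into the same `{ success, results }` envelope. A minimal sketch of the shape implied by the added lines above (the type names are illustrative, not exports of `@credal/actions`):

```typescript
// Illustrative only: the envelope shape implied by the "+" lines above.
// "contents" is intentionally loose — scrapeUrl stores a markdown string,
// while the GitHub/GitLab/Jira actions below store structured objects.
type ActionResultItem = {
  name: string;      // here: the page title, falling back to "Untitled"
  url: string;       // here: the URL that was scraped
  contents: unknown; // provider-specific payload
};

type ActionOutput =
  | { success: true; results: ActionResultItem[] }
  | { success: false; error: string };

// The compiled `(_c = (_b = result.metadata) === null || _b === void 0 ? ...)`
// chain is just downleveled optional chaining plus nullish coalescing:
// name: result.metadata?.title ?? "Untitled"
```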
package/dist/actions/providers/github/getFileContent.js

@@ -13,7 +13,7 @@ import { getOctokit } from "./utils.js";
  * Get file content
  */
 const getFileContent = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-    var _b;
+    var _b, _c;
     if (!authParams.authToken) {
         return {
             success: false,
@@ -48,10 +48,18 @@ const getFileContent = (_a) => __awaiter(void 0, [_a], void 0, function* ({ para
     const content = Buffer.from(data.content, data.encoding).toString("utf-8");
     return {
         success: true,
-
-
-
-
+        results: [
+            {
+                name: data.name,
+                url: (_b = data.html_url) !== null && _b !== void 0 ? _b : data.url,
+                contents: {
+                    content,
+                    size: data.size,
+                    name: data.name,
+                    htmlUrl: (_c = data.html_url) !== null && _c !== void 0 ? _c : data.url,
+                },
+            },
+        ],
     };
 });
 export default getFileContent;
package/dist/actions/providers/github/listDirectory.js

@@ -36,19 +36,20 @@ const listDirectory = (_a) => __awaiter(void 0, [_a], void 0, function* ({ param
             error: "Content is not a directory",
         };
     }
-    const content = data.map(item => {
-        var _a;
-        return {
-            name: item.name,
-            path: item.path,
-            type: item.type,
-            size: item.size,
-            htmlUrl: (_a = item.html_url) !== null && _a !== void 0 ? _a : item.url,
-        };
-    });
     return {
         success: true,
-
+        results: data.map(item => {
+            var _a;
+            return ({
+                name: item.name,
+                url: (_a = item.html_url) !== null && _a !== void 0 ? _a : item.url,
+                contents: {
+                    path: item.path,
+                    type: item.type,
+                    size: item.size,
+                },
+            });
+        }),
     };
 });
 export default listDirectory;
package/dist/actions/providers/github/listPullRequests.js

@@ -13,39 +13,67 @@ import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
 const listPullRequests = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
     const { authToken } = authParams;
     if (!authToken) {
-
+        return githubListPullRequestsOutputSchema.parse({
+            success: false,
+            error: MISSING_AUTH_TOKEN,
+        });
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+        const { repositoryName, repositoryOwner, state } = params;
+        const url = `https://api.github.com/repos/${repositoryOwner}/${repositoryName}/pulls`;
+        const allPulls = [];
+        let page = 1;
+        const perPage = 100;
+        while (true) {
+            const response = yield axios.get(url, {
+                headers: {
+                    Authorization: `Bearer ${authToken}`,
+                    Accept: "application/vnd.github+json",
+                    "X-GitHub-Api-Version": "2022-11-28",
+                },
+                params: {
+                    state: state !== null && state !== void 0 ? state : "all",
+                    sort: "created",
+                    direction: "desc",
+                    per_page: perPage,
+                    page,
+                },
+            });
+            const pulls = response.data;
+            if (pulls.length === 0)
+                break;
+            allPulls.push(...pulls);
+            // Stop if we got fewer than requested (last page)
+            if (pulls.length < perPage)
+                break;
+            page++;
+        }
+        const results = allPulls.map(pull => ({
+            name: pull.title,
+            url: pull.html_url,
+            contents: {
+                number: pull.number,
+                title: pull.title,
+                state: pull.state,
+                url: pull.html_url,
+                createdAt: pull.created_at,
+                updatedAt: pull.updated_at,
+                user: {
+                    login: pull.user.login,
+                },
+                description: pull.body || "",
             },
+        }));
+        return githubListPullRequestsOutputSchema.parse({
+            success: true,
+            results,
+        });
+    }
+    catch (error) {
+        return githubListPullRequestsOutputSchema.parse({
+            success: false,
+            error: error instanceof Error ? error.message : "An unknown error occurred",
         });
-        const pulls = response.data;
-        if (pulls.length === 0)
-            break;
-        allPulls.push(...pulls);
-        // Stop if the rest are older than one year
-        if (pulls.length < perPage)
-            break;
-        page++;
     }
-    return githubListPullRequestsOutputSchema.parse({
-        pullRequests: allPulls,
-    });
 });
 export default listPullRequests;
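The rewritten body is compiled output; assuming the usual tsc downleveling, the pagination loop corresponds to source roughly like the sketch below (not the package's literal source). Note the diff also corrects the stale loop comment: the old code described the short-page check as an age cutoff, the new one describes it as last-page detection.

```typescript
import axios from "axios";

// Sketch of the pagination loop the compiled hunk above downlevels from.
// GitHub caps per_page at 100; a short page signals the last page, so the
// loop issues exactly ceil(total / 100) requests.
async function fetchAllPulls(owner: string, repo: string, token: string, state = "all") {
  const allPulls: unknown[] = [];
  const perPage = 100;
  for (let page = 1; ; page++) {
    const { data } = await axios.get(
      `https://api.github.com/repos/${owner}/${repo}/pulls`,
      {
        headers: {
          Authorization: `Bearer ${token}`,
          Accept: "application/vnd.github+json",
          "X-GitHub-Api-Version": "2022-11-28",
        },
        params: { state, sort: "created", direction: "desc", per_page: perPage, page },
      },
    );
    if (data.length === 0) break;     // ran past the last page
    allPulls.push(...data);
    if (data.length < perPage) break; // short page => last page
  }
  return allPulls;
}
```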
package/dist/actions/providers/github/searchOrganization.js

@@ -7,6 +7,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     step((generator = generator.apply(thisArg, _arguments || [])).next());
 });
 };
+import { githubSearchOrganizationOutputSchema, } from "../../autogen/types.js";
 import { MISSING_AUTH_TOKEN } from "../../util/missingAuthConstants.js";
 import { getOctokit } from "./utils.js";
 // Limits on the number of results to return
@@ -19,7 +20,10 @@ const MAX_PATCH_LINES = 20;
 const MAX_FRAGMENT_LINES = 20;
 const searchOrganization = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
     if (!authParams.authToken) {
-
+        return githubSearchOrganizationOutputSchema.parse({
+            success: false,
+            error: MISSING_AUTH_TOKEN,
+        });
     }
     const octokit = yield getOctokit(authParams.authToken);
     const { organization, query, repository } = params;
@@ -141,9 +145,27 @@ const searchOrganization = (_a) => __awaiter(void 0, [_a], void 0, function* ({
         };
     });
     return {
-
-
-
+        success: true,
+        results: [
+            ...codeResults.map(result => ({
+                type: "code",
+                name: result.name,
+                url: result.url,
+                content: result,
+            })),
+            ...enrichedCommits.map(result => ({
+                type: "commit",
+                name: result.sha,
+                url: result.url,
+                content: result,
+            })),
+            ...issuesAndPRs.map(result => ({
+                type: "issueOrPullRequest",
+                name: result.title,
+                url: result.html_url,
+                content: result,
+            })),
+        ],
     };
 });
 export default searchOrganization;
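Both branches of searchOrganization's auth check now run through `githubSearchOrganizationOutputSchema.parse`. The autogenerated schema itself isn't shown in this diff; assuming it is a zod union over the success and failure shapes (a guess consistent with the `.parse` calls), the pattern being adopted is:

```typescript
import { z } from "zod";

// Hypothetical stand-in for the autogenerated schema in autogen/types.js;
// the real definition is not part of this diff. Note this action tags each
// item with "type" and uses a singular "content" key, unlike the "contents"
// key used by the other actions in this release.
const searchOrganizationOutputSchema = z.union([
  z.object({
    success: z.literal(true),
    results: z.array(
      z.object({
        type: z.enum(["code", "commit", "issueOrPullRequest"]),
        name: z.string(),
        url: z.string(),
        content: z.unknown(),
      }),
    ),
  }),
  z.object({ success: z.literal(false), error: z.string() }),
]);

// Parsing the failure object too keeps malformed returns from leaking past
// the action boundary:
searchOrganizationOutputSchema.parse({ success: false, error: "missing auth token" });
```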
package/dist/actions/providers/gitlab/getFileContent.js

@@ -36,18 +36,27 @@ const getFileContent = (_a) => __awaiter(void 0, [_a], void 0, function* ({ para
     const { project_id, path, ref = "HEAD" } = params;
     // The file path must be URL-encoded per GitLab API docs
     const filePath = encodeURIComponent(path);
-    const
-    const data = yield gitlabFetch(
+    const fetchUrl = `${gitlabBaseUrl}/api/v4/projects/${project_id}/repository/files/${filePath}?ref=${encodeURIComponent(ref)}`;
+    const data = yield gitlabFetch(fetchUrl, authToken);
     if (data.encoding !== "base64" || typeof data.content !== "string") {
         return { success: false, error: `Unexpected response: ${JSON.stringify(data)}` };
     }
     const content = Buffer.from(data.content, "base64").toString("utf-8");
+    const url = data.web_url || `${gitlabBaseUrl}/${project_id}/-/blob/${ref}/${path}`;
     return {
         success: true,
-
-
-
-
+        results: [
+            {
+                name: data.file_name,
+                url,
+                contents: {
+                    content,
+                    size: Buffer.byteLength(content),
+                    name: data.file_name,
+                    htmlUrl: url,
+                },
+            },
+        ],
     };
 });
 export default getFileContent;
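Unlike the GitHub variant, which reports the API-provided `data.size`, the GitLab version derives the size from the decoded text with `Buffer.byteLength`. That distinction matters for non-ASCII files:

```typescript
// Buffer.byteLength counts UTF-8 bytes; String#length counts UTF-16 code
// units. They diverge as soon as the file contains multi-byte characters.
const text = "café";
console.log(text.length);                      // 4
console.log(Buffer.byteLength(text, "utf-8")); // 5
```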
package/dist/actions/providers/gitlab/listDirectory.js

@@ -38,18 +38,22 @@ const listDirectory = (_a) => __awaiter(void 0, [_a], void 0, function* ({ param
         `?path=${encodeURIComponent(path)}` +
         `&ref=${encodeURIComponent(ref)}`;
     const treeItems = yield gitlabFetch(url, authToken);
-    const
+    const results = treeItems.map(item => {
         var _a;
         const isFile = item.type === "blob";
         const htmlUrl = `${gitlabBaseUrl}/${fullPath}/-/blob/${ref}/${item.path}`;
         return {
             name: item.name,
-
-
-
-
+            url: htmlUrl,
+            contents: {
+                name: item.name,
+                path: item.path,
+                type: item.type, // "blob" or "tree"
+                size: isFile ? ((_a = item.size) !== null && _a !== void 0 ? _a : 0) : 0, // Size may not be returned; fallback to 0
+                htmlUrl,
+            },
         };
     });
-    return {
+    return { success: true, results };
 });
 export default listDirectory;
package/dist/actions/providers/gitlab/searchGroup.js

@@ -84,62 +84,91 @@ function getCommitDetails(input) {
     });
 }
 const searchGroup = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
-
-
-
-
-
-
-
-
-
-
-    const
-
-
-
-
-
-
-
-
-
-
-    const
-
-
-
-
-
-
-
-    const
-
-
-
-
-
-
-
+    try {
+        const { authToken, baseUrl } = authParams;
+        const gitlabBaseUrl = baseUrl !== null && baseUrl !== void 0 ? baseUrl : GITLAB_API_URL;
+        const gitlabBaseApiUrl = `${gitlabBaseUrl}/api/v4`;
+        if (!authToken)
+            throw new Error(MISSING_AUTH_TOKEN);
+        const { query, groupId, project } = params;
+        const projectPathCache = createProjectPathCache();
+        const fullProjectPath = project ? `${groupId}/${project}` : undefined;
+        const encodedGroup = encodeURIComponent(groupId);
+        const fetchSearchResults = (scope) => __awaiter(void 0, void 0, void 0, function* () {
+            const endpoint = fullProjectPath
+                ? `${gitlabBaseApiUrl}/projects/${encodeURIComponent(fullProjectPath)}/search?scope=${scope}&search=${encodeURIComponent(query)}`
+                : `${gitlabBaseApiUrl}/groups/${encodedGroup}/search?scope=${scope}&search=${encodeURIComponent(query)}`;
+            return gitlabFetch(endpoint, authToken);
+        });
+        const [mrResults, blobResults, commitResults] = yield Promise.all([
+            fetchSearchResults("merge_requests"),
+            fetchSearchResults("blobs"),
+            fetchSearchResults("commits"),
+        ]);
+        const limitedMRResults = mrResults.slice(0, MAX_ISSUES_OR_PRS);
+        const mergeRequests = yield Promise.all(limitedMRResults.map((metadata) => __awaiter(void 0, void 0, void 0, function* () {
+            const endpoint = `${gitlabBaseApiUrl}/projects/${metadata.project_id}/merge_requests/${metadata.iid}/diffs`;
+            let diffs = yield gitlabFetch(endpoint, authToken);
+            diffs = (diffs || []).slice(0, MAX_FILES_PER_PR).map(diff => (Object.assign(Object.assign({}, diff), { diff: diff.diff ? diff.diff.split("\n").slice(0, MAX_PATCH_LINES).join("\n") : diff.diff })));
+            return { metadata, diffs };
+        })));
+        const limitedBlobResults = blobResults.slice(0, MAX_CODE_RESULTS);
+        const blobsWithUrls = yield Promise.all(limitedBlobResults.map(blob => enhanceBlobWithUrl(blob, authToken, gitlabBaseApiUrl, gitlabBaseUrl, projectPathCache)));
+        const blobs = blobsWithUrls.map(blob => {
+            const matches = mergeRequests
+                .filter(mr => mr.metadata.project_id === blob.project_id && mr.diffs.some(diff => diff.new_path === blob.path))
+                .map(mr => ({
+                title: mr.metadata.title,
+                web_url: mr.metadata.web_url,
+                author: mr.metadata.author ? { name: mr.metadata.author.name } : undefined,
+                merged_at: mr.metadata.merged_at,
+            }));
+            return {
+                metadata: Object.assign(Object.assign({}, blob), { data: blob.data.split("\n").slice(0, MAX_FRAGMENT_LINES).join("\n") }),
+                matchedMergeRequests: matches,
+            };
+        });
+        const limitedCommitResults = commitResults.slice(0, MAX_COMMITS);
+        const commits = yield Promise.all(limitedCommitResults.map(commit => getCommitDetails({
+            projectId: commit.project_id,
+            sha: commit.id,
+            authToken,
+            baseUrl: gitlabBaseApiUrl,
+            webBaseUrl: gitlabBaseUrl,
+            projectPathCache,
+        })));
+        // Transform results into the new standardized format
+        const results = [
+            ...mergeRequests.map(mr => ({
+                name: mr.metadata.title,
+                url: mr.metadata.web_url,
+                type: "mergeRequest",
+                contents: mr,
+            })),
+            ...blobs.map(blob => ({
+                name: blob.metadata.filename,
+                url: blob.metadata.web_url,
+                type: "blob",
+                contents: blob,
+            })),
+            ...commits.map(commit => ({
+                name: commit.message.split("\n")[0], // Use first line of commit message as name
+                url: commit.web_url,
+                type: "commit",
+                contents: commit,
+            })),
+        ];
     return {
-
-
+        success: true,
+        results,
     };
-    }
-
-
-
-
-
-
-
-        projectPathCache,
-    })));
-    return {
-        mergeRequests,
-        blobs,
-        commits,
-    };
+    }
+    catch (error) {
+        return {
+            success: false,
+            error: error instanceof Error ? error.message : "An unknown error occurred",
+            results: [],
+        };
+    }
 });
 export default searchGroup;
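searchGroup previously returned separate `mergeRequests`, `blobs`, and `commits` keys; it now flattens all three into one `results` array tagged with a `type` field. A sketch of consuming that flattened shape (the item type below is inferred from the added lines, not exported by the package):

```typescript
// Inferred from the "+" lines above: each item carries a discriminant "type"
// so a mixed result list can be partitioned back out by the caller.
type GitLabSearchResult = {
  name: string;
  url: string;
  type: "mergeRequest" | "blob" | "commit";
  contents: unknown;
};

function partitionResults(results: GitLabSearchResult[]) {
  return {
    mergeRequests: results.filter(r => r.type === "mergeRequest"),
    blobs: results.filter(r => r.type === "blob"),
    commits: results.filter(r => r.type === "commit"),
  };
}
```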
package/dist/actions/providers/google-oauth/getDriveFileContentById.js

@@ -115,7 +115,16 @@ const getDriveFileContentById = (_a) => __awaiter(void 0, [_a], void 0, function
             // TODO in the future do this around the most valuable snippet of the doc?
             content = content.slice(0, charLimit);
         }
-        return {
+        return {
+            success: true,
+            results: [
+                {
+                    name: fileName,
+                    url: `${BASE_URL}${encodeURIComponent(params.fileId)}`,
+                    contents: { content, fileName, fileLength: originalLength },
+                },
+            ],
+        };
     }
     catch (error) {
         console.error("Error getting Google Drive file content", error);
package/dist/actions/providers/google-oauth/searchDriveByKeywordsAndGetFileContent.js

@@ -13,7 +13,7 @@ import getDriveFileContentById from "./getDriveFileContentById.js";
 const searchDriveByKeywordsAndGetFileContent = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
     var _b;
     if (!authParams.authToken) {
-        return { success: false, error: MISSING_AUTH_TOKEN
+        return { success: false, error: MISSING_AUTH_TOKEN };
     }
     const { searchQuery, limit, searchDriveByDrive, orderByQuery, fileSizeLimit: maxChars } = params;
     const query = searchQuery
@@ -27,11 +27,12 @@ const searchDriveByKeywordsAndGetFileContent = (_a) => __awaiter(void 0, [_a], v
     });
     // If search failed, return error
     if (!searchResult.success) {
-        return { success: false, error: searchResult.error
+        return { success: false, error: searchResult.error };
     }
     // For each file, fetch its content in parallel
     const files = (_b = searchResult.files) !== null && _b !== void 0 ? _b : [];
     const contentPromises = files.map((file) => __awaiter(void 0, void 0, void 0, function* () {
+        var _a, _b, _c;
         try {
             const contentResult = yield getDriveFileContentById({
                 params: { fileId: file.id, limit: maxChars },
@@ -42,7 +43,7 @@ const searchDriveByKeywordsAndGetFileContent = (_a) => __awaiter(void 0, [_a], v
                 name: file.name,
                 mimeType: file.mimeType,
                 url: file.url,
-                content: contentResult.success ? contentResult.content : undefined,
+                content: contentResult.success ? (_c = (_b = (_a = contentResult.results) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b.contents) === null || _c === void 0 ? void 0 : _c.content : undefined,
             };
         }
         catch (error) {
@@ -57,6 +58,13 @@ const searchDriveByKeywordsAndGetFileContent = (_a) => __awaiter(void 0, [_a], v
     }));
     const filesWithContent = yield Promise.all(contentPromises);
     // Return combined results
-    return {
+    return {
+        success: true,
+        results: filesWithContent.map(file => ({
+            name: file.name,
+            url: file.url,
+            contents: file,
+        })),
+    };
 });
 export default searchDriveByKeywordsAndGetFileContent;
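The long compiled ternary added in both Drive search actions is downleveled optional chaining. Since getDriveFileContentById now nests its payload under `results[0].contents`, the source-level form is presumably:

```typescript
// Minimal sketch; the result type mirrors the new getDriveFileContentById
// envelope seen above, but is written out here only for illustration.
type DriveContentResult =
  | { success: true; results?: { contents?: { content?: string } }[] }
  | { success: false; error?: string };

// Source-level equivalent of the compiled `(_c = (_b = (_a = ...)` chain:
// the text now lives one level deeper, under results[0].contents.content.
function extractContent(contentResult: DriveContentResult): string | undefined {
  return contentResult.success
    ? contentResult.results?.[0]?.contents?.content
    : undefined;
}
```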
package/dist/actions/providers/google-oauth/searchDriveByQueryAndGetFileContent.js

@@ -28,6 +28,7 @@ const searchDriveByQueryAndGetFileContent = (_a) => __awaiter(void 0, [_a], void
     // For each file, fetch its content in parallel
     const files = (_b = searchResult.files) !== null && _b !== void 0 ? _b : [];
     const contentPromises = files.map((file) => __awaiter(void 0, void 0, void 0, function* () {
+        var _a, _b, _c;
         try {
             const contentResult = yield getDriveFileContentById({
                 params: { fileId: file.id, limit: maxChars },
@@ -38,7 +39,7 @@ const searchDriveByQueryAndGetFileContent = (_a) => __awaiter(void 0, [_a], void
                 name: file.name,
                 mimeType: file.mimeType,
                 url: file.url,
-                content: contentResult.success ? contentResult.content : undefined,
+                content: contentResult.success ? (_c = (_b = (_a = contentResult.results) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b.contents) === null || _c === void 0 ? void 0 : _c.content : undefined,
             };
         }
         catch (error) {
package/dist/actions/providers/jira/createJiraTicket.js

@@ -8,7 +8,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 });
 };
 import { axiosClient } from "../../util/axiosClient.js";
-import { getUserAccountIdFromEmail } from "./utils.js";
+import { getUserAccountIdFromEmail, getRequestTypeCustomFieldId } from "./utils.js";
 const createJiraTicket = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
     const { authToken, cloudId, baseUrl } = authParams;
     const apiUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/`;
@@ -25,6 +25,14 @@ const createJiraTicket = (_a) => __awaiter(void 0, [_a], void 0, function* ({ pa
     if (params.assignee && typeof params.assignee === "string" && params.assignee.includes("@") && authToken) {
         assigneeId = yield getUserAccountIdFromEmail(params.assignee, apiUrl, authToken);
     }
+    // If request type is provided, find the custom field ID and prepare the value
+    const requestTypeField = {};
+    if (params.requestTypeId && authToken) {
+        const requestTypeFieldId = yield getRequestTypeCustomFieldId(params.projectKey, apiUrl, authToken);
+        if (requestTypeFieldId) {
+            requestTypeField[requestTypeFieldId] = params.requestTypeId;
+        }
+    }
     const description = {
         type: "doc",
         version: 1,
@@ -41,11 +49,11 @@ const createJiraTicket = (_a) => __awaiter(void 0, [_a], void 0, function* ({ pa
         ],
     };
     const payload = {
-        fields: Object.assign(Object.assign(Object.assign({ project: {
+        fields: Object.assign(Object.assign(Object.assign(Object.assign({ project: {
                 key: params.projectKey,
             }, summary: params.summary, description: description, issuetype: {
                 name: params.issueType,
-            } }, (reporterId ? { reporter: { id: reporterId } } : {})), (assigneeId ? { assignee: { id: assigneeId } } : {})), (params.customFields ? params.customFields : {})),
+            } }, (reporterId ? { reporter: { id: reporterId } } : {})), (assigneeId ? { assignee: { id: assigneeId } } : {})), requestTypeField), (params.customFields ? params.customFields : {})),
     };
     const response = yield axiosClient.post(`${apiUrl}/issue`, payload, {
         headers: {
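The extra `Object.assign` wrapper in the payload is the compiled form of one additional spread. In source terms the fields object presumably becomes the following, with `requestTypeField` spread in before any caller-supplied `customFields` so the latter can still override it:

```typescript
// Sketch of the source behind the compiled Object.assign chain; parameter
// types are illustrative, and requestTypeField is the map built in the new
// hunk above ({ [customFieldId]: requestTypeId }).
function buildFields(
  params: { projectKey: string; summary: string; issueType: string; customFields?: Record<string, unknown> },
  description: unknown,
  reporterId: string | undefined,
  assigneeId: string | undefined,
  requestTypeField: Record<string, string>,
) {
  return {
    project: { key: params.projectKey },
    summary: params.summary,
    description,
    issuetype: { name: params.issueType },
    ...(reporterId ? { reporter: { id: reporterId } } : {}),
    ...(assigneeId ? { assignee: { id: assigneeId } } : {}),
    ...requestTypeField,
    ...(params.customFields ?? {}),
  };
}
```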
package/dist/actions/providers/jira/getJiraIssuesByQuery.js

@@ -48,10 +48,12 @@ const getJiraIssuesByQuery = (_a) => __awaiter(void 0, [_a], void 0, function* (
         });
         return {
             success: true,
-
-
-
-
+            results: response.data.issues.map(issue => {
+                var _a, _b, _c, _d;
+                return ({
+                    name: issue.key,
+                    url: `${baseUrl}/browse/${issue.key}`,
+                    contents: {
                         id: issue.id,
                         key: issue.key,
                         summary: issue.fields.summary,
@@ -78,9 +80,9 @@ const getJiraIssuesByQuery = (_a) => __awaiter(void 0, [_a], void 0, function* (
                         resolution: ((_d = issue.fields.resolution) === null || _d === void 0 ? void 0 : _d.name) || null,
                         dueDate: issue.fields.duedate || null,
                         url: `${baseUrl}/browse/${issue.key}`,
-                }
-            })
-        },
+                    },
+                });
+            }),
         };
     }
     catch (error) {
package/dist/actions/providers/jira/getJiraTicketDetails.js

@@ -8,6 +8,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 });
 };
 import { axiosClient } from "../../util/axiosClient.js";
+// https://developer.atlassian.com/cloud/jira/platform/rest/v2/api-group-issues/#api-rest-api-2-issue-issueidorkey-get
 const getJiraTicketDetails = (_a) => __awaiter(void 0, [_a], void 0, function* ({ params, authParams, }) {
     const { authToken, cloudId } = authParams;
     const { issueId } = params;
@@ -24,7 +25,13 @@ const getJiraTicketDetails = (_a) => __awaiter(void 0, [_a], void 0, function* (
         });
         return {
             success: true,
-
+            results: [
+                {
+                    name: response.data.key,
+                    url: response.data.self,
+                    contents: response.data,
+                },
+            ],
         };
     }
     catch (error) {