microfox 1.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +72 -0
- package/README.md +114 -0
- package/dist/agent-template.txt +486 -0
- package/dist/background-agent/.gitignore.txt +22 -0
- package/dist/background-agent/eslint.config.js.txt +48 -0
- package/dist/background-agent/microfox.json.txt +4 -0
- package/dist/background-agent/openapi.md.txt +1 -0
- package/dist/background-agent/package.json.txt +47 -0
- package/dist/background-agent/serverless.yml.txt +123 -0
- package/dist/background-agent/src/functions/cron-paginate.ts.txt +135 -0
- package/dist/background-agent/src/functions/cron-populate.ts.txt +33 -0
- package/dist/background-agent/src/functions/route-trigger-index.ts.txt +157 -0
- package/dist/background-agent/src/functions/route-trigger-populate.ts.txt +0 -0
- package/dist/background-agent/src/functions/sqs-index.ts.txt +147 -0
- package/dist/background-agent/src/helpers/ragRedis.ts.txt +78 -0
- package/dist/background-agent/src/index.ts.txt +69 -0
- package/dist/background-agent/tsconfig.json.txt +33 -0
- package/dist/chunk-4HNHBA2H.mjs +104 -0
- package/dist/chunk-4HNHBA2H.mjs.map +1 -0
- package/dist/chunk-ARAHSYJI.mjs +263 -0
- package/dist/chunk-ARAHSYJI.mjs.map +1 -0
- package/dist/chunk-JGAX4PD6.mjs +290 -0
- package/dist/chunk-JGAX4PD6.mjs.map +1 -0
- package/dist/chunk-KPJJOO76.mjs +12 -0
- package/dist/chunk-KPJJOO76.mjs.map +1 -0
- package/dist/chunk-TZQZMKHP.mjs +11 -0
- package/dist/chunk-TZQZMKHP.mjs.map +1 -0
- package/dist/chunk-UFRGJMF4.mjs +154 -0
- package/dist/chunk-UFRGJMF4.mjs.map +1 -0
- package/dist/chunk-UHWJTQKW.mjs +139 -0
- package/dist/chunk-UHWJTQKW.mjs.map +1 -0
- package/dist/chunk-UYROVW53.mjs +89 -0
- package/dist/chunk-UYROVW53.mjs.map +1 -0
- package/dist/chunk-XGFSFWK3.mjs +113 -0
- package/dist/chunk-XGFSFWK3.mjs.map +1 -0
- package/dist/cli.d.mts +2 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +1154 -0
- package/dist/cli.js.map +1 -0
- package/dist/cli.mjs +52 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/commands/add.d.mts +5 -0
- package/dist/commands/add.d.ts +5 -0
- package/dist/commands/add.js +126 -0
- package/dist/commands/add.js.map +1 -0
- package/dist/commands/add.mjs +10 -0
- package/dist/commands/add.mjs.map +1 -0
- package/dist/commands/code.d.mts +5 -0
- package/dist/commands/code.d.ts +5 -0
- package/dist/commands/code.js +187 -0
- package/dist/commands/code.js.map +1 -0
- package/dist/commands/code.mjs +9 -0
- package/dist/commands/code.mjs.map +1 -0
- package/dist/commands/install.d.mts +5 -0
- package/dist/commands/install.d.ts +5 -0
- package/dist/commands/install.js +296 -0
- package/dist/commands/install.js.map +1 -0
- package/dist/commands/install.mjs +9 -0
- package/dist/commands/install.mjs.map +1 -0
- package/dist/commands/kickstart.d.mts +5 -0
- package/dist/commands/kickstart.d.ts +5 -0
- package/dist/commands/kickstart.js +322 -0
- package/dist/commands/kickstart.js.map +1 -0
- package/dist/commands/kickstart.mjs +10 -0
- package/dist/commands/kickstart.mjs.map +1 -0
- package/dist/commands/push.d.mts +5 -0
- package/dist/commands/push.d.ts +5 -0
- package/dist/commands/push.js +137 -0
- package/dist/commands/push.js.map +1 -0
- package/dist/commands/push.mjs +9 -0
- package/dist/commands/push.mjs.map +1 -0
- package/dist/commands/status.d.mts +7 -0
- package/dist/commands/status.d.ts +7 -0
- package/dist/commands/status.js +148 -0
- package/dist/commands/status.js.map +1 -0
- package/dist/commands/status.mjs +13 -0
- package/dist/commands/status.mjs.map +1 -0
- package/dist/commands/update.d.mts +5 -0
- package/dist/commands/update.d.ts +5 -0
- package/dist/commands/update.js +172 -0
- package/dist/commands/update.js.map +1 -0
- package/dist/commands/update.mjs +9 -0
- package/dist/commands/update.mjs.map +1 -0
- package/dist/index.d.mts +20 -0
- package/dist/index.d.ts +20 -0
- package/dist/index.js +406 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +14 -0
- package/dist/index.mjs.map +1 -0
- package/dist/package-template.txt +494 -0
- package/package.json +68 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
// TODO: this is a simple lambda function, that intakes a record from an SQS queue, and indexes the subreddit into the vector database
|
|
2
|
+
|
|
3
|
+
import { ToolParse, ProcessTask } from "@microfox/tool-core";
|
|
4
|
+
import { SQSEvent, SQSBatchResponse } from "aws-lambda";
|
|
5
|
+
import {
|
|
6
|
+
ragSubredditPaginator,
|
|
7
|
+
ragRedditVectorbase,
|
|
8
|
+
ragredditRedis,
|
|
9
|
+
subredditStore,
|
|
10
|
+
RagPostMetadata,
|
|
11
|
+
} from "../helpers/ragRedis.js";
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
const toolHandler = new ToolParse({});
|
|
15
|
+
|
|
16
|
+
const taskHandler = new ProcessTask({
|
|
17
|
+
url: process.env.TASK_UPSTASH_REDIS_REST_URL,
|
|
18
|
+
token: process.env.TASK_UPSTASH_REDIS_REST_TOKEN,
|
|
19
|
+
});
|
|
20
|
+
|
|
21
|
+
// The return type is changed to SQSBatchResponse to allow for partial batch failures.
|
|
22
|
+
// This means if one message in a batch fails, only that message will be retried, not the entire batch.
|
|
23
|
+
// This requires enabling "Report batch item failures" in the Lambda trigger configuration for the SQS queue.
|
|
24
|
+
export const handler = async (event: SQSEvent): Promise<SQSBatchResponse> => {
|
|
25
|
+
console.log("handler: sqs-index-subreddit", {
|
|
26
|
+
records: event.Records.length,
|
|
27
|
+
});
|
|
28
|
+
const batchItemFailures = [];
|
|
29
|
+
for (const record of event.Records) {
|
|
30
|
+
console.log("handler: processing record", { messageId: record.messageId });
|
|
31
|
+
const body = JSON.parse(record.body);
|
|
32
|
+
console.log("Processing message:", body);
|
|
33
|
+
const { parma1, xAuthSecrets, task_id } = body;
|
|
34
|
+
try {
|
|
35
|
+
await taskHandler.updateTask(task_id, {
|
|
36
|
+
status: "processing",
|
|
37
|
+
});
|
|
38
|
+
|
|
39
|
+
// Param checks
|
|
40
|
+
if (
|
|
41
|
+
!parma1 ||
|
|
42
|
+
parma1 === "null" ||
|
|
43
|
+
parma1 === "undefined" ||
|
|
44
|
+
parma1 === undefined
|
|
45
|
+
) {
|
|
46
|
+
console.error("handler: param1 is required", { parma1 });
|
|
47
|
+
continue;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
// Auth checks
|
|
51
|
+
if (xAuthSecrets) {
|
|
52
|
+
console.log("Populating env vars from xAuthSecrets", xAuthSecrets);
|
|
53
|
+
toolHandler.populateEnvVars({
|
|
54
|
+
headers: {
|
|
55
|
+
"x-auth-secrets": xAuthSecrets,
|
|
56
|
+
},
|
|
57
|
+
} as any);
|
|
58
|
+
console.log("handler: populated env vars from xAuthSecrets");
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
// Feed Env Vars dynamically.
|
|
62
|
+
if (!process.env.SOMETHING) {
|
|
63
|
+
console.log(
|
|
64
|
+
"Fetching env vars from microfox template api",
|
|
65
|
+
Object.keys(process.env)
|
|
66
|
+
);
|
|
67
|
+
await toolHandler.fetchEnvVars({
|
|
68
|
+
stage: "staging",
|
|
69
|
+
packageName: "@microfox/somthing",
|
|
70
|
+
templateType: "testing",
|
|
71
|
+
});
|
|
72
|
+
console.log("handler: fetched env vars from microfox template api");
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
// 1. Create Constructors
|
|
76
|
+
// 2. Write the code to do your thing.
|
|
77
|
+
// 3. Output a data object.
|
|
78
|
+
const data: any = {};
|
|
79
|
+
|
|
80
|
+
// Plain Data Insertion
|
|
81
|
+
try {
|
|
82
|
+
if (data) {
|
|
83
|
+
await subredditStore.set(data.uniqueId, data);
|
|
84
|
+
console.log(`Stored data info for ${data.uniqueId}`);
|
|
85
|
+
} else {
|
|
86
|
+
console.log(`Could not find data info for ${data.uniqueId}`);
|
|
87
|
+
}
|
|
88
|
+
} catch (e) {
|
|
89
|
+
console.error("Could not get data info", e);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// Rag Data Insertion
|
|
93
|
+
const pagination = await ragSubredditPaginator(parma1).startNewIndexing({
|
|
94
|
+
done: false,
|
|
95
|
+
});
|
|
96
|
+
|
|
97
|
+
console.log("handler: fetching new posts from subreddit", { pagination });
|
|
98
|
+
// TODO: do your thing
|
|
99
|
+
const documents = data.documents
|
|
100
|
+
.map(({ data: p }: { data: any }) => ({
|
|
101
|
+
id: p.id,
|
|
102
|
+
doc: ``,
|
|
103
|
+
metadata: p,
|
|
104
|
+
}))
|
|
105
|
+
.filter((d: any) => d.doc != null && d.id != null);
|
|
106
|
+
|
|
107
|
+
if (documents.length > 0) {
|
|
108
|
+
console.log(`Indexing ${documents.length} posts to vectorbase...`);
|
|
109
|
+
// Delete the previously Indexed Data (Optional)
|
|
110
|
+
await ragRedditVectorbase.delete(
|
|
111
|
+
{
|
|
112
|
+
filter: `metadata.subreddit = "${data.uniqueId}"`,
|
|
113
|
+
},
|
|
114
|
+
{
|
|
115
|
+
namespace: "ragreddit",
|
|
116
|
+
}
|
|
117
|
+
);
|
|
118
|
+
await ragRedditVectorbase.feedDocsToRAG(documents, "ragreddit");
|
|
119
|
+
|
|
120
|
+
console.log("Successfully indexed posts.");
|
|
121
|
+
} else {
|
|
122
|
+
console.log("No new posts to index.");
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
console.log("handler: completing indexing", { id: parma1 });
|
|
126
|
+
await ragSubredditPaginator(parma1).completeIndexing();
|
|
127
|
+
console.log("handler: completed indexing", { id: parma1 });
|
|
128
|
+
await taskHandler.updateTask(task_id, {
|
|
129
|
+
status: "completed",
|
|
130
|
+
});
|
|
131
|
+
} catch (error) {
|
|
132
|
+
console.error(`Failed to process SQS message ${record.messageId}`, error);
|
|
133
|
+
await ragSubredditPaginator(parma1).failIndexing(
|
|
134
|
+
error instanceof Error ? error.message : "Unknown error"
|
|
135
|
+
);
|
|
136
|
+
console.log("handler: failed indexing", { id: parma1 });
|
|
137
|
+
await taskHandler.updateTask(task_id, {
|
|
138
|
+
status: "failed",
|
|
139
|
+
});
|
|
140
|
+
batchItemFailures.push({ itemIdentifier: record.messageId });
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
console.log("handler: finished processing batch", {
|
|
144
|
+
batchItemFailures: batchItemFailures.length,
|
|
145
|
+
});
|
|
146
|
+
return { batchItemFailures };
|
|
147
|
+
};
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { Redis } from "@upstash/redis";
|
|
2
|
+
import { RagUpstashSdk } from "@microfox/rag-upstash";
|
|
3
|
+
import { Crud, Paginator } from "@microfox/db-upstash";
|
|
4
|
+
|
|
5
|
+
export const ragredditRedis = new Redis({
|
|
6
|
+
url: process.env.UPSTASH_RAGREDDIT_REDIS_REST_URL || "",
|
|
7
|
+
token: process.env.UPSTASH_RAGREDDIT_REDIS_REST_TOKEN || "",
|
|
8
|
+
});
|
|
9
|
+
|
|
10
|
+
export type RagPostMetadata = {
|
|
11
|
+
id: string;
|
|
12
|
+
title: string;
|
|
13
|
+
url: string;
|
|
14
|
+
permalink?: string;
|
|
15
|
+
score?: number;
|
|
16
|
+
author: string;
|
|
17
|
+
author_fullname?: string;
|
|
18
|
+
created_utc: number;
|
|
19
|
+
subreddit: string;
|
|
20
|
+
num_comments: number;
|
|
21
|
+
ups: number;
|
|
22
|
+
downs: number;
|
|
23
|
+
pwls?: number;
|
|
24
|
+
wls?: number;
|
|
25
|
+
selftext: string;
|
|
26
|
+
selftext_html?: string;
|
|
27
|
+
media?: any;
|
|
28
|
+
};
|
|
29
|
+
|
|
30
|
+
export const ragRedditVectorbase = new RagUpstashSdk<RagPostMetadata>({
|
|
31
|
+
upstashUrl: process.env.UPSTASH_RAGREDDIT_VECTOR_REST_URL || "",
|
|
32
|
+
upstashToken: process.env.UPSTASH_RAGREDDIT_VECTOR_REST_TOKEN || "",
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
//ragRedditVectorbase.feedDocsToRAG()
|
|
36
|
+
//ragRedditVectorbase.queryDocsFromRAG()
|
|
37
|
+
|
|
38
|
+
export type SubredditIndexing = {
|
|
39
|
+
done: boolean;
|
|
40
|
+
};
|
|
41
|
+
|
|
42
|
+
export const subredditListPaginator = new Paginator<{
|
|
43
|
+
page: number;
|
|
44
|
+
itemsPerPage: number;
|
|
45
|
+
totalCount: number;
|
|
46
|
+
}>(ragredditRedis, "subreddit_list");
|
|
47
|
+
|
|
48
|
+
export const ragSubredditPaginator = (subreddit: string) => {
|
|
49
|
+
return new Paginator<SubredditIndexing>(
|
|
50
|
+
ragredditRedis,
|
|
51
|
+
`rag_subreddit_${subreddit}`
|
|
52
|
+
);
|
|
53
|
+
};
|
|
54
|
+
|
|
55
|
+
export type SubredditInfo = {
|
|
56
|
+
id: string;
|
|
57
|
+
name: string;
|
|
58
|
+
title: string;
|
|
59
|
+
description: string;
|
|
60
|
+
url: string;
|
|
61
|
+
avatar: string;
|
|
62
|
+
createdAt: string;
|
|
63
|
+
updatedAt: string;
|
|
64
|
+
lastIndexedAt: string;
|
|
65
|
+
postCount: number;
|
|
66
|
+
commentCount: number;
|
|
67
|
+
color: string;
|
|
68
|
+
};
|
|
69
|
+
|
|
70
|
+
export const subredditStore = new Crud<SubredditInfo>(
|
|
71
|
+
ragredditRedis,
|
|
72
|
+
`subreddit`
|
|
73
|
+
);
|
|
74
|
+
|
|
75
|
+
export type SubredditReport = {
|
|
76
|
+
id: string;
|
|
77
|
+
name: string;
|
|
78
|
+
};
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import { handleApiRequestDocs as handleTriggerIndexDocs } from "./functions/route-trigger-index.js";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Complete API documentation
|
|
5
|
+
* This is an OpenAPI 3.0.1 definition for the agent's API.
|
|
6
|
+
*/
|
|
7
|
+
const apiDocs = {
|
|
8
|
+
openapi: "3.0.1",
|
|
9
|
+
info: {
|
|
10
|
+
title: "template API",
|
|
11
|
+
version: "1.0.0",
|
|
12
|
+
description: "API for the template agent.",
|
|
13
|
+
contact: {
|
|
14
|
+
name: "API Support",
|
|
15
|
+
email: "support@microfox.app",
|
|
16
|
+
},
|
|
17
|
+
},
|
|
18
|
+
servers: [
|
|
19
|
+
{
|
|
20
|
+
url: "https://api.microfox.com/agents/template",
|
|
21
|
+
description: "Production server",
|
|
22
|
+
},
|
|
23
|
+
],
|
|
24
|
+
auth: "x-auth-packages",
|
|
25
|
+
paths: {
|
|
26
|
+
"/index": {
|
|
27
|
+
post: handleTriggerIndexDocs,
|
|
28
|
+
},
|
|
29
|
+
},
|
|
30
|
+
components: {
|
|
31
|
+
schemas: {},
|
|
32
|
+
"x-auth-packages": [
|
|
33
|
+
{
|
|
34
|
+
packageName: "@microfox/ai-provider-anthropic",
|
|
35
|
+
},
|
|
36
|
+
],
|
|
37
|
+
},
|
|
38
|
+
};
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* GET endpoint to serve API documentation.
|
|
42
|
+
* This function returns the OpenAPI specification in JSON format.
|
|
43
|
+
*/
|
|
44
|
+
export const getDocs = async () => {
|
|
45
|
+
try {
|
|
46
|
+
return {
|
|
47
|
+
statusCode: 200,
|
|
48
|
+
headers: {
|
|
49
|
+
"Content-Type": "application/json",
|
|
50
|
+
"Access-Control-Allow-Origin": "*",
|
|
51
|
+
"Access-Control-Allow-Credentials": true,
|
|
52
|
+
},
|
|
53
|
+
body: JSON.stringify(apiDocs, null, 2),
|
|
54
|
+
};
|
|
55
|
+
} catch (error) {
|
|
56
|
+
console.error("Error serving docs:", error);
|
|
57
|
+
return {
|
|
58
|
+
statusCode: 500,
|
|
59
|
+
headers: {
|
|
60
|
+
"Content-Type": "application/json",
|
|
61
|
+
"Access-Control-Allow-Origin": "*",
|
|
62
|
+
"Access-Control-Allow-Credentials": true,
|
|
63
|
+
},
|
|
64
|
+
body: JSON.stringify({
|
|
65
|
+
error: error instanceof Error ? error.message : "Unknown error",
|
|
66
|
+
}),
|
|
67
|
+
};
|
|
68
|
+
}
|
|
69
|
+
};
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2020",
|
|
4
|
+
"module": "NodeNext",
|
|
5
|
+
"moduleResolution": "NodeNext",
|
|
6
|
+
"lib": ["ES2020"],
|
|
7
|
+
"allowJs": true,
|
|
8
|
+
"strict": true,
|
|
9
|
+
"esModuleInterop": true,
|
|
10
|
+
"skipLibCheck": true,
|
|
11
|
+
"forceConsistentCasingInFileNames": true,
|
|
12
|
+
"experimentalDecorators": true,
|
|
13
|
+
"emitDecoratorMetadata": true,
|
|
14
|
+
"strictPropertyInitialization": false,
|
|
15
|
+
"noImplicitAny": true,
|
|
16
|
+
"noCheck": false,
|
|
17
|
+
"outDir": "./dist",
|
|
18
|
+
"rootDir": "./src",
|
|
19
|
+
"baseUrl": "./src",
|
|
20
|
+
"paths": {
|
|
21
|
+
"@/*": ["*"]
|
|
22
|
+
}
|
|
23
|
+
},
|
|
24
|
+
"ts-node": {
|
|
25
|
+
"esm": true,
|
|
26
|
+
"compilerOptions": {
|
|
27
|
+
"module": "NodeNext",
|
|
28
|
+
"moduleResolution": "NodeNext"
|
|
29
|
+
}
|
|
30
|
+
},
|
|
31
|
+
"include": ["src/**/*"],
|
|
32
|
+
"exclude": ["node_modules", "dist/**", ".serverless/**"]
|
|
33
|
+
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/commands/push.ts
|
|
4
|
+
import { Command } from "commander";
|
|
5
|
+
import fs from "fs";
|
|
6
|
+
import path from "path";
|
|
7
|
+
import chalk from "chalk";
|
|
8
|
+
import axios from "axios";
|
|
9
|
+
import micromatch from "micromatch";
|
|
10
|
+
var API_ENDPOINT = "https://staging-cicd.microfox.app/api/deployments/new-agent-cli";

// Recursively flattens a directory tree into a list of file entries,
// skipping anything matched by the ignore globs. Paths are joined relative
// to the walk root and normalized to forward slashes.
var getDirectoryFiles = (dir, basePath = "", ignorePatterns) => {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  return entries.flatMap((entry) => {
    const rel = path.join(basePath, entry.name);
    // Ignored entries contribute nothing (and are not descended into).
    if (micromatch.isMatch(rel, ignorePatterns)) {
      return [];
    }
    if (entry.isDirectory()) {
      return getDirectoryFiles(path.join(dir, entry.name), rel, ignorePatterns);
    }
    return [
      {
        type: "file",
        name: entry.name,
        path: rel.replace(/\\/g, "/"),
        content: fs.readFileSync(path.join(dir, entry.name), "utf-8")
      }
    ];
  });
};
|
|
32
|
+
// Bundles every non-ignored file under the current working directory and
// POSTs the tree to the Microfox deployment endpoint. Exits the process
// with code 1 on any failure (missing config, non-200 response, or error).
async function pushAction() {
  const cwd = process.cwd();
  const microfoxConfigPath = path.join(cwd, "microfox.json");

  // A microfox.json at the root marks an agent project; refuse to run elsewhere.
  if (!fs.existsSync(microfoxConfigPath)) {
    console.error(chalk.red("\u274C Error: `microfox.json` not found in the current directory."));
    console.log(chalk.yellow("This command must be run from the root of an agent project."));
    process.exit(1);
  }

  console.log(chalk.cyan("\u{1F680} Pushing your agent to Microfox..."));
  const microfoxConfig = JSON.parse(fs.readFileSync(microfoxConfigPath, "utf-8"));

  // Optional API key from env.json; a missing or unparsable file only warns.
  let agentApiKey;
  const envPath = path.join(cwd, "env.json");
  if (fs.existsSync(envPath)) {
    try {
      agentApiKey = JSON.parse(fs.readFileSync(envPath, "utf-8")).AGENT_API_KEY;
    } catch (e) {
      console.warn(chalk.yellow("\u26A0\uFE0F Could not read or parse `env.json`. The AGENT_API_KEY will not be sent."));
    }
  }

  // Merge the built-in ignore globs with any project-specific ones.
  const stage = microfoxConfig.stage || "prod";
  const defaultIgnore = ["node_modules/**", ".git/**", "dist/**", ".build/**", ".serverless/**", ".DS_Store", "package-lock.json", "pnpm-lock.yaml"];
  const allIgnored = defaultIgnore.concat(microfoxConfig.ignored || []);
  const files = getDirectoryFiles(cwd, "", allIgnored);

  try {
    console.log(chalk.blue("\u{1F4E6} Bundling and deploying your agent..."));
    const payload = {
      stage,
      isLocal: false,
      dir: files
    };
    const response = await axios.post(API_ENDPOINT, payload, {
      headers: {
        "x-agent-api-key": agentApiKey
      }
    });
    if (response.status === 200) {
      console.log(chalk.green("\u2705 Deployment successful!"));
      console.log(chalk.green(` Run ID: ${response.data.runId}`));
      console.log(chalk.green(` Message: ${response.data.message}`));
    } else {
      console.error(chalk.red(`\u274C Deployment failed with status: ${response.status}`));
      console.error(response.data);
      process.exit(1);
    }
  } catch (error) {
    console.error(chalk.red("\u274C An error occurred during deployment:"));
    if (axios.isAxiosError(error) && error.response) {
      console.error(chalk.red(` Status: ${error.response.status}`));
      console.error(chalk.red(` Data: ${JSON.stringify(error.response.data, null, 2)}`));
    } else {
      console.error(error);
    }
    process.exit(1);
  }
}
|
|
92
|
+
// CLI registration: `push` deploys the agent in the current working
// directory to the Microfox platform, surfacing any error and exiting 1.
var pushCommand = new Command("push")
  .description("Deploy your agent to the Microfox platform")
  .action(async () => {
    try {
      await pushAction();
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      console.error(chalk.red("\u274C Error:"), message);
      process.exit(1);
    }
  });

export {
  pushCommand
};
//# sourceMappingURL=chunk-4HNHBA2H.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/commands/push.ts"],"sourcesContent":["import { Command } from 'commander';\nimport fs from 'fs';\nimport path from 'path';\nimport chalk from 'chalk';\nimport axios from 'axios';\nimport micromatch from 'micromatch';\n\ninterface FileDirectory {\n type: 'file' | 'directory';\n name: string;\n path: string;\n content?: string;\n children?: FileDirectory[];\n}\n\nconst API_ENDPOINT = 'https://staging-cicd.microfox.app/api/deployments/new-agent-cli';\n\nconst getDirectoryFiles = (dir: string, basePath: string = '', ignorePatterns: string[]): FileDirectory[] => {\n const structure: FileDirectory[] = [];\n const items = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const item of items) {\n const relativePath = path.join(basePath, item.name);\n if (micromatch.isMatch(relativePath, ignorePatterns)) {\n continue;\n }\n\n if (item.isDirectory()) {\n structure.push(...getDirectoryFiles(path.join(dir, item.name), relativePath, ignorePatterns));\n } else {\n structure.push({\n type: 'file',\n name: item.name,\n path: relativePath.replace(/\\\\/g, '/'),\n content: fs.readFileSync(path.join(dir, item.name), 'utf-8'),\n });\n }\n }\n return structure;\n};\n\nasync function pushAction(): Promise<void> {\n const cwd = process.cwd();\n const microfoxConfigPath = path.join(cwd, 'microfox.json');\n\n if (!fs.existsSync(microfoxConfigPath)) {\n console.error(chalk.red('❌ Error: `microfox.json` not found in the current directory.'));\n console.log(chalk.yellow('This command must be run from the root of an agent project.'));\n process.exit(1);\n }\n\n console.log(chalk.cyan('🚀 Pushing your agent to Microfox...'));\n\n const microfoxConfig = JSON.parse(fs.readFileSync(microfoxConfigPath, 'utf-8'));\n\n let agentApiKey: string | undefined;\n const envPath = path.join(cwd, 'env.json');\n if (fs.existsSync(envPath)) {\n try {\n const envConfig = JSON.parse(fs.readFileSync(envPath, 'utf-8'));\n agentApiKey = envConfig.AGENT_API_KEY;\n } catch (e) {\n 
console.warn(chalk.yellow('⚠️ Could not read or parse `env.json`. The AGENT_API_KEY will not be sent.'));\n }\n }\n\n const stage = microfoxConfig.stage || 'prod';\n const ignored: string[] = microfoxConfig.ignored || [];\n\n const defaultIgnore = ['node_modules/**', '.git/**', 'dist/**', '.build/**', '.serverless/**', '.DS_Store', 'package-lock.json', 'pnpm-lock.yaml'];\n const allIgnored = [...defaultIgnore, ...ignored];\n\n const files: FileDirectory[] = getDirectoryFiles(cwd, '', allIgnored);\n\n // console.log(JSON.stringify(files, null, 2));\n \n try {\n console.log(chalk.blue('📦 Bundling and deploying your agent...'));\n const response = await axios.post(\n API_ENDPOINT,\n {\n stage,\n isLocal: false,\n dir: files,\n },\n {\n headers: {\n 'x-agent-api-key': agentApiKey,\n },\n },\n );\n\n if (response.status === 200) {\n console.log(chalk.green('✅ Deployment successful!'));\n console.log(chalk.green(` Run ID: ${response.data.runId}`));\n console.log(chalk.green(` Message: ${response.data.message}`));\n } else {\n console.error(chalk.red(`❌ Deployment failed with status: ${response.status}`));\n console.error(response.data);\n process.exit(1);\n }\n } catch (error) {\n console.error(chalk.red('❌ An error occurred during deployment:'));\n if (axios.isAxiosError(error) && error.response) {\n console.error(chalk.red(` Status: ${error.response.status}`));\n console.error(chalk.red(` Data: ${JSON.stringify(error.response.data, null, 2)}`));\n } else {\n console.error(error);\n }\n process.exit(1);\n }\n}\n\nexport const pushCommand = new Command('push')\n .description('Deploy your agent to the Microfox platform')\n .action(async () => {\n try {\n await pushAction();\n } catch (error) {\n console.error(chalk.red('❌ Error:'), error instanceof Error ? 
error.message : String(error));\n process.exit(1);\n }\n }); "],"mappings":";;;AAAA,SAAS,eAAe;AACxB,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,OAAO,WAAW;AAClB,OAAO,WAAW;AAClB,OAAO,gBAAgB;AAUvB,IAAM,eAAe;AAErB,IAAM,oBAAoB,CAAC,KAAa,WAAmB,IAAI,mBAA8C;AAC3G,QAAM,YAA6B,CAAC;AACpC,QAAM,QAAQ,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AAEzD,aAAW,QAAQ,OAAO;AACxB,UAAM,eAAe,KAAK,KAAK,UAAU,KAAK,IAAI;AAClD,QAAI,WAAW,QAAQ,cAAc,cAAc,GAAG;AACpD;AAAA,IACF;AAEA,QAAI,KAAK,YAAY,GAAG;AACtB,gBAAU,KAAK,GAAG,kBAAkB,KAAK,KAAK,KAAK,KAAK,IAAI,GAAG,cAAc,cAAc,CAAC;AAAA,IAC9F,OAAO;AACL,gBAAU,KAAK;AAAA,QACb,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,MAAM,aAAa,QAAQ,OAAO,GAAG;AAAA,QACrC,SAAS,GAAG,aAAa,KAAK,KAAK,KAAK,KAAK,IAAI,GAAG,OAAO;AAAA,MAC7D,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAe,aAA4B;AACzC,QAAM,MAAM,QAAQ,IAAI;AACxB,QAAM,qBAAqB,KAAK,KAAK,KAAK,eAAe;AAEzD,MAAI,CAAC,GAAG,WAAW,kBAAkB,GAAG;AACtC,YAAQ,MAAM,MAAM,IAAI,mEAA8D,CAAC;AACvF,YAAQ,IAAI,MAAM,OAAO,6DAA6D,CAAC;AACvF,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,IAAI,MAAM,KAAK,6CAAsC,CAAC;AAE9D,QAAM,iBAAiB,KAAK,MAAM,GAAG,aAAa,oBAAoB,OAAO,CAAC;AAE9E,MAAI;AACJ,QAAM,UAAU,KAAK,KAAK,KAAK,UAAU;AACzC,MAAI,GAAG,WAAW,OAAO,GAAG;AAC1B,QAAI;AACF,YAAM,YAAY,KAAK,MAAM,GAAG,aAAa,SAAS,OAAO,CAAC;AAC9D,oBAAc,UAAU;AAAA,IAC1B,SAAS,GAAG;AACV,cAAQ,KAAK,MAAM,OAAO,uFAA6E,CAAC;AAAA,IAC1G;AAAA,EACF;AAEA,QAAM,QAAQ,eAAe,SAAS;AACtC,QAAM,UAAoB,eAAe,WAAW,CAAC;AAErD,QAAM,gBAAgB,CAAC,mBAAmB,WAAW,WAAW,aAAa,kBAAkB,aAAa,qBAAqB,gBAAgB;AACjJ,QAAM,aAAa,CAAC,GAAG,eAAe,GAAG,OAAO;AAEhD,QAAM,QAAyB,kBAAkB,KAAK,IAAI,UAAU;AAIpE,MAAI;AACF,YAAQ,IAAI,MAAM,KAAK,gDAAyC,CAAC;AACjE,UAAM,WAAW,MAAM,MAAM;AAAA,MAC3B;AAAA,MACA;AAAA,QACE;AAAA,QACA,SAAS;AAAA,QACT,KAAK;AAAA,MACP;AAAA,MACA;AAAA,QACE,SAAS;AAAA,UACP,mBAAmB;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW,KAAK;AAC3B,cAAQ,IAAI,MAAM,MAAM,+BAA0B,CAAC;AACnD,cAAQ,IAAI,MAAM,MAAM,cAAc,SAAS,KAAK,KAAK,EAAE,CAAC;AAC5D,cAAQ,IAAI,MAAM,MAAM,eAAe,SAAS,KAAK,OAAO,EAAE,CAAC;AAAA,IACjE,OAAO;AACL,cAAQ,MAAM,MAAM,IAAI,yCAAoC,SAAS,MAAM,EAAE,CAAC;AAC9E,cAAQ,MAAM,SAAS,IAAI;AAC3B,cAAQ,KAAK,
CAAC;AAAA,IAChB;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAM,MAAM,IAAI,6CAAwC,CAAC;AACjE,QAAI,MAAM,aAAa,KAAK,KAAK,MAAM,UAAU;AAC/C,cAAQ,MAAM,MAAM,IAAI,cAAc,MAAM,SAAS,MAAM,EAAE,CAAC;AAC9D,cAAQ,MAAM,MAAM,IAAI,YAAY,KAAK,UAAU,MAAM,SAAS,MAAM,MAAM,CAAC,CAAC,EAAE,CAAC;AAAA,IACrF,OAAO;AACL,cAAQ,MAAM,KAAK;AAAA,IACrB;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEO,IAAM,cAAc,IAAI,QAAQ,MAAM,EACxC,YAAY,4CAA4C,EACxD,OAAO,YAAY;AAChB,MAAI;AACA,UAAM,WAAW;AAAA,EACrB,SAAS,OAAO;AACZ,YAAQ,MAAM,MAAM,IAAI,eAAU,GAAG,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAC3F,YAAQ,KAAK,CAAC;AAAA,EAClB;AACJ,CAAC;","names":[]}
|