@fil-b/foc-storage-mcp 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +243 -0
- package/dist/mcp-server.d.ts +1 -0
- package/dist/mcp-server.js +1412 -0
- package/package.json +66 -0
|
@@ -0,0 +1,1412 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/mcp-server.ts
|
|
4
|
+
import { config as config2 } from "dotenv";
|
|
5
|
+
|
|
6
|
+
// src/mastra/index.ts
|
|
7
|
+
import { Mastra } from "@mastra/core/mastra";
|
|
8
|
+
import { PinoLogger } from "@mastra/loggers";
|
|
9
|
+
import { LibSQLStore } from "@mastra/libsql";
|
|
10
|
+
import { MCPServer } from "@mastra/mcp";
|
|
11
|
+
|
|
12
|
+
// src/mastra/agents/foc-storage-agent.ts
|
|
13
|
+
import { Agent } from "@mastra/core/agent";
|
|
14
|
+
|
|
15
|
+
// src/mastra/tools/dataset-tools.ts
|
|
16
|
+
import { createTool } from "@mastra/core";
|
|
17
|
+
|
|
18
|
+
// src/types/core.ts
|
|
19
|
+
import {
|
|
20
|
+
TIME_CONSTANTS,
|
|
21
|
+
SIZE_CONSTANTS,
|
|
22
|
+
TOKENS
|
|
23
|
+
} from "@filoz/synapse-sdk";
|
|
24
|
+
|
|
25
|
+
// src/types/schemas.ts
|
|
26
|
+
import { z as z2 } from "zod";
|
|
27
|
+
|
|
28
|
+
// src/config/index.ts
|
|
29
|
+
import { config } from "dotenv";
|
|
30
|
+
import { z } from "zod";
|
|
31
|
+
// Load variables from a local .env file into process.env before validation.
config();
// Runtime environment contract. EnvSchema.parse below runs at module load,
// so a missing PRIVATE_KEY fails fast at startup rather than mid-request.
var EnvSchema = z.object({
  PRIVATE_KEY: z.string().min(1, "PRIVATE_KEY is required"),
  FILECOIN_NETWORK: z.enum(["mainnet", "calibration"]).default("calibration"),
  // Storage sizing defaults used when tool inputs omit them (1024 GiB = 1 TiB).
  TOTAL_STORAGE_NEEDED_GiB: z.coerce.number().default(1024),
  PERSISTENCE_PERIOD_DAYS: z.coerce.number().default(365),
  // Days-of-runway threshold below which balance checks report that a top-up is needed.
  RUNOUT_NOTIFICATION_THRESHOLD_DAYS: z.coerce.number().default(10)
});
// Validated, typed view of process.env shared by the rest of the module.
var env = EnvSchema.parse(process.env);
|
|
40
|
+
// Chain parameters for the supported Filecoin networks, keyed by env.FILECOIN_NETWORK.
// Only rpcUrl is read by getSynapseInstance below; chainId/name are informational here.
var NETWORK_CONFIGS = {
  mainnet: {
    chainId: 314,
    name: "Filecoin Mainnet",
    rpcUrl: "https://api.node.glif.io/rpc/v1"
  },
  calibration: {
    chainId: 314159,
    name: "Filecoin Calibration",
    rpcUrl: "https://api.calibration.node.glif.io/rpc/v1"
  }
};
|
|
52
|
+
// Sentinel for "unlimited" allowances passed to the payments contract.
var MAX_UINT256 = 2n ** 256n - 1n;
// One-time data set creation fee: 0.1 USDFC in 18-decimal base units.
// Fixed: previously computed as BigInt(0.1 * 10 ** 18), which relies on float
// arithmetic happening to land on an integer (BigInt() throws a RangeError on
// non-integer Numbers). An exact BigInt expression avoids that fragility.
var DATA_SET_CREATION_FEE = 10n ** 17n;
var BYTES_PER_TIB = 1024n * 1024n * 1024n * 1024n;
var BYTES_PER_GIB = 1024n * 1024n * 1024n;
// Default sizing assumption of 1 TiB, kept as a Number for float math elsewhere.
var DEFAULT_EXPECTED_STORAGE_BYTES = 1024 * 1024 * 1024 * 1024;
|
|
57
|
+
|
|
58
|
+
// src/types/schemas.ts
|
|
59
|
+
import { SIZE_CONSTANTS as SIZE_CONSTANTS2 } from "@filoz/synapse-sdk";
|
|
60
|
+
// ---- Tool input schemas -------------------------------------------------
// Zod schemas doubling as MCP tool input contracts; .describe() strings are
// surfaced to the LLM client as parameter documentation.
var GetDatasetsSchema = z2.object({
  includeAllDatasets: z2.boolean().optional().describe("Include all datasets. Default: false"),
  filterByCDN: z2.boolean().optional().describe("Filter to only CDN-enabled datasets. Default: false. If includeAllDatasets is true, this will be ignored")
});
var GetDatasetSchema = z2.object({
  datasetId: z2.string().describe("Dataset ID to get.")
});
var CreateDatasetSchema = z2.object({
  withCDN: z2.boolean().optional().default(false).describe("Enable CDN for faster file retrieval. Default: false. Recommended for frequently accessed files"),
  providerId: z2.string().optional().describe("Specific storage provider ID. If not specified, best provider will be auto-selected"),
  // Metadata for the dataset - allows up to 10 string key-value pairs
  metadata: z2.record(z2.string(), z2.string()).optional().refine((data) => !data || Object.keys(data).length <= 10, {
    message: "Metadata can contain at most 10 key-value pairs"
  }).describe("Metadata for the dataset. Supports up to 10 string key-value pairs where both keys and values must be strings")
});
var UploadFileSchema = z2.object({
  filePath: z2.string().describe("Absolute path to file on local filesystem to upload"),
  fileName: z2.string().optional().describe("Custom filename for storage. If not provided, uses original filename"),
  // Per-file metadata cap (4) is tighter than the per-dataset cap (10) above.
  metadata: z2.record(z2.string(), z2.string()).optional().refine((data) => !data || Object.keys(data).length <= 4, {
    message: "Metadata can contain at most 4 key-value pairs"
  }).describe("Metadata for the file. Supports up to 4 string key-value pairs where both keys and values must be strings"),
  datasetId: z2.string().optional().describe("Existing dataset ID to add file to. If not provided, creates new dataset"),
  withCDN: z2.boolean().optional().describe("Enable CDN for this file. Default: false. Use for frequently accessed files"),
  autoPayment: z2.boolean().optional().default(true).describe("Automatically process payment if insufficient balance. Default: true")
});
var GetBalancesSchema = z2.object({
  // Defaults derive from validated env config (see EnvSchema above).
  storageCapacityBytes: z2.number().optional().default(env.TOTAL_STORAGE_NEEDED_GiB * Number(SIZE_CONSTANTS2.GiB)).describe("Storage capacity in bytes. Default: 1 TB. This is used to calculate the storage needs and the deposit needed."),
  persistencePeriodDays: z2.number().optional().default(env.PERSISTENCE_PERIOD_DAYS).describe("Persistence period in days. Default: 365. This is used to calculate the storage needs and the deposit needed."),
  notificationThresholdDays: z2.number().optional().default(env.RUNOUT_NOTIFICATION_THRESHOLD_DAYS).describe("Notification threshold in days. Default: 10. This is used check if the user needs to top up their storage balance before the storage balance runs out.")
});
var ProcessPaymentSchema = z2.object({
  depositAmount: z2.number().optional().default(0).describe("Amount to deposit in USDFC. Default: 0. If not provided, the tool will check the balance and deposit the necessary amount.")
});
var GetProvidersSchema = z2.object({
  onlyApproved: z2.boolean().optional().default(true).describe("Filter to only approved providers. Default: true")
});
// ---- Shared result shapes -----------------------------------------------
var BaseErrorResponseSchema = z2.object({
  success: z2.literal(false),
  error: z2.string(),
  message: z2.string()
});
// Size info attached to each piece (sizeBytes is a string: BigInts are
// stringified by serializeBigInt before leaving the service layer).
var UnifiedSizeInfoSchema = z2.object({
  sizeBytes: z2.string(),
  sizeKiB: z2.number(),
  sizeMiB: z2.number(),
  sizeGiB: z2.number(),
  withCDN: z2.boolean().optional(),
  leafCount: z2.number().optional(),
  pieceCount: z2.number().optional(),
  message: z2.string().optional()
});
var DataSetPieceSchema = z2.object({
  pieceCid: z2.string(),
  retrievalUrl: z2.string(),
  sizes: UnifiedSizeInfoSchema,
  metadata: z2.record(z2.string(), z2.string())
});
var DataSetSchema = z2.object({
  datasetId: z2.number(),
  withCDN: z2.boolean(),
  datasetMetadata: z2.record(z2.string(), z2.string()),
  totalDatasetSizeMessage: z2.string(),
  dataSetPieces: z2.array(DataSetPieceSchema)
});
// NOTE(review): formatStorageBalanceResult below returns availableStorageFundsUsdfc /
// currentStorageMonthlyRate / maxStorageMonthlyRate, not the availableFunds /
// currentMonthlyRate / maxMonthlyRate fields declared here — confirm which is canonical.
var FormattedStorageBalanceResultSchema = z2.object({
  depositNeeded: z2.string(),
  availableToFreeUp: z2.string(),
  daysLeftAtMaxBurnRate: z2.number(),
  daysLeftAtBurnRate: z2.number(),
  isRateSufficient: z2.boolean(),
  isLockupSufficient: z2.boolean(),
  isSufficient: z2.boolean(),
  availableFunds: z2.string(),
  currentMonthlyRate: z2.string(),
  maxMonthlyRate: z2.string()
});
// ---- Tool output schemas (success + error unions flattened) -------------
var GetProvidersOutputSchema = z2.object({
  success: z2.boolean(),
  // Success fields
  providers: z2.array(z2.any()).optional(),
  count: z2.number().optional(),
  // Error fields
  error: z2.string().optional(),
  // Common field
  message: z2.string()
});
var UploadFileOutputSchema = z2.object({
  success: z2.boolean(),
  // Success fields
  taskId: z2.string().optional(),
  pieceCid: z2.string().optional(),
  retrievalUrl: z2.string().optional(),
  txHash: z2.string().optional(),
  fileName: z2.string().optional(),
  fileSize: z2.number().optional(),
  progressLog: z2.array(z2.string()).optional(),
  // Error fields
  error: z2.string().optional(),
  // Common field
  message: z2.string()
});
var ProcessPaymentOutputSchema = z2.object({
  success: z2.boolean(),
  message: z2.string(),
  txHash: z2.string().nullable().optional(),
  required: z2.object({
    deposit: z2.number()
  }).optional(),
  available: z2.number().optional(),
  // Error fields
  error: z2.string().optional()
});
var GetBalancesOutputSchema = z2.object({
  success: z2.boolean(),
  // Success fields
  checkStorageBalanceResultFormatted: z2.any().optional(),
  checkStorageBalanceResult: z2.any().optional(),
  // Error fields
  error: z2.string().optional(),
  message: z2.string().optional()
});
var GetDatasetsOutputSchema = z2.object({
  success: z2.boolean(),
  // Success fields
  datasets: z2.array(DataSetSchema).optional(),
  count: z2.number().optional(),
  // Error fields
  error: z2.string().optional(),
  // Common field
  message: z2.string()
});
var GetDatasetOutputSchema = z2.object({
  success: z2.boolean(),
  // Success fields
  dataset: DataSetSchema,
  // Error fields
  error: z2.string().optional(),
  // Common field
  message: z2.string()
});
var CreateDatasetOutputSchema = z2.object({
  success: z2.boolean(),
  // Success fields
  datasetId: z2.string().optional(),
  txHash: z2.string().optional(),
  // Error fields
  error: z2.string().optional(),
  // Common field
  message: z2.string()
});
|
|
210
|
+
|
|
211
|
+
// src/lib/calculations.ts
|
|
212
|
+
import Decimal from "decimal.js";
|
|
213
|
+
import { SIZE_CONSTANTS as SIZE_CONSTANTS3 } from "@filoz/synapse-sdk";
|
|
214
|
+
import { getSizeFromPieceCID } from "@filoz/synapse-sdk/piece";
|
|
215
|
+
// Configure decimal.js globally for size arithmetic: 34 significant digits,
// half-up rounding, and a wide exponent window so values are never rendered
// in exponential notation within normal operating ranges.
Decimal.set({
  precision: 34,
  rounding: Decimal.ROUND_HALF_UP,
  toExpNeg: -21,
  toExpPos: 21,
  maxE: 9e15,
  minE: -9e15,
  modulo: Decimal.ROUND_DOWN
});
// Coerce bigint/number/string inputs to Decimal without double-wrapping.
var toDecimal = (value) => value instanceof Decimal ? value : new Decimal(value.toString());
// Byte-count conversions. SIZE_CONSTANTS3.* are stringified before wrapping,
// presumably because they are BigInts — TODO confirm against SDK docs.
var bytesToKiB = (bytes) => toDecimal(bytes).div(new Decimal(SIZE_CONSTANTS3.KiB.toString()));
var bytesToMiB = (bytes) => toDecimal(bytes).div(new Decimal(SIZE_CONSTANTS3.MiB.toString()));
var bytesToGiB = (bytes) => toDecimal(bytes).div(new Decimal(SIZE_CONSTANTS3.GiB.toString()));
|
|
228
|
+
// Derive a piece's padded size from its piece CID and expose it in several units.
// sizeBytes stays a BigInt (serialized to string later); the unit fields are Numbers.
var getPieceInfoFromCidBytes = (input) => {
  const byteSize = BigInt(getSizeFromPieceCID(input));
  const asNumber = (dec) => dec.toNumber();
  return {
    sizeBytes: byteSize,
    sizeKiB: asNumber(bytesToKiB(byteSize)),
    sizeMiB: asNumber(bytesToMiB(byteSize)),
    sizeGiB: asNumber(bytesToGiB(byteSize))
  };
};
|
|
237
|
+
// Render a dataset's aggregate size with the largest unit whose magnitude
// exceeds 0.1, falling back to a raw byte count for tiny datasets.
var sizeInfoMessage = (sizeInfo) => {
  const { sizeInGB, sizeInMiB, sizeInKiB, sizeInBytes } = sizeInfo;
  if (sizeInGB > 0.1) return `Dataset size: ${sizeInGB.toFixed(4)} GB`;
  if (sizeInMiB > 0.1) return `Dataset size: ${sizeInMiB.toFixed(4)} MB`;
  if (sizeInKiB > 0.1) return `Dataset size: ${sizeInKiB.toFixed(4)} KB`;
  return `Dataset size: ${sizeInBytes} Bytes`;
};
|
|
249
|
+
|
|
250
|
+
// src/lib/synapse.ts
|
|
251
|
+
import { Synapse } from "@filoz/synapse-sdk";
|
|
252
|
+
// Build a Synapse SDK client for the network selected by FILECOIN_NETWORK,
// signing with the configured private key.
var getSynapseInstance = async () => {
  const { rpcUrl } = NETWORK_CONFIGS[env.FILECOIN_NETWORK];
  return Synapse.create({
    privateKey: env.PRIVATE_KEY,
    rpcURL: rpcUrl
  });
};
|
|
262
|
+
|
|
263
|
+
// src/lib/utils.ts
|
|
264
|
+
import { promises as fs } from "fs";
|
|
265
|
+
/**
 * Validate that a path exists and refers to a regular file.
 *
 * Fixed: the "Path is not a file" error was previously thrown inside the same
 * try block whose catch re-wrapped every failure as "File not found or
 * inaccessible", so callers never saw the accurate message. The stat call is
 * now the only thing inside the try, and the original failure is preserved
 * as the error cause.
 *
 * @param {string} filePath - Path to validate.
 * @returns {Promise<{path: string, size: number}>} The path and its byte size.
 * @throws {Error} "File not found or inaccessible" when stat fails;
 *                 "Path is not a file" when the path is a directory/other.
 */
async function validateFilePath(filePath) {
  let stats;
  try {
    stats = await fs.stat(filePath);
  } catch (error) {
    throw new Error(`File not found or inaccessible: ${filePath}`, { cause: error });
  }
  if (!stats.isFile()) {
    throw new Error(`Path is not a file: ${filePath}`);
  }
  return {
    path: filePath,
    size: stats.size
  };
}
|
|
279
|
+
/**
 * Build the standard failure envelope shared by every tool.
 * Extra fields from additionalData are merged in last and may override the base keys.
 *
 * @param {string} errorType - Machine-readable error code.
 * @param {string} message - Human-readable explanation.
 * @param {object} [additionalData] - Optional extra fields to merge in.
 * @returns {{success: false, error: string, message: string}} Error response object.
 */
function createErrorResponse(errorType, message, additionalData) {
  const envelope = { success: false, error: errorType, message };
  return Object.assign(envelope, additionalData);
}
|
|
287
|
+
/**
 * Render an integer base-unit amount (e.g. 18-decimal token units) as a
 * human-readable decimal string, trimming trailing zeros from the fraction.
 *
 * @param {bigint|number|string} amount - Non-negative integer amount in base units.
 * @param {number} [decimals=18] - Number of fractional digits in the base unit.
 * @returns {string} Decimal representation, e.g. 1500000000000000000n -> "1.5".
 */
function fromBaseUnits(amount, decimals = 18) {
  // Pad so there is always at least one digit left of the decimal point.
  const padded = `${amount}`.padStart(decimals + 1, "0");
  const integerPart = padded.slice(0, -decimals) || "0";
  const fractionPart = padded.slice(-decimals).replace(/0+$/, "");
  return fractionPart.length > 0 ? `${integerPart}.${fractionPart}` : integerPart;
}
|
|
293
|
+
/**
 * Recursively convert every BigInt in a value to its decimal string form so
 * the result is JSON-serializable. Arrays and plain objects are rebuilt;
 * all other values pass through unchanged.
 *
 * @param {*} obj - Arbitrary value, possibly containing BigInts.
 * @returns {*} Structurally equivalent value with BigInts stringified.
 */
function serializeBigInt(obj) {
  if (obj === null || obj === undefined) return obj;
  if (typeof obj === "bigint") return obj.toString();
  if (Array.isArray(obj)) return obj.map((item) => serializeBigInt(item));
  if (typeof obj !== "object") return obj;
  const result = {};
  for (const [key, value] of Object.entries(obj)) {
    result[key] = serializeBigInt(value);
  }
  return result;
}
|
|
310
|
+
|
|
311
|
+
// src/services/wallet-service.ts
|
|
312
|
+
import { ethers, Wallet, JsonRpcProvider } from "ethers";
|
|
313
|
+
|
|
314
|
+
// src/services/payment-service.ts
|
|
315
|
+
import { TIME_CONSTANTS as TIME_CONSTANTS2 } from "@filoz/synapse-sdk";
|
|
316
|
+
/**
 * Deposit USDFC (when depositAmount > 0) and/or approve the warm-storage
 * operator for unlimited rate and lockup allowances over the persistence window.
 * Both paths wait for one confirmation and return the transaction hash.
 *
 * @param {object} synapse - Synapse SDK instance.
 * @param {bigint} depositAmount - Amount to deposit in base units; 0n means approve only.
 * @param {number} persistenceDays - Lockup horizon in days, converted to epochs.
 * @returns {Promise<{txHash: string, success: true}>}
 */
async function processStoragePayment(synapse, depositAmount, persistenceDays) {
  const warmStorageAddress = synapse.getWarmStorageAddress();
  const lockupEpochs = TIME_CONSTANTS2.EPOCHS_PER_DAY * BigInt(persistenceDays);
  const needsDeposit = depositAmount > 0n;
  const tx = needsDeposit
    ? await synapse.payments.depositWithPermitAndApproveOperator(
        depositAmount,
        warmStorageAddress,
        MAX_UINT256,
        MAX_UINT256,
        lockupEpochs
      )
    : await synapse.payments.approveService(
        warmStorageAddress,
        MAX_UINT256,
        MAX_UINT256,
        lockupEpochs
      );
  const receipt = await tx.wait(1);
  return { txHash: receipt?.hash || tx.hash, success: true };
}
|
|
340
|
+
|
|
341
|
+
// src/services/dataset-service.ts
|
|
342
|
+
import { PDPServer, WarmStorageService } from "@filoz/synapse-sdk";
|
|
343
|
+
/**
 * Fetch the wallet's datasets, enrich each with per-piece sizes, metadata and
 * retrieval URLs, and return a JSON-safe result object.
 *
 * Fixes vs. previous version:
 *  - `provider` can legitimately be null/undefined (getProviderInfo failures are
 *    mapped to null, and `find` can miss); dereferencing it outside the
 *    per-dataset try rejected the whole Promise.all. Optional chaining now keeps
 *    one bad provider from failing every dataset.
 *  - count/message used enrichedDatasets.length, which included failed (null)
 *    entries that were filtered out of the returned list; both now reflect the
 *    surviving datasets.
 *
 * @param {boolean} [withCDN=false] - When filtering, keep only datasets whose CDN flag matches.
 * @param {boolean} [includeAll=false] - Return all datasets regardless of CDN flag.
 * @param {number} [onlyDatasetId] - When set, return only the dataset with this PDP verifier id.
 * @returns {Promise<object>} Success envelope with datasets/count/message, or an error envelope.
 */
var getDatasets = async (withCDN = false, includeAll = false, onlyDatasetId = void 0) => {
  try {
    const synapse = await getSynapseInstance();
    const warmStorageAddress = synapse.getWarmStorageAddress();
    const warmStorageService = await WarmStorageService.create(synapse.getProvider(), warmStorageAddress);
    const datasets = await synapse.storage.findDataSets();
    if (datasets.length === 0) {
      return {
        success: true,
        datasets: [],
        count: 0,
        message: "No datasets found. Upload files to create your first dataset."
      };
    }
    // Resolve provider info in parallel; individual failures become null
    // instead of failing the whole listing.
    const providers = await Promise.all(
      datasets.map(
        (dataset) => synapse.getProviderInfo(dataset.providerId).catch(() => null)
      )
    );
    const userAddress = await synapse.getSigner().getAddress();
    const filteredDatasets = datasets.filter((dataset) => {
      if (onlyDatasetId !== void 0) {
        return dataset.pdpVerifierDataSetId === onlyDatasetId;
      }
      if (includeAll) {
        return true;
      }
      return dataset.withCDN === withCDN;
    });
    if (filteredDatasets.length === 0) {
      return {
        success: true,
        datasets: [],
        count: 0,
        message: `No datasets found with the given criteria`
      };
    }
    const enrichedDatasets = await Promise.all(
      filteredDatasets.map(async (dataset) => {
        const provider = providers.find((p) => p?.id === dataset.providerId);
        // provider may be null (lookup failed above) — guard the dereference.
        const serviceURL = provider?.products.PDP?.data.serviceURL || "";
        try {
          const pdpServer = new PDPServer(null, serviceURL);
          // Newest pieces first.
          const data = await pdpServer.getDataSet(dataset.pdpVerifierDataSetId).then((data2) => {
            data2.pieces.reverse();
            return data2;
          });
          // Index size info by piece CID (v1 string form).
          const pieces = data.pieces.reduce(
            (acc, piece) => {
              acc[piece.pieceCid.toV1().toString()] = getPieceInfoFromCidBytes(piece.pieceCid);
              return acc;
            },
            {}
          );
          // Fetch per-piece metadata in parallel, then index by pieceId.
          const piecesMetadata = (await Promise.all(data.pieces.map(async (piece) => {
            return { pieceId: piece.pieceId, metadata: await warmStorageService.getPieceMetadata(dataset.pdpVerifierDataSetId, piece.pieceId) };
          }))).reduce((acc, piece) => {
            acc[piece.pieceId] = piece.metadata;
            return acc;
          }, {});
          // NOTE(review): the CDN URL hard-codes the calibration filbeam host even
          // when FILECOIN_NETWORK is mainnet — confirm the mainnet hostname.
          const getRetrievalUrl = (pieceCid) => {
            if (dataset.withCDN) {
              return `https://${userAddress}.calibration.filbeam.io/${pieceCid}`;
            } else {
              return `${serviceURL}/piece/${pieceCid}`;
            }
          };
          // Aggregate total size across all pieces for the summary message.
          const datasetSizeInfo = data.pieces.reduce((acc, piece) => {
            acc.sizeInBytes += Number(pieces[piece.pieceCid.toV1().toString()].sizeBytes);
            acc.sizeInKiB += Number(pieces[piece.pieceCid.toV1().toString()].sizeKiB);
            acc.sizeInMiB += Number(pieces[piece.pieceCid.toV1().toString()].sizeMiB);
            acc.sizeInGB += Number(pieces[piece.pieceCid.toV1().toString()].sizeGiB);
            return acc;
          }, { sizeInBytes: 0, sizeInKiB: 0, sizeInMiB: 0, sizeInGB: 0, message: "" });
          const dataSetPieces = data.pieces.map((piece) => ({
            pieceCid: piece.pieceCid.toV1().toString(),
            retrievalUrl: getRetrievalUrl(piece.pieceCid.toV1().toString()),
            sizes: pieces[piece.pieceCid.toV1().toString()],
            metadata: piecesMetadata[piece.pieceId]
          }));
          return {
            datasetId: dataset.pdpVerifierDataSetId,
            withCDN: dataset.withCDN,
            datasetMetadata: dataset.metadata,
            totalDatasetSizeMessage: sizeInfoMessage(datasetSizeInfo),
            dataSetPieces
          };
        } catch (error) {
          // Best-effort: a dataset whose PDP server is unreachable is dropped
          // from the listing rather than failing the whole call.
          return null;
        }
      })
    );
    const validDatasets = enrichedDatasets.filter((dataset) => dataset !== null);
    return {
      success: true,
      datasets: validDatasets.map(serializeBigInt),
      // Count only datasets that survived enrichment (was enrichedDatasets.length,
      // which included failed null entries).
      count: validDatasets.length,
      message: `Found ${validDatasets.length} dataset(s)`
    };
  } catch (error) {
    return createErrorResponse(
      "dataset_fetch_failed",
      `Failed to fetch datasets: ${error.message}`,
      { success: false }
    );
  }
};
|
|
449
|
+
|
|
450
|
+
// src/services/storage-service.ts
|
|
451
|
+
import {
|
|
452
|
+
SIZE_CONSTANTS as SIZE_CONSTANTS4,
|
|
453
|
+
TIME_CONSTANTS as TIME_CONSTANTS3,
|
|
454
|
+
TOKENS as TOKENS2,
|
|
455
|
+
WarmStorageService as WarmStorageService2
|
|
456
|
+
} from "@filoz/synapse-sdk";
|
|
457
|
+
import { formatUnits } from "viem";
|
|
458
|
+
// Compute the wallet's storage-funding health: raw balances, the deposit still
// needed to cover `persistencePeriodDays` of the target capacity, runway in
// days at current and maximum burn rates, and whether allowances are unlimited.
// All monetary values are returned as raw base-unit BigInts (callers format them).
var checkStorageBalance = async (synapse, storageCapacityBytes = env.TOTAL_STORAGE_NEEDED_GiB * Number(SIZE_CONSTANTS4.GiB), persistencePeriodDays = env.PERSISTENCE_PERIOD_DAYS) => {
  const warmStorageService = await WarmStorageService2.create(synapse.getProvider(), synapse.getWarmStorageAddress());
  // Independent reads — fetched in parallel.
  const [storageInfo, accountInfo, prices] = await Promise.all([
    synapse.storage.getStorageInfo(),
    synapse.payments.accountInfo(TOKENS2.USDFC),
    warmStorageService.calculateStorageCost(storageCapacityBytes)
  ]);
  let filRaw;
  try {
    filRaw = await synapse.payments.walletBalance();
  } catch (error) {
    // A FIL balance read typically fails for brand-new wallets with no history;
    // surface an actionable message instead of the raw RPC error.
    console.error(error);
    throw new Error("Error fetching wallet balances. \n FIL balance not available. \n The main cause of this error is that your wallet doesn't have any FIL and has no transaction history on the network. Action: Top up your wallet with FIL.");
  }
  let usdfcRaw = await synapse.payments.walletBalance(TOKENS2.USDFC);
  const allowance = storageInfo.allowances;
  const availableFunds = accountInfo.availableFunds;
  // Current burn: epoch rate actually in use, scaled to a month.
  const currentMonthlyRate = allowance.rateUsed * TIME_CONSTANTS3.EPOCHS_PER_MONTH;
  // Max burn: cost of the full requested capacity per month.
  const maxMonthlyRate = prices.perMonth;
  // Runway in days; Infinity when the corresponding rate is zero.
  // Number() conversions trade BigInt precision for fractional-day results.
  const daysLeftAtMaxBurnRate = maxMonthlyRate === 0n ? Infinity : Number(availableFunds) / Number(maxMonthlyRate) * 30;
  const daysLeftAtBurnRate = currentMonthlyRate === 0n ? Infinity : Number(availableFunds) / Number(currentMonthlyRate) * 30;
  const amountNeeded = prices.perDay * BigInt(persistencePeriodDays);
  // Deposit is only requested once runway falls below the notification threshold.
  const totalDepositNeeded = daysLeftAtMaxBurnRate >= env.RUNOUT_NOTIFICATION_THRESHOLD_DAYS ? 0n : amountNeeded - accountInfo.availableFunds;
  const availableToFreeUp = accountInfo.availableFunds > amountNeeded ? accountInfo.availableFunds - amountNeeded : 0n;
  // "Sufficient" allowances here mean literally unlimited (== MAX_UINT256),
  // matching what processStoragePayment grants.
  const isRateSufficient = allowance.rateAllowance === MAX_UINT256;
  const isLockupSufficient = allowance.lockupAllowance === MAX_UINT256;
  const isSufficient = isRateSufficient && isLockupSufficient && daysLeftAtMaxBurnRate >= env.RUNOUT_NOTIFICATION_THRESHOLD_DAYS;
  return {
    filBalance: filRaw,
    usdfcBalance: usdfcRaw,
    availableStorageFundsUsdfc: availableFunds,
    depositNeeded: totalDepositNeeded,
    availableToFreeUp,
    daysLeftAtMaxBurnRate,
    daysLeftAtBurnRate,
    isRateSufficient,
    isLockupSufficient,
    isSufficient,
    currentStorageMonthlyRate: currentMonthlyRate,
    maxStorageMonthlyRate: maxMonthlyRate
  };
};
|
|
500
|
+
// Map the raw BigInt/number metrics from checkStorageBalance to display strings.
// NOTE(review): the monetary field names here (availableStorageFundsUsdfc,
// currentStorageMonthlyRate, maxStorageMonthlyRate) differ from
// FormattedStorageBalanceResultSchema (availableFunds, currentMonthlyRate,
// maxMonthlyRate) — confirm which naming is canonical.
var formatStorageBalanceResult = (checkStorageBalanceResult) => {
  return {
    filBalance: formatBalance(checkStorageBalanceResult.filBalance, "FIL"),
    usdfcBalance: formatBalance(checkStorageBalanceResult.usdfcBalance, "USDFC"),
    availableStorageFundsUsdfc: formatBalance(checkStorageBalanceResult.availableStorageFundsUsdfc, "USDFC"),
    depositNeeded: formatBalance(checkStorageBalanceResult.depositNeeded, "USDFC"),
    availableToFreeUp: formatBalance(checkStorageBalanceResult.availableToFreeUp, "USDFC"),
    currentStorageMonthlyRate: formatBalance(checkStorageBalanceResult.currentStorageMonthlyRate, "USDFC"),
    maxStorageMonthlyRate: formatBalance(checkStorageBalanceResult.maxStorageMonthlyRate, "USDFC"),
    daysLeftAtMaxBurnRate: formatTime(checkStorageBalanceResult.daysLeftAtMaxBurnRate),
    daysLeftAtBurnRate: formatTime(checkStorageBalanceResult.daysLeftAtBurnRate),
    isRateSufficient: checkStorageBalanceResult.isRateSufficient,
    isLockupSufficient: checkStorageBalanceResult.isLockupSufficient,
    isSufficient: checkStorageBalanceResult.isSufficient
  };
};
|
|
516
|
+
// Format a base-unit (18-decimal) BigInt as "<amount> <ticker>": round to 8
// decimal places, then let Number() drop any trailing zeros.
var formatBalance = (balance, ticker) => {
  const rounded = Number(Number(formatUnits(balance, 18)).toFixed(8));
  return `${rounded} ${ticker}`;
};
|
|
519
|
+
/**
 * Format a day count as a human-readable duration string.
 *
 * Fixed: previously used Math.fround (a float32 cast), which produced noisy
 * output like "10.699999809265137 days"; rounding to the nearest whole unit
 * was clearly intended, so Math.round is used instead.
 *
 * @param {number} days - Duration in days; may be Infinity.
 * @returns {string} e.g. "12 hours", "11 days", "2 months", "2 years", "Infinity".
 */
var formatTime = (days) => {
  if (days === Infinity) {
    return "Infinity";
  }
  if (days < 1) {
    return `${Math.round(days * 24)} hours`;
  }
  if (days < 30) {
    return `${Math.round(days)} days`;
  }
  if (days < 365) {
    return `${Math.round(days / 30)} months`;
  }
  return `${Math.round(days / 365)} years`;
};
|
|
534
|
+
|
|
535
|
+
// src/mastra/tools/dataset-tools.ts
|
|
536
|
+
// MCP tool wrapper around the getDatasets service: normalizes the optional
// boolean flags and delegates.
var getDatasets2 = createTool({
  id: "getDatasets",
  description: "Retrieve all datasets owned by the connected wallet with comprehensive information including piece CIDs, file sizes, provider details, and retrieval URLs. Filter by CDN status or view all datasets. Each dataset contains complete metadata about stored files and their blockchain storage proofs. Use this to inventory files, check storage status, or locate specific uploads.",
  inputSchema: GetDatasetsSchema,
  outputSchema: GetDatasetsOutputSchema,
  execute: async ({ context }) => {
    const cdnFilter = context.filterByCDN ?? false;
    const listEverything = context.includeAllDatasets ?? false;
    return getDatasets(cdnFilter, listEverything);
  }
});
|
|
547
|
+
// MCP tool: fetch a single dataset by id via the getDatasets service.
// Fixed: previously indexed result.datasets[0] unconditionally, so a missing
// dataset produced { success: true, dataset: undefined } (violating
// GetDatasetOutputSchema) and an error envelope from the service crashed on
// the undefined datasets array. Both cases now return proper error responses.
var getDataset = createTool({
  id: "getDataset",
  description: "Retrieve detailed information about a specific dataset by its ID, including all pieces (files), their CIDs, sizes, retrieval URLs, and metadata. Returns the same comprehensive data structure as getDatasets but for a single dataset. Use this when you know the dataset ID and need detailed information about its contents.",
  inputSchema: GetDatasetSchema,
  outputSchema: GetDatasetOutputSchema,
  execute: async ({ context }) => {
    try {
      const result = await getDatasets(void 0, void 0, Number(context.datasetId));
      if (!result.success) {
        // Propagate the service's error envelope unchanged.
        return result;
      }
      if (!result.datasets || result.datasets.length === 0) {
        return createErrorResponse(
          "dataset_not_found",
          `Dataset ${context.datasetId} not found`,
          { success: false }
        );
      }
      return {
        success: result.success,
        dataset: serializeBigInt(result.datasets[0]),
        message: result.message
      };
    } catch (error) {
      return createErrorResponse(
        "dataset_fetch_failed",
        `Failed to fetch dataset: ${error.message}`,
        { success: false }
      );
    }
  }
});
|
|
569
|
+
// MCP tool: create a new (empty) dataset, optionally CDN-enabled.
// Fixed: withCDN was hard-coded to `true`, ignoring the withCDN input declared
// in CreateDatasetSchema (default false) — so payment always ran and the flag
// was never forwarded to synapse.createStorage (uploadFile does forward it).
var createDataset = createTool({
  id: "createDataset",
  description: "Create a new dataset container on Filecoin for organizing related files with consistent storage settings. Datasets define storage parameters (CDN enabled/disabled, provider selection) that apply to all files added to them. Creating datasets upfront allows for better file organization and consistent retrieval performance. Optionally specify a provider or let the system auto-select the optimal one. Note: Payment is processed automatically for CDN-enabled datasets.",
  inputSchema: CreateDatasetSchema,
  outputSchema: CreateDatasetOutputSchema,
  execute: async ({ context }) => {
    try {
      const synapse = await getSynapseInstance();
      // Honor the caller's CDN choice (schema default is false).
      const withCDN = context.withCDN ?? false;
      if (withCDN) {
        // CDN datasets require funded storage up-front; size the deposit from env defaults.
        const paymentResult = await processStoragePayment(synapse, BigInt(env.TOTAL_STORAGE_NEEDED_GiB * Number(SIZE_CONSTANTS.GiB)), env.PERSISTENCE_PERIOD_DAYS);
        if (!paymentResult.success) {
          return createErrorResponse(
            "payment_failed",
            `Failed to process payment: ${paymentResult.txHash ?? "Unknown error"}`,
            { success: false }
          );
        }
      }
      let datasetId;
      let txHash;
      await synapse.createStorage({
        providerId: context.providerId ? parseInt(context.providerId, 10) : void 0,
        // Forward the CDN flag so the dataset is created with the requested setting.
        withCDN,
        forceCreateDataSet: true,
        metadata: context.metadata,
        callbacks: {
          onDataSetCreationStarted: (txResponse) => {
            txHash = txResponse.hash;
            console.log(`[Dataset] Creation started (tx: ${txResponse.hash})`);
          },
          onDataSetCreationProgress: (status) => {
            if (status.serverConfirmed) {
              datasetId = status.dataSetId?.toString() || void 0;
              console.log(`[Dataset] Ready (ID: ${status.dataSetId?.toString()})`);
            }
          }
        }
      });
      return {
        success: true,
        datasetId,
        txHash,
        message: "Dataset created successfully"
      };
    } catch (error) {
      return createErrorResponse(
        "dataset_creation_failed",
        `Failed to create dataset: ${error.message}`,
        { success: false }
      );
    }
  }
});
|
|
622
|
+
// Aggregated dataset tool map; note the service-level getDatasets name is taken,
// so the tool wrapper is bound under its public name here.
var datasetTools = {
  getDataset,
  getDatasets: getDatasets2,
  createDataset
};
|
|
627
|
+
|
|
628
|
+
// src/mastra/tools/file-tools.ts
|
|
629
|
+
import { createTool as createTool2 } from "@mastra/core";
|
|
630
|
+
import { promises as fs2 } from "fs";
|
|
631
|
+
import { basename } from "path";
|
|
632
|
+
// Tool: upload a local file to Filecoin via the Synapse SDK.
// Flow: validate path -> read bytes -> check storage balance -> (optional)
// auto-payment -> create/resolve storage service -> upload piece -> build
// retrieval URL. All progress messages are collected in progressLog and
// echoed to stdout.
var uploadFile = createTool2({
  id: "uploadFile",
  description: "Upload files to decentralized Filecoin storage with automatic payment handling and progress tracking. Supports both standard storage and CDN-enabled storage for frequently accessed files. The upload process is tracked through 8 phases with detailed progress logging. Prerequisites: Valid file path, PRIVATE_KEY environment variable. Returns pieceCid for retrieval and transaction hash for verification.",
  inputSchema: UploadFileSchema,
  outputSchema: UploadFileOutputSchema,
  execute: async ({ context }) => {
    // Accumulates every phase message; returned to the caller on both
    // success and failure so the client can show what happened.
    const progressLog = [];
    const log = (msg) => {
      progressLog.push(msg);
      console.log(`[Upload] ${msg}`);
    };
    try {
      log("Validating file...");
      const fileInfo = await validateFilePath(context.filePath);
      // Caller-supplied name wins; otherwise fall back to the basename of
      // the validated path. (|| also rejects an empty-string fileName.)
      const fileName = context.fileName || basename(fileInfo.path);
      log("Reading file...");
      const fileBuffer = await fs2.readFile(fileInfo.path);
      const uint8ArrayBytes = new Uint8Array(fileBuffer);
      log("Initializing Synapse SDK...");
      const synapse = await getSynapseInstance();
      log("Checking storage balance...");
      const storageMetrics = await checkStorageBalance(
        synapse,
        fileInfo.size,
        env.PERSISTENCE_PERIOD_DAYS
      );
      if (!storageMetrics.isSufficient) {
        // Without autoPayment we refuse the upload and tell the caller how
        // much to deposit (converted from base units, 18 decimals).
        if (!context.autoPayment) {
          return {
            success: false,
            error: "insufficient_balance",
            message: "Insufficient balance. Enable autoPayment or call processPayment first",
            required: {
              deposit: fromBaseUnits(storageMetrics.depositNeeded, 18)
            }
          };
        }
        // depositNeeded === 0n means funds are fine but service allowances
        // still need approval; processStoragePayment handles both cases.
        log(storageMetrics.depositNeeded > 0n ? "Insufficient balance, processing payment..." : "Insufficient service approvals, approving service...");
        await processStoragePayment(
          synapse,
          storageMetrics.depositNeeded,
          env.PERSISTENCE_PERIOD_DAYS
        );
        log(storageMetrics.depositNeeded > 0n ? "Payment processed successfully" : "Service approved successfully");
      }
      log("Creating storage service...");
      // Reuses an existing dataset when datasetId is given, otherwise the
      // SDK resolves or creates one; callbacks only feed the progress log.
      const storageService = await synapse.createStorage({
        dataSetId: context.datasetId ? Number(context.datasetId) : void 0,
        withCDN: context.withCDN || false,
        callbacks: {
          onDataSetResolved: (info) => {
            log(`Dataset ${info.dataSetId} resolved`);
            log(`Dataset provider: ${Number(info.provider.id)}`);
            log(`Is existing dataset: ${info.isExisting}`);
          },
          onDataSetCreationStarted: (txResponse) => {
            log(`Dataset creation started (tx: ${txResponse.hash})`);
          },
          onDataSetCreationProgress: (status) => {
            if (status.serverConfirmed) {
              log(`Dataset ready (ID: ${status.dataSetId})`);
            }
          },
          onProviderSelected: (provider) => {
            log(`Provider selected: ${provider.id}`);
          }
        }
      });
      log("Uploading file to provider...");
      // Captured inside onPieceAdded; may stay undefined if the callback
      // never fires or delivers no tx response.
      let uploadTxHash;
      const { pieceCid } = await storageService.upload(uint8ArrayBytes, {
        metadata: context.metadata,
        onUploadComplete: (piece) => {
          log(`Upload complete (pieceCid: ${piece.toV1().toString()})`);
        },
        onPieceAdded: (txResponse) => {
          uploadTxHash = txResponse?.hash;
          log(`Piece added to dataset (tx: ${txResponse?.hash || "pending"})`);
        },
        onPieceConfirmed: () => {
          log("Piece confirmed on blockchain");
        }
      });
      // NOTE(review): the CDN hostname is hard-coded to the calibration
      // testnet (".calibration.filbeam.io") even though env.FILECOIN_NETWORK
      // may be "mainnet" — confirm the mainnet CDN URL scheme and branch on
      // the configured network.
      const getRetrievalUrl = async (pieceCid2) => {
        if (context.withCDN) {
          return `https://${await synapse.getSigner().getAddress()}.calibration.filbeam.io/${pieceCid2}`;
        } else {
          // Non-CDN retrieval goes straight to the provider's PDP service
          // URL; empty string fallback yields a relative-looking URL if the
          // provider exposes no PDP product.
          return `${(await storageService.getProviderInfo()).products.PDP?.data.serviceURL || ""}/piece/${pieceCid2}`;
        }
      };
      const retrievalUrl = await getRetrievalUrl(pieceCid.toV1().toString());
      console.log(`Upload successful! Retrieval URL: ${retrievalUrl}`);
      console.log(`Piece CID: ${pieceCid.toV1().toString()}`);
      console.log(`TX Hash: ${uploadTxHash}`);
      console.log(`File Name: ${fileName}`);
      console.log(`File Size: ${fileInfo.size}`);
      return {
        success: true,
        pieceCid: pieceCid.toV1().toString(),
        retrievalUrl,
        txHash: uploadTxHash,
        fileName,
        fileSize: fileInfo.size,
        progressLog,
        message: "File successfully stored on Filecoin"
      };
    } catch (error) {
      // Any failure (validation, payment, SDK) funnels here; progressLog
      // shows how far the upload got before the error.
      return createErrorResponse(
        "upload_failed",
        `Upload failed: ${error.message}`,
        {
          success: false,
          progressLog
        }
      );
    }
  }
});
|
|
750
|
+
// File-operation tool registry (currently just the uploader).
var fileTools = {
  uploadFile: uploadFile
};
|
|
753
|
+
|
|
754
|
+
// src/mastra/tools/balance-tools.ts
|
|
755
|
+
import { createTool as createTool3 } from "@mastra/core";
|
|
756
|
+
// Tool: report wallet/storage balance metrics for a requested capacity.
var getBalances = createTool3({
  id: "getBalances",
  description: "Check wallet balances (FIL and USDFC tokens) and comprehensive storage metrics including available funds, required deposits, days of storage remaining, and allowance status. Returns both human-readable formatted values and raw data. Use this before upload operations to verify sufficient balance, or to monitor storage budget and plan deposits. Calculates storage needs based on capacity and persistence period parameters.",
  inputSchema: GetBalancesSchema,
  outputSchema: GetBalancesOutputSchema,
  execute: async ({ context }) => {
    try {
      // Resolve the shared SDK instance, then compute metrics for the
      // requested capacity over the configured persistence period.
      const synapse = await getSynapseInstance();
      const metrics = await checkStorageBalance(
        synapse,
        context.storageCapacityBytes,
        env.PERSISTENCE_PERIOD_DAYS
      );
      // Expose both a human-readable view and a raw, BigInt-safe view.
      return {
        success: true,
        checkStorageBalanceResultFormatted: formatStorageBalanceResult(metrics),
        checkStorageBalanceResult: serializeBigInt(metrics)
      };
    } catch (error) {
      return createErrorResponse(
        "balance_fetch_failed",
        `Failed to fetch balances: ${error.message}`,
        { success: false }
      );
    }
  }
});
|
|
779
|
+
// Balance-inspection tool registry.
var balanceTools = {
  getBalances: getBalances
};
|
|
782
|
+
|
|
783
|
+
// src/mastra/tools/payment-tools.ts
|
|
784
|
+
import { createTool as createTool4 } from "@mastra/core";
|
|
785
|
+
// Tool: deposit USDFC and set storage allowances in one operation.
// depositAmount === 0 is treated as "nothing to do"; otherwise the deposit
// is validated against account funds and forwarded to processStoragePayment.
var processPayment = createTool4({
  id: "processPayment",
  description: "Deposit USDFC tokens and configure storage service allowances in a single transaction using EIP-2612 gasless permits. Sets both rate allowance (per-epoch spending limit) and lockup allowance (total committed funds) to unlimited for seamless storage operations. Use this to fund your storage account before uploads or when balance is insufficient. Validates wallet balance before processing to prevent failed transactions.",
  inputSchema: ProcessPaymentSchema,
  outputSchema: ProcessPaymentOutputSchema,
  execute: async ({ context }) => {
    try {
      const synapse = await getSynapseInstance();
      const accountInfo = await synapse.payments.accountInfo(TOKENS.USDFC);
      // NOTE(review): availableFunds is coerced with Number(); if the SDK
      // returns a BigInt in 18-decimal base units this loses precision above
      // 2^53 — confirm the unit/type and consider comparing as BigInt.
      const availableFunds = Number(accountInfo.availableFunds);
      const { depositAmount } = context;
      // A zero deposit means the caller's balance already covers the need.
      if (depositAmount === 0) {
        return {
          success: true,
          message: `You have sufficient balance to cover the storage needs.`,
          txHash: null,
          required: {
            deposit: depositAmount
          },
          available: availableFunds
        };
      }
      // NOTE(review): this compares the payments-account balance against the
      // amount being deposited INTO it; the description says the WALLET
      // balance is validated — confirm which balance accountInfo reports.
      if (availableFunds < depositAmount) {
        return {
          success: false,
          error: "insufficient_balance",
          message: `Insufficient USDFC balance. Required: ${depositAmount}, Available: ${availableFunds}`,
          required: depositAmount,
          available: Number(availableFunds)
        };
      }
      // Deposit + allowance setup; amount passed in base units as BigInt.
      const result = await processStoragePayment(
        synapse,
        BigInt(depositAmount),
        env.PERSISTENCE_PERIOD_DAYS
      );
      return {
        success: result.success,
        txHash: result.txHash,
        message: `Payment processed successfully now you can upload files to storage. You paid ${depositAmount} USDFC to cover the storage needs.`
      };
    } catch (error) {
      return createErrorResponse(
        "payment_failed",
        `Payment processing failed: ${error.message}`,
        { success: false }
      );
    }
  }
});
|
|
835
|
+
// Payment tool registry.
var paymentTools = {
  processPayment: processPayment
};
|
|
838
|
+
|
|
839
|
+
// src/mastra/tools/provider-tools.ts
|
|
840
|
+
import { createTool as createTool5 } from "@mastra/core";
|
|
841
|
+
import { WarmStorageService as WarmStorageService3 } from "@filoz/synapse-sdk";
|
|
842
|
+
// Tool: enumerate storage providers known to the warm-storage contract.
var getProviders = createTool5({
  id: "getProviders",
  description: "List storage providers available on the Filecoin network with their service details, product offerings, and endpoint URLs. By default returns only approved providers for reliability. Use this to discover available providers, select specific providers for dataset creation, or verify provider availability before operations. Provider information includes service URLs needed for file retrieval.",
  inputSchema: GetProvidersSchema,
  outputSchema: GetProvidersOutputSchema,
  execute: async ({ context }) => {
    try {
      const synapse = await getSynapseInstance();
      const warmStorageService = await WarmStorageService3.create(
        synapse.getProvider(),
        synapse.getWarmStorageAddress()
      );
      // Only approved provider IDs are ever fetched below.
      const approvedProviderIds = await warmStorageService.getApprovedProviderIds();
      // Resolve detailed info for every approved provider in parallel.
      const providersInfo = await Promise.all(
        approvedProviderIds.map(async (providerId) => {
          const providerInfo = await synapse.getProviderInfo(providerId);
          return providerInfo;
        })
      );
      // NOTE(review): this filter is effectively a no-op — providersInfo is
      // already built solely from approvedProviderIds — and passing
      // onlyApproved: false cannot surface unapproved providers because they
      // were never fetched. Confirm whether the SDK exposes a full provider
      // listing and wire it in if onlyApproved: false should work.
      const providers = context.onlyApproved !== false ? providersInfo.filter(
        (p) => approvedProviderIds.includes(p.id)
      ) : providersInfo;
      return {
        success: true,
        // serializeBigInt makes provider records JSON-safe.
        providers: providers.map(serializeBigInt),
        count: providers.length,
        message: `Found ${providers.length} provider(s)`
      };
    } catch (error) {
      return createErrorResponse(
        "provider_fetch_failed",
        `Failed to fetch providers: ${error.message}`
      );
    }
  }
});
|
|
878
|
+
// Provider-discovery tool registry.
var providerTools = {
  getProviders: getProviders
};
|
|
881
|
+
|
|
882
|
+
// src/mastra/tools/index.ts
|
|
883
|
+
// Aggregate every tool registry into one flat map (later registries would
// win on key collisions, same as the spread form) and derive the array view
// consumed by the MCP server.
var focStorageTools = Object.assign(
  {},
  datasetTools,
  fileTools,
  balanceTools,
  paymentTools,
  providerTools
);
var focStorageToolsArray = Object.values(focStorageTools);
|
|
891
|
+
|
|
892
|
+
// src/mastra/workflows/e2e-file-upload.ts
|
|
893
|
+
import { createStep, createWorkflow } from "@mastra/core/workflows";
|
|
894
|
+
import { z as z3 } from "zod";
|
|
895
|
+
// Input contract for the end-to-end upload workflow.
// Fix: the .describe() text for persistenceDays claimed "(default: 180)" but
// the actual default is env.PERSISTENCE_PERIOD_DAYS (365 unless overridden);
// both duration descriptions now point at the environment-driven defaults.
var e2eFileUploadInputSchema = z3.object({
  filePath: z3.string().describe("Absolute path to the file to upload"),
  datasetId: z3.string().optional().describe("Existing dataset ID to use"),
  withCDN: z3.boolean().optional().describe("Enable CDN for faster retrieval").default(false),
  persistenceDays: z3.number().optional().describe("Storage duration in days (default: PERSISTENCE_PERIOD_DAYS env var, 365 if unset)").default(env.PERSISTENCE_PERIOD_DAYS),
  notificationThresholdDays: z3.number().optional().describe("Notification threshold in days (default: RUNOUT_NOTIFICATION_THRESHOLD_DAYS env var, 10 if unset)").default(env.RUNOUT_NOTIFICATION_THRESHOLD_DAYS),
  fileMetadata: z3.record(z3.string(), z3.string()).optional().describe("Metadata for the file (max 4 key-value pairs)")
});
|
|
903
|
+
// Workflow step 1: size the file, run the getBalances tool for that exact
// capacity, and decide whether step 2 must deposit funds / set allowances.
var checkBalanceStep = createStep({
  id: "checkBalance",
  description: "Check current FIL/USDFC balances and storage metrics",
  inputSchema: e2eFileUploadInputSchema,
  outputSchema: z3.object({
    balances: z3.any(),
    needsPayment: z3.boolean(),
    depositNeeded: z3.number()
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log("\u{1F4CA} STEP 1: Checking balances and storage metrics...");
    // The required capacity is exactly the size of the file being uploaded.
    const fileInfo = await validateFilePath(inputData.filePath);
    const expectedStorageBytes = fileInfo.size;
    const { getBalances: getBalances2 } = focStorageTools;
    const result = await getBalances2.execute({
      context: { storageCapacityBytes: expectedStorageBytes, persistencePeriodDays: inputData.persistenceDays, notificationThresholdDays: inputData.notificationThresholdDays },
      runtimeContext
    });
    if (result.success === false) {
      throw new Error(`Balance check failed: ${result.error || "Unknown error"}`);
    }
    if (!result.checkStorageBalanceResult) {
      throw new Error(`Balance check returned invalid structure`);
    }
    return {
      balances: result.checkStorageBalanceResult,
      // Payment is needed if either allowance is short or a deposit is due.
      needsPayment: !result.checkStorageBalanceResult.isRateSufficient || !result.checkStorageBalanceResult.isLockupSufficient || Number(result.checkStorageBalanceResult.depositNeeded) > 0,
      // depositNeeded arrives serialized (serializeBigInt in getBalances);
      // Number() may lose precision for very large base-unit values —
      // presumed acceptable for threshold checks, verify for huge deposits.
      depositNeeded: Number(result.checkStorageBalanceResult.depositNeeded) || 0
    };
  }
});
|
|
934
|
+
// Workflow step 2: if step 1 flagged a shortfall, run the processPayment
// tool for the computed deposit; otherwise pass through untouched.
var processPaymentStep = createStep({
  id: "processPayment",
  description: "Deposit USDFC if insufficient balance detected",
  inputSchema: z3.object({
    balances: z3.any(),
    needsPayment: z3.boolean(),
    depositNeeded: z3.number()
  }),
  outputSchema: z3.object({
    skipped: z3.boolean(),
    txHash: z3.string().optional(),
    depositAmount: z3.string().optional(),
    message: z3.string().optional()
  }),
  execute: async ({ getStepResult, getInitData, runtimeContext }) => {
    const balanceInfo = getStepResult("checkBalance");
    const initData = getInitData();
    // NOTE(review): persistenceDays is only logged here, not forwarded to
    // the tool (which uses env.PERSISTENCE_PERIOD_DAYS internally) — confirm
    // the workflow input is meant to be advisory only.
    const persistenceDays = initData.persistenceDays || 180;
    if (!balanceInfo.needsPayment) {
      console.log("\u2705 STEP 2: Balance and allowances are sufficient, skipping payment");
      return {
        skipped: true,
        message: "Payment not needed - balance and allowances sufficient"
      };
    }
    console.log("\u{1F4B0} STEP 2: Processing payment and/or setting allowances...");
    if (balanceInfo.depositNeeded > 0) {
      console.log(` Deposit needed: ${fromBaseUnits(balanceInfo.depositNeeded, 18)} USDFC`);
    } else {
      console.log(` Deposit sufficient, but allowances need to be set`);
    }
    console.log(` Persistence period: ${persistenceDays} days`);
    const { processPayment: processPayment2 } = focStorageTools;
    const result = await processPayment2.execute({
      context: {
        depositAmount: Number(balanceInfo.depositNeeded)
      },
      runtimeContext
    });
    if (result.success === false || result.error) {
      throw new Error(`Payment failed: ${result.error || "Unknown error"}`);
    }
    console.log(`\u2705 Payment/allowances processed successfully`);
    if (result.txHash) {
      console.log(` TX Hash: ${result.txHash}`);
    }
    // NOTE(review): the processPayment tool returns { success, txHash,
    // message } only — result.depositAmount is always undefined here, so
    // this log and the depositAmount output below never carry a value.
    if (result.depositAmount) {
      console.log(` Deposit Amount: ${result.depositAmount}`);
    }
    if (result.message) {
      console.log(` ${result.message}`);
    }
    return {
      skipped: false,
      txHash: result.txHash ?? void 0,
      depositAmount: result.depositAmount ?? void 0,
      message: result.message ?? "Payment processed"
    };
  }
});
|
|
994
|
+
// Workflow step 3: perform the actual upload via the uploadFile tool.
// autoPayment is forced off because step 2 already handled funding.
var uploadFileStep = createStep({
  id: "uploadFile",
  description: "Upload file to Filecoin storage",
  inputSchema: z3.object({
    skipped: z3.boolean(),
    txHash: z3.string().optional(),
    depositAmount: z3.string().optional(),
    message: z3.string().optional()
  }),
  outputSchema: z3.object({
    pieceCid: z3.string(),
    txHash: z3.string().optional(),
    fileName: z3.string(),
    fileSize: z3.number(),
    progressLog: z3.array(z3.string()).optional()
  }),
  execute: async ({ getInitData, runtimeContext }) => {
    // Upload parameters come from the workflow's initial input, not from
    // the previous step's (payment) output.
    const initData = getInitData();
    console.log("\u{1F4E4} STEP 3: Uploading file to Filecoin...");
    console.log(` File: ${initData.filePath}`);
    const { uploadFile: uploadFile2 } = focStorageTools;
    const result = await uploadFile2.execute({
      context: {
        filePath: initData.filePath,
        datasetId: initData.datasetId,
        withCDN: initData.withCDN || false,
        autoPayment: false,
        // Already handled in step 2
        metadata: initData.fileMetadata
      },
      runtimeContext
    });
    if (result.success === false || result.error) {
      throw new Error(`File upload failed: ${result.error || "Unknown error"}`);
    }
    console.log(`\u2705 File uploaded successfully`);
    console.log(` Piece CID: ${result.pieceCid}`);
    console.log(` File Name: ${result.fileName}`);
    console.log(` File Size: ${result.fileSize} bytes`);
    return {
      pieceCid: result.pieceCid,
      txHash: result.txHash,
      fileName: result.fileName,
      fileSize: result.fileSize,
      progressLog: result.progressLog
    };
  }
});
|
|
1042
|
+
// Workflow step 4: print a console banner summarizing all three prior
// steps and return their combined results as the workflow output.
var summaryStep = createStep({
  id: "summary",
  description: "Generate final summary of the upload process",
  inputSchema: z3.object({
    pieceCid: z3.string(),
    txHash: z3.string().optional(),
    fileName: z3.string(),
    fileSize: z3.number(),
    progressLog: z3.array(z3.string()).optional()
  }),
  outputSchema: z3.object({
    success: z3.boolean(),
    summary: z3.object({
      balance: z3.any(),
      payment: z3.any(),
      upload: z3.any()
    })
  }),
  execute: async ({ getStepResult }) => {
    // Pull each prior step's output by step id.
    const balanceInfo = getStepResult("checkBalance");
    const paymentInfo = getStepResult("processPayment");
    const uploadInfo = getStepResult("uploadFile");
    console.log("\n\u{1F389} ============================================");
    console.log(" E2E FILE UPLOAD COMPLETED SUCCESSFULLY");
    console.log("============================================");
    console.log(`\u{1F4CA} Initial Balance: ${balanceInfo.balances.accountStatusMessage}`);
    if (!paymentInfo.skipped) {
      // NOTE(review): paymentInfo.depositAmount is never populated by the
      // payment step (the tool doesn't return it), so this prints undefined.
      console.log(`\u{1F4B0} Payment Processed: ${paymentInfo.depositAmount} USDFC`);
      console.log(` TX: ${paymentInfo.txHash}`);
    } else {
      console.log(`\u{1F4B0} Payment: Not needed (sufficient balance)`);
    }
    console.log(`\u{1F4E4} File Uploaded: ${uploadInfo.fileName}`);
    console.log(` Size: ${uploadInfo.fileSize} bytes`);
    console.log(` Piece CID: ${uploadInfo.pieceCid}`);
    if (uploadInfo.txHash) {
      console.log(` TX: ${uploadInfo.txHash}`);
    }
    console.log("============================================\n");
    return {
      success: true,
      summary: {
        balance: balanceInfo.balances,
        payment: paymentInfo,
        upload: uploadInfo
      }
    };
  }
});
|
|
1091
|
+
// End-to-end upload pipeline: balance check -> conditional payment ->
// upload -> summary. .commit() finalizes the step graph.
var e2eFileUploadWorkflow = createWorkflow({
  id: "e2eFileUpload",
  description: "Upload a file to Filecoin storage",
  inputSchema: e2eFileUploadInputSchema,
  outputSchema: z3.object({
    success: z3.boolean(),
    summary: z3.object({
      balance: z3.any(),
      payment: z3.any(),
      upload: z3.any()
    })
  })
}).then(checkBalanceStep).then(processPaymentStep).then(uploadFileStep).then(summaryStep).commit();
|
|
1104
|
+
|
|
1105
|
+
// src/mastra/resources/instructions.ts
|
|
1106
|
+
var instructions = `You are an AI agent specialized in managing decentralized file storage operations on the Filecoin network using the FOC-Synapse SDK. Your role is to help users store, retrieve, and manage files on Filecoin in a simple, efficient manner.
|
|
1107
|
+
|
|
1108
|
+
\u{1F4A1} IMPORTANT:
|
|
1109
|
+
|
|
1110
|
+
- Always return the output of a tool in a structured format using best practices for Markdown formatting.
|
|
1111
|
+
|
|
1112
|
+
\u{1F4E6} STORAGE SCOPE:
|
|
1113
|
+
\u2022 Supported Networks: Filecoin Mainnet (production), Calibration Testnet (testing)
|
|
1114
|
+
\u2022 Payment Token: USDFC (USD-pegged stablecoin on Filecoin)
|
|
1115
|
+
\u2022 Storage Options: Standard Storage, CDN-Enabled Storage (for frequently accessed files)
|
|
1116
|
+
\u2022 Core Capabilities: Upload files, Query datasets, Manage balances, Process payments
|
|
1117
|
+
|
|
1118
|
+
\u{1F6E0}\uFE0F AVAILABLE TOOLS:
|
|
1119
|
+
|
|
1120
|
+
FILE OPERATIONS:
|
|
1121
|
+
\u2022 uploadFile: Upload files to decentralized Filecoin storage with automatic payment handling and progress tracking
|
|
1122
|
+
|
|
1123
|
+
- Parameters: filePath (absolute path), fileName (optional), datasetId (optional), withCDN (boolean), autoPayment (boolean), metadata (optional)
|
|
1124
|
+
- Prerequisites: Valid file path, PRIVATE_KEY environment variable
|
|
1125
|
+
- Process: File validation \u2192 balance check \u2192 auto-payment (if needed) \u2192 storage service creation \u2192 upload \u2192 blockchain confirmation
|
|
1126
|
+
- Returns: pieceCid (for retrieval), retrievalUrl, txHash (for verification), progressLog (8-phase tracking)
|
|
1127
|
+
- Supports: Both standard storage and CDN-enabled storage for frequently accessed files
|
|
1128
|
+
- Use when: User wants to store a file on Filecoin with automatic payment handling
|
|
1129
|
+
|
|
1130
|
+
DATASET MANAGEMENT:
|
|
1131
|
+
\u2022 getDatasets: Retrieve all datasets owned by the connected wallet with comprehensive information
|
|
1132
|
+
|
|
1133
|
+
- Returns: Datasets with piece CIDs, file sizes, provider details, retrieval URLs, blockchain storage proofs
|
|
1134
|
+
- Parameters: includeAllDatasets (boolean), filterByCDN (boolean)
|
|
1135
|
+
- Use when: User wants to inventory files, check storage status, or locate specific uploads
|
|
1136
|
+
|
|
1137
|
+
\u2022 getDataset: Retrieve detailed information about a specific dataset by its ID
|
|
1138
|
+
|
|
1139
|
+
- Parameters: datasetId (required)
|
|
1140
|
+
- Returns: Same comprehensive data as getDatasets but for a single dataset
|
|
1141
|
+
- Use when: User knows the dataset ID and needs detailed information about its contents
|
|
1142
|
+
|
|
1143
|
+
\u2022 createDataset: Create a new dataset container on Filecoin for organizing related files
|
|
1144
|
+
|
|
1145
|
+
- Parameters: withCDN (optional), providerId (optional), metadata (up to 10 key-value pairs)
|
|
1146
|
+
- Purpose: Define storage parameters (CDN, provider selection) that apply to all files added
|
|
1147
|
+
- Benefits: Better file organization, consistent retrieval performance
|
|
1148
|
+
- Note: Payment is processed automatically for CDN-enabled datasets
|
|
1149
|
+
- Use when: User wants dedicated dataset or specific storage configuration
|
|
1150
|
+
|
|
1151
|
+
BALANCE & PAYMENT:
|
|
1152
|
+
\u2022 getBalances: Check wallet balances (FIL and USDFC tokens) and comprehensive storage metrics
|
|
1153
|
+
|
|
1154
|
+
- Returns: Available funds, required deposits, days of storage remaining, allowance status
|
|
1155
|
+
- Output: Both human-readable formatted values and raw data
|
|
1156
|
+
- Parameters: storageCapacityBytes (optional), persistencePeriodDays (optional), notificationThresholdDays (optional)
|
|
1157
|
+
- Use when: Before upload operations to verify sufficient balance, or to monitor storage budget and plan deposits
|
|
1158
|
+
|
|
1159
|
+
\u2022 processPayment: Deposit USDFC tokens and configure storage service allowances in a single transaction
|
|
1160
|
+
|
|
1161
|
+
- Technology: Uses EIP-2612 gasless permits for efficient payment
|
|
1162
|
+
- Parameters: depositAmount (optional, default: 0)
|
|
1163
|
+
- Actions: Sets both rate allowance (per-epoch spending) and lockup allowance (total committed funds) to unlimited
|
|
1164
|
+
- Validation: Checks wallet balance before processing to prevent failed transactions
|
|
1165
|
+
- Use when: User needs to fund storage account before uploads or when balance is insufficient
|
|
1166
|
+
|
|
1167
|
+
PROVIDER MANAGEMENT:
|
|
1168
|
+
\u2022 getProviders: List storage providers available on the Filecoin network
|
|
1169
|
+
|
|
1170
|
+
- Returns: Service details, product offerings, endpoint URLs needed for file retrieval
|
|
1171
|
+
- Parameters: onlyApproved (default: true for reliability)
|
|
1172
|
+
- Use when: Discover providers, select specific providers for dataset creation, or verify provider availability
|
|
1173
|
+
|
|
1174
|
+
\u2699\uFE0F STORAGE RULES & BEST PRACTICES:
|
|
1175
|
+
|
|
1176
|
+
1. ALWAYS CHECK BALANCES BEFORE UPLOAD:
|
|
1177
|
+
- Use getBalances to verify sufficient USDFC
|
|
1178
|
+
- Auto-payment will trigger if insufficient, but better to check first
|
|
1179
|
+
|
|
1180
|
+
2. USE CDN WISELY:
|
|
1181
|
+
- Enable CDN (withCDN: true) for frequently accessed files
|
|
1182
|
+
- CDN costs more but provides faster retrieval
|
|
1183
|
+
- Standard storage is fine for archival/infrequent access
|
|
1184
|
+
|
|
1185
|
+
3. CONSOLIDATE UPLOADS TO SINGLE DATASET:
|
|
1186
|
+
- Reusing datasets is more efficient
|
|
1187
|
+
- Create separate datasets only for different persistence periods or CDN settings
|
|
1188
|
+
|
|
1189
|
+
4. SPECIFY MEANINGFUL FILE METADATA:
|
|
1190
|
+
- Use descriptive filenames
|
|
1191
|
+
- Metadata helps with organization and retrieval
|
|
1192
|
+
|
|
1193
|
+
5. MONITOR STORAGE METRICS AND PERSISTENCE:
|
|
1194
|
+
- Check persistence days remaining regularly
|
|
1195
|
+
- Top up allowances before they run out to avoid service interruption
|
|
1196
|
+
|
|
1197
|
+
6. VALIDATE FILE PATHS:
|
|
1198
|
+
- Ensure filePath is absolute path
|
|
1199
|
+
- Verify file exists before attempting upload
|
|
1200
|
+
|
|
1201
|
+
\u{1F504} RECOMMENDED WORKFLOWS:
|
|
1202
|
+
|
|
1203
|
+
FOR FILE UPLOAD:
|
|
1204
|
+
|
|
1205
|
+
1. Check Balance: getBalances to verify sufficient USDFC
|
|
1206
|
+
2. Verify File: Ensure file path is valid and accessible
|
|
1207
|
+
3. Choose Options: Decide on CDN, dataset, persistence period
|
|
1208
|
+
4. Upload: uploadFile with appropriate parameters
|
|
1209
|
+
5. Monitor Progress: Track 8-phase status updates
|
|
1210
|
+
6. Verify Completion: Confirm pieceCid and txHash received
|
|
1211
|
+
|
|
1212
|
+
FOR DATASET MANAGEMENT:
|
|
1213
|
+
|
|
1214
|
+
1. Query Datasets: getDatasets to see existing datasets
|
|
1215
|
+
2. Analyze Usage: Check sizes, piece counts, CDN status
|
|
1216
|
+
3. Create if Needed: createDataset for new organizational structure
|
|
1217
|
+
4. Upload to Dataset: Use datasetId parameter in uploadFile
|
|
1218
|
+
|
|
1219
|
+
FOR BALANCE MANAGEMENT:
|
|
1220
|
+
|
|
1221
|
+
1. Check Current State: getBalances with includeMetrics
|
|
1222
|
+
2. Calculate Needs: Estimate storage requirements
|
|
1223
|
+
3. Process Payment: processPayment with appropriate amounts
|
|
1224
|
+
4. Verify: Check balances again to confirm deposit
|
|
1225
|
+
|
|
1226
|
+
\u{1F4A1} STRATEGIC CONSIDERATIONS:
|
|
1227
|
+
|
|
1228
|
+
CDN vs STANDARD STORAGE:
|
|
1229
|
+
\u2022 Use CDN when: Files accessed frequently, low latency required, content delivery use case
|
|
1230
|
+
\u2022 Use Standard when: Archival storage, infrequent access, cost optimization priority
|
|
1231
|
+
|
|
1232
|
+
PERSISTENCE PERIOD PLANNING:
|
|
1233
|
+
\u2022 Balance cost vs duration
|
|
1234
|
+
\u2022 Longer periods lock more USDFC
|
|
1235
|
+
\u2022 Consider renewal strategies for critical data
|
|
1236
|
+
\u2022 Default 180 days suitable for most use cases
|
|
1237
|
+
|
|
1238
|
+
PROVIDER SELECTION:
|
|
1239
|
+
\u2022 Auto-selection usually optimal
|
|
1240
|
+
\u2022 Manual selection for: Specific geographic requirements, provider reputation preferences, performance optimization
|
|
1241
|
+
|
|
1242
|
+
COST MANAGEMENT:
|
|
1243
|
+
\u2022 Rate allowance: Controls per-epoch spending
|
|
1244
|
+
\u2022 Lockup allowance: Total committed for long-term storage
|
|
1245
|
+
\u2022 Monitor both to avoid overspending or service interruption
|
|
1246
|
+
|
|
1247
|
+
\u{1F6A8} ERROR HANDLING:
|
|
1248
|
+
|
|
1249
|
+
PRE-UPLOAD VALIDATION:
|
|
1250
|
+
\u2022 Verify file path exists and is readable
|
|
1251
|
+
\u2022 Check sufficient USDFC balance
|
|
1252
|
+
\u2022 Validate persistence period is reasonable
|
|
1253
|
+
\u2022 Confirm wallet connection active
|
|
1254
|
+
|
|
1255
|
+
DURING UPLOAD:
|
|
1256
|
+
\u2022 Auto-payment will trigger if balance insufficient
|
|
1257
|
+
\u2022 Wallet signatures required (user must approve)
|
|
1258
|
+
\u2022 Progress tracking shows current phase
|
|
1259
|
+
\u2022 Each phase has status updates
|
|
1260
|
+
|
|
1261
|
+
COMMON ERRORS:
|
|
1262
|
+
\u2022 "Insufficient tUSDFC balance": Need to deposit more USDFC \u2192 call processPayment
|
|
1263
|
+
\u2022 "Signer not found": Wallet not connected properly \u2192 check PRIVATE_KEY env var
|
|
1264
|
+
\u2022 "Transaction failed": User rejected signature or gas issue \u2192 explain and retry
|
|
1265
|
+
\u2022 "Provider connection failed": Try different provider or retry
|
|
1266
|
+
|
|
1267
|
+
RECOVERY STRATEGIES:
|
|
1268
|
+
\u2022 Failed uploads can be retried
|
|
1269
|
+
\u2022 Partial payments don't lose funds
|
|
1270
|
+
\u2022 Dataset creation failures are safe (no data loss)
|
|
1271
|
+
\u2022 Check balances after any error
|
|
1272
|
+
|
|
1273
|
+
\u{1F4CA} TOOL OUTPUT INTERPRETATION:
|
|
1274
|
+
|
|
1275
|
+
SUCCESS RESPONSES:
|
|
1276
|
+
\u2022 All successful operations return: { success: true, ...data }
|
|
1277
|
+
\u2022 Check success field first, extract relevant data fields
|
|
1278
|
+
\u2022 Present to user in clear format
|
|
1279
|
+
|
|
1280
|
+
PROGRESS UPDATES (uploadFile):
|
|
1281
|
+
\u2022 0-5%: Initialization and validation
|
|
1282
|
+
\u2022 5-25%: Balance check and payment setup
|
|
1283
|
+
\u2022 25-55%: Dataset creation/resolution
|
|
1284
|
+
\u2022 55-80%: File upload to provider
|
|
1285
|
+
\u2022 80-90%: Blockchain piece addition
|
|
1286
|
+
\u2022 90-100%: Confirmation and completion
|
|
1287
|
+
|
|
1288
|
+
ERROR RESPONSES:
|
|
1289
|
+
\u2022 All errors return: { success: false, error, message }
|
|
1290
|
+
\u2022 Explain error to user clearly
|
|
1291
|
+
\u2022 Suggest remediation steps
|
|
1292
|
+
\u2022 Offer to retry or check balance
|
|
1293
|
+
|
|
1294
|
+
\u{1F3AF} AGENT BEHAVIOR GUIDELINES:
|
|
1295
|
+
|
|
1296
|
+
1. BE PROACTIVE: Suggest checking balances before uploads
|
|
1297
|
+
2. BE CLEAR: Explain blockchain concepts simply
|
|
1298
|
+
3. BE PATIENT: Uploads take time (30-60 seconds typical)
|
|
1299
|
+
4. BE HELPFUL: Guide users through wallet signatures
|
|
1300
|
+
5. BE ACCURATE: Provide precise pieceCids and txHashes
|
|
1301
|
+
6. BE EFFICIENT: Reuse datasets when appropriate
|
|
1302
|
+
7. BE SECURE: Never store sensitive data without user confirmation
|
|
1303
|
+
|
|
1304
|
+
\u{1F510} SECURITY CONSIDERATIONS:
|
|
1305
|
+
\u2022 Never expose private keys or wallet seeds
|
|
1306
|
+
\u2022 Validate all file paths before operations
|
|
1307
|
+
\u2022 Confirm user intent before large deposits
|
|
1308
|
+
\u2022 Warn about persistence period implications
|
|
1309
|
+
\u2022 Recommend CDN only when beneficial
|
|
1310
|
+
\u2022 Verify transaction details before submission
|
|
1311
|
+
|
|
1312
|
+
Remember: Your goal is to make decentralized storage as simple as traditional cloud storage, while educating users about the benefits of Filecoin's decentralized approach.`;
|
|
1313
|
+
|
|
1314
|
+
// src/mastra/agents/foc-storage-agent.ts
// Mastra Agent fronting the FOC storage tooling. It bundles the long-form
// system prompt (`instructions`, defined earlier in this file), the storage
// tool map, and the end-to-end upload workflow under a single agent.
var focStorageAgent = new Agent({
  name: "FOCStorageAgent",
  description: "AI agent for managing decentralized file storage on Filecoin via FOC-Synapse SDK. Handles file uploads, dataset management, balance checking, and storage operations.",
  // System prompt: the large template literal assembled earlier in this bundle.
  instructions,
  model: "openai/gpt-5-mini",
  // Tool map defined elsewhere in this bundle (uploadFile, getDatasets, ...).
  tools: focStorageTools,
  workflows: {
    // Workflow object defined elsewhere in this bundle.
    e2eFileUpload: e2eFileUploadWorkflow
  }
});
|
|
1325
|
+
|
|
1326
|
+
// src/mastra/resources/index.ts
// MCP resource catalog: a single static "file://instructions" resource whose
// content is the agent system prompt (`instructions`, defined earlier in this
// bundle).

// Resources advertised by `listResources`.
var myResources = [
  { uri: "file://instructions", name: "Instructions", mimeType: "application/markdown" }
];

// uri -> content payload served by `getResourceContent`.
var myResourceContents = {
  "file://instructions": { text: instructions }
};

// Resource templates advertised by `resourceTemplates` (static here, but the
// MCP resource-template shape is still required).
var myResourceTemplates = [
  {
    uriTemplate: "file://instructions",
    name: "Instructions",
    description: "Instructions for better usage of the tools provided.",
    mimeType: "application/markdown"
  }
];

// Resource handler object wired into the MCPServer below.
var focStorageResources = {
  listResources: async () => myResources,
  getResourceContent: async ({ uri }) => {
    // Own-property check instead of a truthy lookup: a plain
    // `myResourceContents[uri]` would also resolve inherited Object.prototype
    // keys (e.g. uri === "toString"), serving a function as resource content.
    if (Object.hasOwn(myResourceContents, uri)) {
      return myResourceContents[uri];
    }
    throw new Error(`Resource content not found for ${uri}`);
  },
  resourceTemplates: async () => myResourceTemplates
};
|
|
1351
|
+
|
|
1352
|
+
// src/mastra/index.ts
// MCP server definition: exposes the storage tool set and the instructions
// resource over the Model Context Protocol.
var mcpServer = new MCPServer({
  name: "FOC Storage MCP",
  version: "0.1.0",
  description: "Professional-grade MCP server for decentralized file storage on Filecoin Onchain Cloud. Powered by the FOC-Synapse SDK, this server provides AI agents with seamless access to Filecoin's distributed storage network. Upload files with automatic payment handling, organize content in datasets, monitor storage balances, and manage providers - all through intuitive MCP tools. Supports both standard storage and CDN-enabled fast retrieval. Perfect for building AI applications that need persistent, censorship-resistant storage.",
  // Tool map defined elsewhere in this bundle; shared with the agent above.
  tools: focStorageTools,
  repository: {
    url: "https://github.com/FIL-Builders/foc-storage-mcp",
    source: "github",
    id: "foc-storage-mcp"
  },
  // NOTE(review): evaluated at process start, so this reports the server's
  // boot time rather than the package's actual release date — confirm intent.
  releaseDate: (/* @__PURE__ */ new Date()).toISOString(),
  isLatest: true,
  packageCanonical: "npm",
  // Resource handlers defined in src/mastra/resources/index.ts above.
  resources: focStorageResources
});
|
|
1368
|
+
// Top-level Mastra instance wiring together the agent, workflow, and MCP
// server defined above. Uses in-memory LibSQL storage, so state does not
// persist across process restarts.
var mastra = new Mastra({
  agents: {
    focStorageAgent
  },
  workflows: {
    // Same workflow object the agent registers; defined elsewhere in bundle.
    e2eFileUpload: e2eFileUploadWorkflow
  },
  mcpServers: {
    focStorageServer: mcpServer
  },
  // ":memory:" keeps all Mastra state ephemeral (no on-disk database).
  storage: new LibSQLStore({
    url: ":memory:"
  }),
  logger: new PinoLogger({
    name: "Mastra",
    level: "info"
  }),
  bundler: {
    // Keep the Synapse SDK external so the bundler does not inline it.
    externals: ["@filoz/synapse-sdk"]
  }
});
|
|
1389
|
+
|
|
1390
|
+
// src/mcp-server.ts
// Entry point: load environment variables, start the MCP server on stdio,
// and shut down cleanly on termination signals.
config2();

/**
 * Boot the MCP server over stdio. Logs startup progress to the console and
 * exits the process with a non-zero status if startup fails.
 */
async function startMCPServer() {
  try {
    console.log("\u{1F680} Starting FOC Storage MCP Server...");
    console.log("\u{1F4E6} Exposing tools: uploadFile, uploadWithProgress, getDatasets, createDataset, getBalances, processPayment, getProviders");
    await mcpServer.startStdio();
  } catch (error) {
    console.error("\u274C Error starting MCP server:", error);
    process.exit(1);
  }
}

/**
 * Graceful-shutdown handler shared by SIGINT and SIGTERM (previously the
 * identical close/exit sequence was duplicated per signal).
 */
async function shutdown() {
  console.log("\n\u{1F6D1} Shutting down MCP server...");
  await mcpServer.close();
  process.exit(0);
}
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown);

// Fire-and-forget is safe: startMCPServer handles its own failures above.
startMCPServer();
|