@merkl/api 0.10.352 → 0.10.354

This diff shows the changes between publicly released versions of the package, as published to their respective public registries, and is provided for informational purposes only.
Files changed (48)
  1. package/dist/database/api/.generated/edge.js +10 -4
  2. package/dist/database/api/.generated/index-browser.js +7 -1
  3. package/dist/database/api/.generated/index.d.ts +7 -1
  4. package/dist/database/api/.generated/index.js +10 -4
  5. package/dist/database/api/.generated/package.json +1 -1
  6. package/dist/database/api/.generated/schema.prisma +6 -0
  7. package/dist/database/api/.generated/wasm.js +7 -1
  8. package/dist/src/eden/index.d.ts +251 -22
  9. package/dist/src/index.d.ts +89 -6
  10. package/dist/src/jobs/breakdowns.d.ts +1 -0
  11. package/dist/src/jobs/breakdowns.js +55 -0
  12. package/dist/src/jobs/etl/pendings.d.ts +1 -0
  13. package/dist/src/jobs/etl/pendings.js +74 -0
  14. package/dist/src/jobs/etl/reward-breakdowns.d.ts +1 -0
  15. package/dist/src/jobs/etl/reward-breakdowns.js +82 -0
  16. package/dist/src/jobs/etl/rewards.d.ts +1 -0
  17. package/dist/src/jobs/etl/rewards.js +102 -0
  18. package/dist/src/jobs/rewards.d.ts +1 -0
  19. package/dist/src/jobs/rewards.js +74 -0
  20. package/dist/src/libs/campaigns/campaignTypes/ERC20SubTypes/processor/GenericProcessor.d.ts +4 -0
  21. package/dist/src/libs/campaigns/campaignTypes/ERC20SubTypes/processor/GenericProcessor.js +2 -1
  22. package/dist/src/modules/v4/bucket/bucket.service.d.ts +4 -1
  23. package/dist/src/modules/v4/bucket/bucket.service.js +83 -2
  24. package/dist/src/modules/v4/campaign/campaign.controller.d.ts +2 -2
  25. package/dist/src/modules/v4/campaign/campaign.model.d.ts +3 -0
  26. package/dist/src/modules/v4/dynamicData/dynamicData.controller.d.ts +72 -0
  27. package/dist/src/modules/v4/dynamicData/dynamicData.controller.js +16 -1
  28. package/dist/src/modules/v4/dynamicData/dynamicData.model.d.ts +101 -0
  29. package/dist/src/modules/v4/dynamicData/dynamicData.model.js +47 -0
  30. package/dist/src/modules/v4/dynamicData/dynamicData.service.d.ts +13 -1
  31. package/dist/src/modules/v4/dynamicData/dynamicData.service.js +52 -1
  32. package/dist/src/modules/v4/opportunity/opportunity.controller.d.ts +3 -3
  33. package/dist/src/modules/v4/opportunity/opportunity.model.d.ts +6 -0
  34. package/dist/src/modules/v4/opportunity/opportunity.service.d.ts +2 -2
  35. package/dist/src/modules/v4/opportunity/opportunity.service.js +4 -0
  36. package/dist/src/modules/v4/opportunity/subservices/getHyperdriveMetadata.service.d.ts +3 -0
  37. package/dist/src/modules/v4/opportunity/subservices/getHyperdriveMetadata.service.js +15 -0
  38. package/dist/src/modules/v4/programPayload/programPayload.controller.d.ts +12 -1
  39. package/dist/src/modules/v4/programPayload/programPayload.controller.js +3 -2
  40. package/dist/src/modules/v4/programPayload/programPayload.repository.d.ts +4 -1
  41. package/dist/src/modules/v4/programPayload/programPayload.repository.js +101 -16
  42. package/dist/src/modules/v4/programPayload/programPayload.service.d.ts +4 -4
  43. package/dist/src/modules/v4/programPayload/programPayload.service.js +30 -13
  44. package/dist/src/modules/v4/protocol/protocol.model.d.ts +1 -1
  45. package/dist/src/modules/v4/protocol/protocol.model.js +1 -0
  46. package/dist/src/modules/v4/router.d.ts +89 -6
  47. package/dist/tsconfig.package.tsbuildinfo +1 -1
  48. package/package.json +4 -2
package/dist/src/jobs/breakdowns.js
@@ -0,0 +1,55 @@
+ import { CampaignService, TokenService } from "../modules/v4";
+ import { BucketService } from "../modules/v4/bucket/bucket.service";
+ import { RewardService } from "../modules/v4/reward";
+ import { log } from "../utils/logger";
+ import { apiDbClient } from "../utils/prisma";
+ import moment from "moment";
+ // @dev Trigger at the end of the rewards job
+ export const writeBreakdownsToAPI = async (chainId, root) => {
+     const start = moment().unix();
+     const environmentName = process.env.ENV === "staging" ? "staging" : "production";
+     const breakdownBatches = [[]];
+     const breakdownBatchPromises = [];
+     let currentBreakdownBatchIndex = 0;
+     const pushBreakdowns = async (index) => {
+         log.local(`pushing breakdown batch ${index} (${breakdownBatches[index].length} points) to API DB`);
+         // Upsert users in case they don't exist yet
+         const breakdownsToCreate = [];
+         for (const b of breakdownBatches[index]) {
+             const campaignId = CampaignService.hashId({ distributionChain: chainId, campaignId: b.campaignId });
+             const rewardTokenId = TokenService.hashId({ chainId, address: b.token });
+             const rewardId = RewardService.hashId(root, b.recipient, rewardTokenId);
+             breakdownsToCreate.push({
+                 rewardId,
+                 protocolId: b.protocolId ? b.protocolId : undefined,
+                 campaignId,
+                 reason: b.reason ? b.reason : "",
+                 amount: b.amount,
+                 claimed: b.claimed,
+                 pending: b.pending,
+                 auxiliaryData1: b.auxiliaryData1 ?? "",
+                 auxiliaryData2: b.auxiliaryData2 ?? "",
+             });
+         }
+         return (await apiDbClient.rewardBreakdown.createMany({
+             data: breakdownsToCreate,
+             skipDuplicates: true, // To avoid revert if data already exists
+         })).count;
+     };
+     await BucketService.readStreamFromBucket(`breakdowns/${chainId}-${root}.gz`, `merkl-rewards-lake-${environmentName}`, `merkl-data-${environmentName}`, async (x) => {
+         const breakdowns = JSON.parse(x);
+         for (const b of breakdowns) {
+             breakdownBatches[currentBreakdownBatchIndex].push(b);
+             if (breakdownBatches[currentBreakdownBatchIndex].length >= 30_000) {
+                 breakdownBatchPromises.push(pushBreakdowns(currentBreakdownBatchIndex));
+                 breakdownBatches.push([]);
+                 currentBreakdownBatchIndex++;
+             }
+         }
+         return;
+     });
+     // Final batch
+     breakdownBatchPromises.push(pushBreakdowns(currentBreakdownBatchIndex));
+     const breakdownsCreated = (await Promise.all(breakdownBatchPromises)).reduce((acc, x) => acc + x, 0);
+     log.info(`✅ Successfully created ${breakdownsCreated} breakdowns in ${moment().unix() - start}sec`);
+ };
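Note on the pattern above: the job buffers breakdowns into 30,000-row batches and inserts each batch with Prisma's `createMany` plus `skipDuplicates`, so a re-run after a partial failure skips rows that already exist instead of reverting. A minimal sketch of that idempotent batch-insert pattern, assuming the package's `apiDbClient` Prisma client and `rewardBreakdown` model (row contents are hypothetical):

// Sketch only: batched, idempotent inserts via createMany + skipDuplicates.
import { apiDbClient } from "../utils/prisma";

const BATCH_SIZE = 30_000;

async function insertInBatches(rows) {
    let created = 0;
    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
        const { count } = await apiDbClient.rewardBreakdown.createMany({
            data: rows.slice(i, i + BATCH_SIZE),
            skipDuplicates: true, // existing primary keys are skipped, not errors
        });
        created += count;
    }
    return created;
}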
package/dist/src/jobs/etl/pendings.d.ts
@@ -0,0 +1 @@
+ export declare const main: () => Promise<void>;
package/dist/src/jobs/etl/pendings.js
@@ -0,0 +1,74 @@
+ // ─── Pendings ETL ─────────────────────────────────────────────────────────────
+ if (!process.env.ENV || !process.env.FILENAME)
+     throw new Error("[ENV]: missing variable");
+ import { BucketService } from "../../modules/v4/bucket/bucket.service";
+ import { RewardService } from "../../modules/v4/reward";
+ import moment from "moment";
+ import { log } from "../../utils/logger";
+ // ─── Constants ───────────────────────────────────────────────
+ const BATCH_SIZE = 20_000;
+ // ─── Global Variables ────────────────────────────────────────
+ const [chainIdString, root, campaignId] = process.env.FILENAME.split("_");
+ const chainId = Number.parseInt(chainIdString);
+ const pendingsToCreate = [[]];
+ const failedBatches = [];
+ // ─── Extract ─────────────────────────────────────────────────────────────────
+ const extract = async () => {
+     let count = 0;
+     let currentBatchIndex = 0;
+     await BucketService.readStreamFromBucket(`pendings/${process.env.FILENAME}.gz`, `merkl-rewards-lake-${process.env.ENV}`, `merkl-data-${process.env.ENV}`, async (x) => {
+         pendingsToCreate[currentBatchIndex].push(transform(JSON.parse(x)));
+         if (pendingsToCreate[currentBatchIndex].length >= BATCH_SIZE) {
+             try {
+                 count += await load(pendingsToCreate[currentBatchIndex]);
+                 log.info(`Successfully inserted a batch of ${count} rewards`);
+             }
+             catch (err) {
+                 console.error(`Failed to insert a batch, adding it to the fail queue.\n${err}`);
+                 failedBatches.push(currentBatchIndex);
+             }
+             currentBatchIndex++;
+             pendingsToCreate.push([]);
+         }
+         return;
+     });
+     // ─── Current Batch Not In DB Yet ─────────────────────────────────────
+     try {
+         const count = await load(pendingsToCreate[currentBatchIndex]);
+         if (count !== 0)
+             log.info(`Successfully inserted a batch of ${count} rewards`);
+     }
+     catch (err) {
+         console.error(`Failed to insert a batch, adding it to the fail queue.\n${err}`);
+         failedBatches.push(currentBatchIndex);
+     }
+     return count;
+ };
+ // ─── Transform ───────────────────────────────────────────────────────────────
+ const transform = (pending) => {
+     return pending;
+ };
+ // ─── Load ────────────────────────────────────────────────────────────────────
+ const load = async (pendings) => {
+     const { updated, created } = await RewardService.updatePendings({
+         distributionChainId: chainId,
+         rewardToken: pendings[0].rewardToken,
+         campaignId,
+         root,
+         data: pendings,
+     });
+     return updated + created;
+ };
+ // ─────────────────────────────────────────────────────────────────────────────
+ export const main = async () => {
+     const start = moment().unix();
+     // ─── Start Rewards ETL ───────────────────────────────────────────────
+     const count = await extract();
+     log.info(`✅ Successfully created ${count} new records in ${moment().unix() - start} sec`);
+     if (failedBatches.length !== 0)
+         log.info(`${failedBatches.length}/${pendingsToCreate.length} batches failed.`);
+     if (failedBatches.length === 0) {
+         await BucketService.deleteFile(`rewards/${process.env.FILENAME}.gz`, `merkl-rewards-lake-${process.env.ENV}`, `merkl-data-${process.env.ENV}`);
+     }
+ };
+ main();
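For reference, this job is parameterized entirely through environment variables; the `split("_")` above implies `FILENAME` encodes the chain, Merkle root, and campaign joined by underscores. A sketch with hypothetical values:

// FILENAME format assumed by the destructuring above (values are illustrative).
const FILENAME = "1_0xROOT_0xCAMPAIGN";
const [chainIdString, root, campaignId] = FILENAME.split("_");
const chainId = Number.parseInt(chainIdString); // 1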
package/dist/src/jobs/etl/reward-breakdowns.d.ts
@@ -0,0 +1 @@
+ export declare const main: () => Promise<void>;
package/dist/src/jobs/etl/reward-breakdowns.js
@@ -0,0 +1,82 @@
+ // ─── Reward Breakdowns ETL ───────────────────────────────────────────────────
+ if (!process.env.ENV || !process.env.CHAIN_ID || !process.env.ROOT)
+     throw new Error("[ENV]: missing variable");
+ import { BucketService } from "../../modules/v4/bucket/bucket.service";
+ import { log } from "../../utils/logger";
+ import { apiDbClient } from "../../utils/prisma";
+ import moment from "moment";
+ // ─── Constants ───────────────────────────────────────────────
+ const BATCH_SIZE = 30_000;
+ // ─── Global Variables ────────────────────────────────────────
+ const rewardBreakdownsToCreate = [[]];
+ const failedBatches = [];
+ // ─── Extract ─────────────────────────────────────────────────────────────────
+ const extract = async () => {
+     let currentBatchIndex = 0;
+     let count = 0;
+     await BucketService.readStreamFromBucket(`breakdowns/${process.env.CHAIN_ID}-${process.env.ROOT}.gz`, `merkl-rewards-lake-${process.env.ENV}`, `merkl-data-${process.env.ENV}`, async (x) => {
+         const breakdowns = JSON.parse(x);
+         for (const breakdown of breakdowns) {
+             rewardBreakdownsToCreate[currentBatchIndex].push(transform(breakdown));
+             if (rewardBreakdownsToCreate[currentBatchIndex].length >= BATCH_SIZE) {
+                 try {
+                     count += await load(rewardBreakdownsToCreate[currentBatchIndex]);
+                 }
+                 catch (err) {
+                     console.error(`Failed to insert a batch, adding it to the fail queue.\n${err}`);
+                     failedBatches.push(currentBatchIndex);
+                 }
+                 currentBatchIndex++;
+                 rewardBreakdownsToCreate.push([]);
+             }
+         }
+         return;
+     });
+     // ─── Current Batch Not In DB Yet ─────────────────────────────────────
+     try {
+         const count = await load(rewardBreakdownsToCreate[currentBatchIndex]);
+         if (count !== 0)
+             log.info(`Successfully inserted a batch of ${count} rewards`);
+     }
+     catch (err) {
+         console.error(`Failed to insert a batch, adding it to the fail queue.\n${err}`);
+         failedBatches.push(currentBatchIndex);
+     }
+     return count;
+ };
+ // ─── Transform ───────────────────────────────────────────────────────────────
+ const transform = (rewardBreakdown) => {
+     const campaignId = Bun.hash(`${process.env.CHAIN_ID}${rewardBreakdown.campaignId}`).toString();
+     const rewardTokenId = Bun.hash(`${process.env.CHAIN_ID}${rewardBreakdown.token}`).toString();
+     const rewardId = Bun.hash(`${process.env.ROOT}${rewardBreakdown.recipient}${rewardTokenId}`).toString();
+     return {
+         rewardId,
+         protocolId: rewardBreakdown.protocolId ? rewardBreakdown.protocolId : undefined,
+         campaignId,
+         reason: rewardBreakdown.reason ? rewardBreakdown.reason : "",
+         amount: rewardBreakdown.amount,
+         claimed: rewardBreakdown.claimed,
+         pending: rewardBreakdown.pending,
+         auxiliaryData1: rewardBreakdown.auxiliaryData1 ?? "",
+         auxiliaryData2: rewardBreakdown.auxiliaryData2 ?? "",
+     };
+ };
+ // ─── Load ────────────────────────────────────────────────────────────────────
+ const load = async (rewardBreakdowns) => {
+     return (await apiDbClient.rewardBreakdown.createMany({
+         data: rewardBreakdowns,
+         skipDuplicates: true,
+     })).count;
+ };
+ // ─── Main ────────────────────────────────────────────────────────────────────
+ export const main = async () => {
+     const start = moment().unix();
+     const count = await extract();
+     log.info(`✅ Successfully created ${count} new records in ${moment().unix() - start} sec`);
+     if (failedBatches.length !== 0)
+         log.info(`${failedBatches.length}/${rewardBreakdownsToCreate.length} batches failed.`);
+     if (failedBatches.length === 0) {
+         await BucketService.deleteFile(`rewards/${process.env.CHAIN_ID}-${process.env.ROOT}.gz`, `merkl-rewards-lake-${process.env.ENV}`, `merkl-data-${process.env.ENV}`);
+     }
+ };
+ main();
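Unlike `jobs/breakdowns.js`, which derives ids through `CampaignService.hashId`, `TokenService.hashId`, and `RewardService.hashId`, this ETL inlines the derivation with `Bun.hash` (Bun's built-in non-cryptographic hasher). A sketch of the chained derivation with hypothetical inputs:

// Hypothetical inputs; note that rewardId hashes over the stringified rewardTokenId.
const CHAIN_ID = "1";
const ROOT = "0xROOT";
const breakdown = { campaignId: "0xCAMPAIGN", token: "0xTOKEN", recipient: "0xUSER" };
const campaignId = Bun.hash(`${CHAIN_ID}${breakdown.campaignId}`).toString();
const rewardTokenId = Bun.hash(`${CHAIN_ID}${breakdown.token}`).toString();
const rewardId = Bun.hash(`${ROOT}${breakdown.recipient}${rewardTokenId}`).toString();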
package/dist/src/jobs/etl/rewards.d.ts
@@ -0,0 +1 @@
+ export declare const main: () => Promise<void>;
package/dist/src/jobs/etl/rewards.js
@@ -0,0 +1,102 @@
+ // ─── Rewards ETL ─────────────────────────────────────────────────────────────
+ if (!process.env.ENV || !process.env.CHAIN_ID || !process.env.ROOT)
+     throw new Error("[ENV]: missing variable");
+ import { BucketService } from "../../modules/v4/bucket/bucket.service";
+ import { log } from "../../utils/logger";
+ import { apiDbClient } from "../../utils/prisma";
+ import moment from "moment";
+ // ─── Constants ───────────────────────────────────────────────
+ const BATCH_SIZE = 20_000;
+ // ─── Global Variables ────────────────────────────────────────
+ const rewardsToCreate = [[]];
+ const failedBatches = [];
+ // ─── Extract ─────────────────────────────────────────────────────────────────
+ const extract = async () => {
+     let count = 0;
+     let currentBatchIndex = 0;
+     await BucketService.readStreamFromBucket(`rewards/${process.env.CHAIN_ID}-${process.env.ROOT}.gz`, `merkl-rewards-lake-${process.env.ENV}`, `merkl-data-${process.env.ENV}`, async (x) => {
+         rewardsToCreate[currentBatchIndex].push(transform(JSON.parse(x)));
+         if (rewardsToCreate[currentBatchIndex].length >= BATCH_SIZE) {
+             try {
+                 count += await load(rewardsToCreate[currentBatchIndex]);
+                 log.info(`Successfully inserted a batch of ${count} rewards`);
+             }
+             catch (err) {
+                 console.error(`Failed to insert a batch, adding it to the fail queue.\n${err}`);
+                 failedBatches.push(currentBatchIndex);
+             }
+             currentBatchIndex++;
+             rewardsToCreate.push([]);
+         }
+         return;
+     });
+     // ─── Current Batch Not In DB Yet ─────────────────────────────────────
+     try {
+         const count = await load(rewardsToCreate[currentBatchIndex]);
+         if (count !== 0)
+             log.info(`Successfully inserted a batch of ${count} rewards`);
+     }
+     catch (err) {
+         console.error(`Failed to insert a batch, adding it to the fail queue.\n${err}`);
+         failedBatches.push(currentBatchIndex);
+     }
+     return count;
+ };
+ // ─── Transform ───────────────────────────────────────────────────────────────
+ const transform = (reward) => {
+     const rewardTokenId = Bun.hash(`${process.env.CHAIN_ID}${reward.rewardToken}`).toString();
+     const id = Bun.hash(`${process.env.ROOT}${reward.recipient}${rewardTokenId}`).toString();
+     return {
+         id,
+         root: reward.root,
+         amount: reward.amount,
+         pending: reward.pending,
+         claimed: reward.claimed,
+         recipient: reward.recipient,
+         rewardTokenId,
+         proofs: reward.proofs,
+     };
+ };
+ // ─── Load ────────────────────────────────────────────────────────────────────
+ const load = async (rewards) => {
+     // ─── Load Users ──────────────────────────────────────────────────────
+     await apiDbClient.user.createMany({
+         data: rewards.map(r => {
+             return {
+                 address: r.recipient,
+             };
+         }),
+         skipDuplicates: true,
+     });
+     // ─── Load Rewards ────────────────────────────────────────────────────
+     return (await apiDbClient.reward.createMany({
+         data: rewards,
+         skipDuplicates: true,
+     })).count;
+ };
+ // ─────────────────────────────────────────────────────────────────────────────
+ export const main = async () => {
+     const start = moment().unix();
+     // ─── Create Merkle Root If Not Exists ────────────────────────────────
+     await apiDbClient.merklRoot.upsert({
+         create: {
+             root: process.env.ROOT,
+             chainId: +process.env.CHAIN_ID,
+             epoch: Math.floor(start / 3_600),
+             timestamp: start,
+         },
+         update: {},
+         where: {
+             root: process.env.ROOT,
+         },
+     });
+     // ─── Start Rewards ETL ───────────────────────────────────────────────
+     const count = await extract();
+     log.info(`✅ Successfully created ${count} new records in ${moment().unix() - start} sec`);
+     if (failedBatches.length !== 0)
+         log.info(`${failedBatches.length}/${rewardsToCreate.length} batches failed.`);
+     if (failedBatches.length === 0) {
+         await BucketService.deleteFile(`rewards/${process.env.CHAIN_ID}-${process.env.ROOT}.gz`, `merkl-rewards-lake-${process.env.ENV}`, `merkl-data-${process.env.ENV}`);
+     }
+ };
+ main();
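The `merklRoot.upsert` with an empty `update` block is a create-if-missing idiom: when the root row already exists the call is a no-op, which keeps the script safe to re-run. Reduced to its essentials (client and model as in the script above; values illustrative):

// Create the root row if absent; leave it untouched otherwise (sketch).
import { apiDbClient } from "../../utils/prisma";

const root = "0xROOT"; // illustrative
const now = Math.floor(Date.now() / 1000);
await apiDbClient.merklRoot.upsert({
    create: { root, chainId: 1, epoch: Math.floor(now / 3_600), timestamp: now },
    update: {}, // no-op when the row already exists
    where: { root },
});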
package/dist/src/jobs/rewards.d.ts
@@ -0,0 +1 @@
+ export declare const writeRewardsToAPI: (chainId: number, root: string) => Promise<void>;
package/dist/src/jobs/rewards.js
@@ -0,0 +1,74 @@
+ import { TokenService } from "../modules/v4";
+ import { BucketService } from "../modules/v4/bucket/bucket.service";
+ import { RewardService } from "../modules/v4/reward";
+ import { log } from "../utils/logger";
+ import { apiDbClient } from "../utils/prisma";
+ import { HOUR } from "@sdk";
+ import moment from "moment";
+ import { writeBreakdownsToAPI } from "./breakdowns";
+ export const writeRewardsToAPI = async (chainId, root) => {
+     const start = moment().unix();
+     const environmentName = process.env.ENV === "staging" ? "staging" : "production";
+     const now = moment().unix();
+     // Upsert root
+     await apiDbClient.merklRoot.upsert({
+         create: {
+             root,
+             chainId,
+             epoch: Math.floor(now / HOUR),
+             timestamp: now,
+         },
+         update: {},
+         where: {
+             root,
+         },
+     });
+     // Read reward stream and populate the table
+     const rewardBatches = [[]];
+     const rewardBatchPromises = [];
+     let currentRewardBatchIndex = 0;
+     const pushRewards = async (index) => {
+         log.local(`pushing rewards batch ${index} (${rewardBatches[index].length} points) to API DB`);
+         // Upsert users in case they don't exist yet
+         await apiDbClient.user.createMany({
+             data: rewardBatches[index].map(r => {
+                 return {
+                     address: r.recipient,
+                 };
+             }),
+             skipDuplicates: true,
+         });
+         const rewardsToCreate = rewardBatches[index].map(reward => {
+             const rewardTokenId = TokenService.hashId({ chainId, address: reward.rewardToken });
+             const id = RewardService.hashId(root, reward.recipient, rewardTokenId);
+             return {
+                 id,
+                 root: reward.root,
+                 amount: reward.amount,
+                 pending: reward.pending,
+                 claimed: reward.claimed,
+                 recipient: reward.recipient,
+                 rewardTokenId,
+                 proofs: reward.proofs,
+             };
+         });
+         return (await apiDbClient.reward.createMany({
+             data: rewardsToCreate,
+             skipDuplicates: true, // To avoid revert if data already exists
+         })).count;
+     };
+     await BucketService.readStreamFromBucket(`rewards/${chainId}-${root}.gz`, `merkl-rewards-lake-${environmentName}`, `merkl-data-${environmentName}`, async (x) => {
+         rewardBatches[currentRewardBatchIndex].push(JSON.parse(x));
+         if (rewardBatches[currentRewardBatchIndex].length >= 20_000) {
+             rewardBatchPromises.push(pushRewards(currentRewardBatchIndex));
+             currentRewardBatchIndex++;
+             rewardBatches.push([]);
+         }
+         return;
+     });
+     // Final batch
+     rewardBatchPromises.push(pushRewards(currentRewardBatchIndex));
+     const rewardsCreated = (await Promise.all(rewardBatchPromises)).reduce((acc, x) => acc + x, 0);
+     log.info(`✅ Successfully created ${rewardsCreated} rewards in ${moment().unix() - start}sec`);
+     await writeBreakdownsToAPI(chainId, root);
+ };
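This job differs from the ETL variant above in two ways: ids come from the service-level `hashId` helpers rather than raw `Bun.hash`, and batches are pushed concurrently (promises collected, then awaited together with `Promise.all`) instead of sequentially. A hypothetical invocation; per the last line above, `writeBreakdownsToAPI` is chained automatically:

import { writeRewardsToAPI } from "./jobs/rewards";

// Chain id and Merkle root below are illustrative placeholders.
await writeRewardsToAPI(1, "0xROOT");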
package/dist/src/libs/campaigns/campaignTypes/ERC20SubTypes/processor/GenericProcessor.d.ts
@@ -18,18 +18,22 @@ export type callKeys = mandatoryCallKeys & {
      [key: string]: string;
  };
  export type mandatoryCallKeys = {
+     type: tokenType;
+     protocol: string;
      tokenAddress: string;
      blacklistedSupply: string;
      totalSupply: string;
      whitelistedSupply: string;
  };
  export type dataRaw = {
+     protocol: string;
      tokenAddress: string;
      blacklistedSupply: string;
      totalSupply: string;
      whitelistedSupply: string;
  };
  export type dataType = {
+     protocol: string;
      type: tokenType;
      tokenAddress: string;
      totalSupply: number;
package/dist/src/libs/campaigns/campaignTypes/ERC20SubTypes/processor/GenericProcessor.js
@@ -39,6 +39,7 @@ export class GenericProcessor {
          const priceTargetToken = (await pricer.get({ symbol: campaign.campaignParameters.symbolTargetToken })) ?? 0;
          const tvl = whitelistedSupplyTargetToken * priceTargetToken;
          return {
+             protocol: typeInfo.protocol,
              type,
              whitelistedSupplyTargetToken,
              totalSupply,
@@ -49,7 +50,7 @@
          };
      }
      computeRound1(type, typeInfo) {
-         typeInfo = { ...tokenTypeToProtocol[type], ...typeInfo };
+         typeInfo = { type, ...tokenTypeToProtocol[type], ...typeInfo };
          this.processingRound1(typeInfo);
          if (this.debug) {
              console.log("Round 1", {
package/dist/src/modules/v4/bucket/bucket.service.d.ts
@@ -1,5 +1,8 @@
  import type { SaveData } from "node_modules/@google-cloud/storage/build/esm/src/file";
  export declare abstract class BucketService {
-     private static storage;
      static upload(bucketName: string, filename: string, data: SaveData, isPublic: boolean): Promise<string>;
+     static writeStreamToBucket<T>(data: T[], fileName: string, bucketName: string, projectID?: string, hook?: (data: T) => string, isPublic?: boolean, withLog?: boolean): Promise<void>;
+     static readStreamFromBucket<T>(fileName: string, bucketName: string, projectID?: string, hook?: (line: string) => T, withLog?: boolean): Promise<T[]>;
+     static deleteFile(fileName: string, bucketName: string, projectID: string): Promise<void>;
+     static exists(fileName: string, bucketName: string, projectID: string): Promise<boolean>;
  }
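Judging from these signatures, the new methods stream newline-delimited JSON: `writeStreamToBucket` serializes each item with `hook` (default `JSON.stringify`), and `readStreamFromBucket` parses each line (default `JSON.parse`). A usage sketch based only on the declarations; the object, bucket, and project names are hypothetical:

import { BucketService } from "./bucket.service";

type Row = { recipient: string; amount: string };
const rows: Row[] = [{ recipient: "0xUSER", amount: "1000" }];

// Write one JSON document per line, gzip-compressed by the implementation.
await BucketService.writeStreamToBucket<Row>(rows, "rewards/1-0xROOT.gz", "my-bucket", "my-project");

// Read the lines back, parsed through the hook.
const readBack = await BucketService.readStreamFromBucket<Row>("rewards/1-0xROOT.gz", "my-bucket", "my-project");

if (await BucketService.exists("rewards/1-0xROOT.gz", "my-bucket", "my-project")) {
    await BucketService.deleteFile("rewards/1-0xROOT.gz", "my-bucket", "my-project");
}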
package/dist/src/modules/v4/bucket/bucket.service.js
@@ -1,11 +1,92 @@
+ import { log } from "../../../utils/logger";
  import { Storage } from "@google-cloud/storage";
  export class BucketService {
-     static storage = new Storage({ projectId: "angle-production-1" });
      static async upload(bucketName, filename, data, isPublic) {
-         const file = await BucketService.storage.bucket(bucketName).file(filename);
+         const storage = new Storage({ projectId: "angle-production-1" });
+         const file = await storage.bucket(bucketName).file(filename);
          await file.save(data);
          if (isPublic)
              await file.makePublic();
          return `https://storage.googleapis.com/${bucketName}/${filename}`;
      }
+     static async writeStreamToBucket(data, fileName, bucketName, projectID, hook = JSON.stringify, isPublic = false, withLog = true) {
+         const storage = new Storage({ projectId: projectID });
+         const bucket = storage.bucket(bucketName);
+         const file = bucket.file(fileName);
+         const [exist] = await file.exists();
+         if (exist) {
+             await file.delete();
+         }
+         await file.save("", { resumable: false });
+         await file.setMetadata({
+             cacheControl: "public, max-age=300",
+         });
+         if (isPublic) {
+             await file.makePublic();
+         }
+         const writeStream = file.createWriteStream({
+             resumable: false,
+             gzip: true, // Enable gzip compression
+         });
+         const streamPromise = new Promise((resolve, reject) => {
+             writeStream.on("error", reject);
+             writeStream.on("finish", resolve);
+         });
+         for (const d of data) {
+             await writeStream.write(`${hook(d)}\n`);
+         }
+         await writeStream.end();
+         await streamPromise;
+         withLog && log.local(`✅ successfully updated https://storage.cloud.google.com/${bucketName}/${fileName}`);
+     }
+     static async readStreamFromBucket(fileName, bucketName, projectID, hook = line => JSON.parse(line), withLog = true) {
+         const storage = new Storage({ projectId: projectID });
+         const bucket = storage.bucket(bucketName);
+         const file = bucket.file(fileName);
+         const [exists] = await file.exists();
+         if (!exists) {
+             throw new Error(`File ${fileName} does not exist in bucket ${bucketName}`);
+         }
+         return new Promise((resolve, reject) => {
+             const data = [];
+             let buffer = "";
+             file
+                 .createReadStream({
+                     decompress: true,
+                 })
+                 .on("data", async (chunk) => {
+                     // instead of using callback functions here, we could use pipe streams
+                     buffer += chunk.toString();
+                     const lines = buffer.split("\n");
+                     buffer = lines.pop() || "";
+                     for (const line of lines) {
+                         if (line.trim()) {
+                             const res = hook(line);
+                             data.push(res);
+                         }
+                     }
+                 })
+                 .on("error", reject)
+                 .on("end", async () => {
+                     if (buffer.trim()) {
+                         const res = hook(buffer);
+                         data.push(res);
+                     }
+                     withLog &&
+                         log.local(`✅ Successfully read ${data.length} items from https://storage.cloud.google.com/${bucketName}/${fileName}`);
+                     resolve(data);
+                 });
+         });
+     }
+     static async deleteFile(fileName, bucketName, projectID) {
+         const storage = new Storage({ projectId: projectID });
+         const bucket = storage.bucket(bucketName);
+         const file = bucket.file(fileName);
+         await file.delete();
+     }
+     static async exists(fileName, bucketName, projectID) {
+         const storage = new Storage({ projectId: projectID });
+         const [exists] = await storage.bucket(bucketName).file(fileName).exists();
+         return exists;
+     }
  }
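Design note: the shared `static storage` client pinned to the `angle-production-1` project is gone; each method now instantiates its own `Storage` with the caller-supplied `projectID`, which is what lets the new jobs target the `merkl-data-*` projects per environment. The write path also streams with `gzip: true`, matching the `.gz` object names used throughout the jobs above.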
package/dist/src/modules/v4/campaign/campaign.controller.d.ts
@@ -112,7 +112,7 @@ export declare const CampaignController: Elysia<"/campaigns", false, {
      body: unknown;
      params: {};
      query: {
-         type?: "INVALID" | "ERC20" | "CLAMM" | "ERC20_SNAPSHOT" | "JSON_AIRDROP" | "SILO" | "RADIANT" | "MORPHO" | "DOLOMITE" | "BADGER" | "COMPOUND" | "AJNA" | "EULER" | "UNISWAP_V4" | "ION" | "EIGENLAYER" | "ERC20TRANSFERS" | "ERC20LOGPROCESSOR" | "ERC20REBASELOGPROCESSOR" | "VEST" | undefined;
+         type?: "INVALID" | "ERC20" | "CLAMM" | "ERC20_SNAPSHOT" | "JSON_AIRDROP" | "SILO" | "RADIANT" | "MORPHO" | "DOLOMITE" | "BADGER" | "COMPOUND" | "AJNA" | "EULER" | "UNISWAP_V4" | "ION" | "EIGENLAYER" | "ERC20TRANSFERS" | "ERC20LOGPROCESSOR" | "ERC20REBASELOGPROCESSOR" | "VEST" | "ERC20_FIX_APR" | "HYPERDRIVELOGPROCESSOR" | "HYPERDRIVELOGFIXPROCESSOR" | undefined;
          items?: number | undefined;
          subType?: number | undefined;
          page?: number | undefined;
@@ -184,7 +184,7 @@ export declare const CampaignController: Elysia<"/campaigns", false, {
      body: unknown;
      params: {};
      query: {
-         type?: "INVALID" | "ERC20" | "CLAMM" | "ERC20_SNAPSHOT" | "JSON_AIRDROP" | "SILO" | "RADIANT" | "MORPHO" | "DOLOMITE" | "BADGER" | "COMPOUND" | "AJNA" | "EULER" | "UNISWAP_V4" | "ION" | "EIGENLAYER" | "ERC20TRANSFERS" | "ERC20LOGPROCESSOR" | "ERC20REBASELOGPROCESSOR" | "VEST" | undefined;
+         type?: "INVALID" | "ERC20" | "CLAMM" | "ERC20_SNAPSHOT" | "JSON_AIRDROP" | "SILO" | "RADIANT" | "MORPHO" | "DOLOMITE" | "BADGER" | "COMPOUND" | "AJNA" | "EULER" | "UNISWAP_V4" | "ION" | "EIGENLAYER" | "ERC20TRANSFERS" | "ERC20LOGPROCESSOR" | "ERC20REBASELOGPROCESSOR" | "VEST" | "ERC20_FIX_APR" | "HYPERDRIVELOGPROCESSOR" | "HYPERDRIVELOGFIXPROCESSOR" | undefined;
          items?: number | undefined;
          subType?: number | undefined;
          page?: number | undefined;
package/dist/src/modules/v4/campaign/campaign.model.d.ts
@@ -141,6 +141,9 @@ export declare const GetCampaignQueryDto: import("@sinclair/typebox").TObject<{
      ERC20LOGPROCESSOR: "ERC20LOGPROCESSOR";
      ERC20REBASELOGPROCESSOR: "ERC20REBASELOGPROCESSOR";
      VEST: "VEST";
+     ERC20_FIX_APR: "ERC20_FIX_APR";
+     HYPERDRIVELOGPROCESSOR: "HYPERDRIVELOGPROCESSOR";
+     HYPERDRIVELOGFIXPROCESSOR: "HYPERDRIVELOGFIXPROCESSOR";
  }>>;
  subType: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
  campaignId: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TString>;
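Taken together, the controller and model hunks widen the campaign-type enum consistently; the same three members appear in both places:

// New campaign types introduced in this release, per the two hunks above.
type AddedCampaignTypes = "ERC20_FIX_APR" | "HYPERDRIVELOGPROCESSOR" | "HYPERDRIVELOGFIXPROCESSOR";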