payment-kit 1.25.7 → 1.25.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/api/src/crons/index.ts +24 -0
- package/api/src/libs/archive/config.ts +254 -0
- package/api/src/libs/archive/executor.ts +729 -0
- package/api/src/libs/archive/index.ts +7 -0
- package/api/src/libs/archive/lock.ts +50 -0
- package/api/src/libs/archive/policy.ts +55 -0
- package/api/src/libs/archive/query.ts +136 -0
- package/api/src/libs/archive/snapshot.ts +291 -0
- package/api/src/libs/archive/store.ts +200 -0
- package/api/src/libs/credit-grant.ts +133 -0
- package/api/src/queues/archive.ts +32 -0
- package/api/src/routes/archive.ts +176 -0
- package/api/src/routes/credit-grants.ts +57 -4
- package/api/src/routes/index.ts +2 -0
- package/api/src/routes/payment-stats.ts +167 -20
- package/api/src/store/migrations/20260129-add-grantor-did-index.ts +52 -0
- package/api/src/store/migrations/20260203-archive.ts +12 -0
- package/api/src/store/migrations/20260204-revenue-snapshot.ts +19 -0
- package/api/src/store/models/archive-lock.ts +55 -0
- package/api/src/store/models/archive-metadata.ts +132 -0
- package/api/src/store/models/index.ts +9 -0
- package/api/src/store/models/revenue-snapshot.ts +110 -0
- package/api/tests/libs/archive-config.spec.ts +185 -0
- package/api/tests/libs/archive-executor.spec.ts +678 -0
- package/api/tests/libs/archive-lock.spec.ts +130 -0
- package/api/tests/libs/archive-policy.spec.ts +255 -0
- package/api/tests/libs/archive-query.spec.ts +267 -0
- package/api/tests/libs/archive-store.spec.ts +159 -0
- package/api/tests/libs/credit-grant.spec.ts +184 -0
- package/blocklet.prefs.json +187 -0
- package/blocklet.yml +1 -1
- package/package.json +10 -10
- package/src/locales/en.tsx +4 -0
- package/src/locales/zh.tsx +4 -0
- package/src/pages/admin/overview.tsx +2 -0
- package/vite.config.ts +1 -0
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
|
|
4
|
+
import config from '@blocklet/sdk/lib/config';
|
|
5
|
+
import { Sequelize, DataTypes } from 'sequelize';
|
|
6
|
+
|
|
7
|
+
import logger from '../logger';
|
|
8
|
+
|
|
9
|
+
export function getArchiveDir(): string {
|
|
10
|
+
const dataDir = config.env?.dataDir || '/tmp';
|
|
11
|
+
const archiveDir = path.join(dataDir, 'archive');
|
|
12
|
+
if (!fs.existsSync(archiveDir)) {
|
|
13
|
+
fs.mkdirSync(archiveDir, { recursive: true });
|
|
14
|
+
}
|
|
15
|
+
return archiveDir;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export function getArchiveFilePath(fileName: string): string {
|
|
19
|
+
return path.join(getArchiveDir(), fileName);
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Get archive file path for a specific year based on record's created_at.
|
|
24
|
+
* Archive files are organized by data year (e.g., archive-2024.db contains 2024 data).
|
|
25
|
+
*/
|
|
26
|
+
export function getArchiveFilePathForYear(year: number): string {
|
|
27
|
+
return path.join(getArchiveDir(), `archive-${year}.db`);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Extract year from a record's created_at (fallback to updated_at, then current year).
|
|
32
|
+
*/
|
|
33
|
+
export function getRecordYear(record: any): number {
|
|
34
|
+
const createdAt = record?.created_at;
|
|
35
|
+
const updatedAt = record?.updated_at;
|
|
36
|
+
const dateValue = createdAt || updatedAt;
|
|
37
|
+
|
|
38
|
+
if (dateValue) {
|
|
39
|
+
const date = dateValue instanceof Date ? dateValue : new Date(dateValue);
|
|
40
|
+
if (!Number.isNaN(date.getTime())) {
|
|
41
|
+
return date.getFullYear();
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
return new Date().getFullYear();
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export function openArchiveSequelize(filePath: string): Sequelize {
|
|
49
|
+
return new Sequelize({
|
|
50
|
+
dialect: 'sqlite',
|
|
51
|
+
storage: filePath,
|
|
52
|
+
logging: false,
|
|
53
|
+
});
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
function normalizeCreateTableSql(sql: string): string {
|
|
57
|
+
return sql.replace(/^CREATE TABLE\s+/i, 'CREATE TABLE IF NOT EXISTS ');
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
function normalizeCreateIndexSql(sql: string): string {
|
|
61
|
+
return sql
|
|
62
|
+
.replace(/^CREATE INDEX\s+/i, 'CREATE INDEX IF NOT EXISTS ')
|
|
63
|
+
.replace(/^CREATE UNIQUE INDEX\s+/i, 'CREATE UNIQUE INDEX IF NOT EXISTS ');
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
type ColumnInfo = { name: string; type: string; notnull: number; dflt_value: any; pk: number };
|
|
67
|
+
|
|
68
|
+
export async function ensureArchiveTable(
|
|
69
|
+
tableName: string,
|
|
70
|
+
mainSequelize: Sequelize,
|
|
71
|
+
archiveSequelize: Sequelize
|
|
72
|
+
): Promise<void> {
|
|
73
|
+
// 1. Copy table schema (CREATE TABLE IF NOT EXISTS)
|
|
74
|
+
const [tableRows] = await mainSequelize.query('SELECT sql FROM sqlite_master WHERE type = :type AND name = :name', {
|
|
75
|
+
replacements: { type: 'table', name: tableName },
|
|
76
|
+
});
|
|
77
|
+
const tableRow = (Array.isArray(tableRows) ? tableRows[0] : null) as { sql?: string } | null;
|
|
78
|
+
const createSql = tableRow?.sql;
|
|
79
|
+
if (!createSql) {
|
|
80
|
+
throw new Error(`table schema not found: ${tableName}`);
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
const normalizedSql = normalizeCreateTableSql(createSql);
|
|
84
|
+
await archiveSequelize.query(normalizedSql);
|
|
85
|
+
|
|
86
|
+
// 2. Copy indexes (PRIMARY KEY auto-indexes have sql=NULL, skip them)
|
|
87
|
+
const [indexRows] = await mainSequelize.query(
|
|
88
|
+
'SELECT sql FROM sqlite_master WHERE type = :type AND tbl_name = :tableName AND sql IS NOT NULL',
|
|
89
|
+
{ replacements: { type: 'index', tableName } }
|
|
90
|
+
);
|
|
91
|
+
for (const indexRow of indexRows as { sql?: string }[]) {
|
|
92
|
+
if (indexRow?.sql) {
|
|
93
|
+
try {
|
|
94
|
+
const indexSql = normalizeCreateIndexSql(indexRow.sql);
|
|
95
|
+
// eslint-disable-next-line no-await-in-loop
|
|
96
|
+
await archiveSequelize.query(indexSql);
|
|
97
|
+
} catch (err: any) {
|
|
98
|
+
// Ignore "index already exists" errors
|
|
99
|
+
if (!err.message?.includes('already exists')) {
|
|
100
|
+
logger.warn('failed to create archive index', { tableName, error: err.message });
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
// 3. Sync missing columns from main DB to archive DB
|
|
107
|
+
// This handles schema migrations: if main DB has new columns, add them to archive DB
|
|
108
|
+
const [mainColumns] = await mainSequelize.query(`PRAGMA table_info("${tableName}")`);
|
|
109
|
+
const [archiveColumns] = await archiveSequelize.query(`PRAGMA table_info("${tableName}")`);
|
|
110
|
+
|
|
111
|
+
const mainColumnNames = new Set((mainColumns as ColumnInfo[]).map((c) => c.name));
|
|
112
|
+
const archiveColumnNames = new Set((archiveColumns as ColumnInfo[]).map((c) => c.name));
|
|
113
|
+
|
|
114
|
+
// Find columns in main DB but not in archive DB
|
|
115
|
+
const missingColumns = (mainColumns as ColumnInfo[]).filter((c) => !archiveColumnNames.has(c.name));
|
|
116
|
+
|
|
117
|
+
for (const col of missingColumns) {
|
|
118
|
+
try {
|
|
119
|
+
// SQLite ALTER TABLE ADD COLUMN only supports columns with default values or NULL
|
|
120
|
+
// For NOT NULL columns without defaults, we need to add them as nullable
|
|
121
|
+
const nullable = col.notnull === 0 || col.dflt_value !== null;
|
|
122
|
+
const defaultClause = col.dflt_value !== null ? ` DEFAULT ${col.dflt_value}` : '';
|
|
123
|
+
|
|
124
|
+
// eslint-disable-next-line no-await-in-loop
|
|
125
|
+
await archiveSequelize.query(
|
|
126
|
+
`ALTER TABLE "${tableName}" ADD COLUMN "${col.name}" ${col.type}${nullable ? '' : ' NULL'}${defaultClause}`
|
|
127
|
+
);
|
|
128
|
+
logger.info('added missing column to archive table', { tableName, column: col.name });
|
|
129
|
+
} catch (err: any) {
|
|
130
|
+
// Ignore "duplicate column" errors (race condition protection)
|
|
131
|
+
if (!err.message?.includes('duplicate column')) {
|
|
132
|
+
logger.warn('failed to add missing column to archive table', {
|
|
133
|
+
tableName,
|
|
134
|
+
column: col.name,
|
|
135
|
+
error: err.message,
|
|
136
|
+
});
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
// 4. Add archived_at column if not exists
|
|
142
|
+
if (!archiveColumnNames.has('archived_at') && !mainColumnNames.has('archived_at')) {
|
|
143
|
+
const queryInterface = archiveSequelize.getQueryInterface();
|
|
144
|
+
await queryInterface.addColumn(tableName, 'archived_at', {
|
|
145
|
+
type: DataTypes.DATE,
|
|
146
|
+
allowNull: false,
|
|
147
|
+
});
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
export function listArchiveFiles(): string[] {
|
|
152
|
+
const archiveDir = getArchiveDir();
|
|
153
|
+
const files = fs.readdirSync(archiveDir).filter((name) => name.endsWith('.db'));
|
|
154
|
+
files.sort();
|
|
155
|
+
return files.map((name) => path.join(archiveDir, name));
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
export function getFileSize(filePath: string): number {
|
|
159
|
+
try {
|
|
160
|
+
return fs.statSync(filePath).size;
|
|
161
|
+
} catch (error) {
|
|
162
|
+
logger.warn('stat archive file failed', { filePath, error });
|
|
163
|
+
return 0;
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
/**
|
|
168
|
+
* Remove oldest archive files when exceeding maxFiles limit.
|
|
169
|
+
* Archive files are organized by year (archive-YYYY.db), so maxFiles effectively means
|
|
170
|
+
* "keep N years of archived data" (default 10 years).
|
|
171
|
+
*
|
|
172
|
+
* This is part of the data retention policy: archive files older than maxFiles years
|
|
173
|
+
* are considered disposable history. The archived data has already served its purpose
|
|
174
|
+
* (query window, compliance retention period). If longer retention is needed,
|
|
175
|
+
* external backup should be configured before enabling archive cleanup.
|
|
176
|
+
*
|
|
177
|
+
* Compliance note: this deletion is a deliberate product decision — archived data beyond
|
|
178
|
+
* the retention window (default 10 years) is treated as expired. Operators requiring
|
|
179
|
+
* longer audit trails should configure external backup before enabling data retention.
|
|
180
|
+
*/
|
|
181
|
+
export function cleanupOldArchiveFiles(maxFiles: number): string[] {
|
|
182
|
+
const files = listArchiveFiles();
|
|
183
|
+
if (files.length <= maxFiles) {
|
|
184
|
+
return [];
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
// Files are sorted ascending by name (oldest first), remove the oldest ones
|
|
188
|
+
const toRemove = files.slice(0, files.length - maxFiles);
|
|
189
|
+
const removed: string[] = [];
|
|
190
|
+
for (const filePath of toRemove) {
|
|
191
|
+
try {
|
|
192
|
+
fs.unlinkSync(filePath);
|
|
193
|
+
removed.push(path.basename(filePath));
|
|
194
|
+
logger.info('removed old archive file', { filePath });
|
|
195
|
+
} catch (error) {
|
|
196
|
+
logger.warn('failed to remove old archive file', { filePath, error });
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
return removed;
|
|
200
|
+
}
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { BN } from '@ocap/util';
|
|
2
|
+
import { Op } from 'sequelize';
|
|
2
3
|
|
|
3
4
|
import { CreditGrant, Customer, PaymentCurrency, Subscription } from '../store/models';
|
|
4
5
|
import { formatMetadata } from './util';
|
|
@@ -145,3 +146,135 @@ export function calculateExpiresAt(validDurationValue: number, validDurationUnit
|
|
|
145
146
|
|
|
146
147
|
return expiresAt.unix();
|
|
147
148
|
}
|
|
149
|
+
|
|
150
|
+
/**
|
|
151
|
+
* Get credit grant statistics with flexible filtering
|
|
152
|
+
*/
|
|
153
|
+
export async function getCreditGrantStats(params: {
|
|
154
|
+
grantedBy?: string;
|
|
155
|
+
category?: 'paid' | 'promotional';
|
|
156
|
+
currencyId: string;
|
|
157
|
+
startDate: number;
|
|
158
|
+
endDate: number;
|
|
159
|
+
timezoneOffset?: number;
|
|
160
|
+
}) {
|
|
161
|
+
const { grantedBy, category, currencyId, startDate, endDate, timezoneOffset } = params;
|
|
162
|
+
const offset = typeof timezoneOffset === 'number' ? timezoneOffset : 0;
|
|
163
|
+
|
|
164
|
+
// Fetch currency once at the start (since currencyId is required, results will only contain this currency)
|
|
165
|
+
const currency = await PaymentCurrency.findByPk(currencyId, {
|
|
166
|
+
attributes: ['id', 'name', 'symbol', 'decimal'],
|
|
167
|
+
});
|
|
168
|
+
|
|
169
|
+
if (!currency) {
|
|
170
|
+
return {
|
|
171
|
+
stats: {
|
|
172
|
+
currency_id: currencyId,
|
|
173
|
+
currency: null,
|
|
174
|
+
grant_count: 0,
|
|
175
|
+
total_granted: '0',
|
|
176
|
+
total_remaining: '0',
|
|
177
|
+
total_consumed: '0',
|
|
178
|
+
},
|
|
179
|
+
daily_stats: [],
|
|
180
|
+
};
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
const currencyJson = currency.toJSON();
|
|
184
|
+
|
|
185
|
+
// Build where clause
|
|
186
|
+
const where: any = {
|
|
187
|
+
currency_id: currencyId,
|
|
188
|
+
created_at: {
|
|
189
|
+
[Op.gte]: new Date(startDate * 1000),
|
|
190
|
+
[Op.lte]: new Date(endDate * 1000),
|
|
191
|
+
},
|
|
192
|
+
};
|
|
193
|
+
|
|
194
|
+
if (grantedBy) {
|
|
195
|
+
where['metadata.granted_by'] = grantedBy;
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
if (category) {
|
|
199
|
+
where.category = category;
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
const grants = (await CreditGrant.findAll({
|
|
203
|
+
where,
|
|
204
|
+
attributes: ['amount', 'remaining_amount', 'created_at'],
|
|
205
|
+
raw: true,
|
|
206
|
+
})) as any[];
|
|
207
|
+
|
|
208
|
+
const dailyMap = new Map<
|
|
209
|
+
string,
|
|
210
|
+
{
|
|
211
|
+
date: string;
|
|
212
|
+
grant_count: number;
|
|
213
|
+
total_granted: BN;
|
|
214
|
+
total_remaining: BN;
|
|
215
|
+
}
|
|
216
|
+
>();
|
|
217
|
+
|
|
218
|
+
const aggregate = {
|
|
219
|
+
grant_count: 0,
|
|
220
|
+
total_granted: new BN(0),
|
|
221
|
+
total_remaining: new BN(0),
|
|
222
|
+
};
|
|
223
|
+
|
|
224
|
+
grants.forEach((grant) => {
|
|
225
|
+
const date = dayjs.utc(grant.created_at).utcOffset(offset).format('YYYY-MM-DD');
|
|
226
|
+
const amount = grant.amount || '0';
|
|
227
|
+
const remainingAmount = grant.remaining_amount || '0';
|
|
228
|
+
if (!dailyMap.has(date)) {
|
|
229
|
+
dailyMap.set(date, {
|
|
230
|
+
date,
|
|
231
|
+
grant_count: 0,
|
|
232
|
+
total_granted: new BN(0),
|
|
233
|
+
total_remaining: new BN(0),
|
|
234
|
+
});
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
const daily = dailyMap.get(date)!;
|
|
238
|
+
daily.grant_count += 1;
|
|
239
|
+
daily.total_granted = daily.total_granted.add(new BN(amount));
|
|
240
|
+
daily.total_remaining = daily.total_remaining.add(new BN(remainingAmount));
|
|
241
|
+
|
|
242
|
+
aggregate.grant_count += 1;
|
|
243
|
+
aggregate.total_granted = aggregate.total_granted.add(new BN(amount));
|
|
244
|
+
aggregate.total_remaining = aggregate.total_remaining.add(new BN(remainingAmount));
|
|
245
|
+
});
|
|
246
|
+
|
|
247
|
+
const dailyStats = Array.from(dailyMap.values())
|
|
248
|
+
.sort((a, b) => a.date.localeCompare(b.date))
|
|
249
|
+
.map((day) => {
|
|
250
|
+
const totalGranted = day.total_granted.toString();
|
|
251
|
+
const totalRemaining = day.total_remaining.toString();
|
|
252
|
+
const totalConsumed = day.total_granted.sub(day.total_remaining).toString();
|
|
253
|
+
|
|
254
|
+
return {
|
|
255
|
+
date: day.date,
|
|
256
|
+
currency_id: currencyId,
|
|
257
|
+
grant_count: day.grant_count,
|
|
258
|
+
total_granted: totalGranted,
|
|
259
|
+
total_remaining: totalRemaining,
|
|
260
|
+
total_consumed: totalConsumed,
|
|
261
|
+
};
|
|
262
|
+
});
|
|
263
|
+
|
|
264
|
+
const totalGranted = aggregate.total_granted.toString();
|
|
265
|
+
const totalRemaining = aggregate.total_remaining.toString();
|
|
266
|
+
const totalConsumed = aggregate.total_granted.sub(aggregate.total_remaining).toString();
|
|
267
|
+
const statsWithConsumed = {
|
|
268
|
+
currency_id: currencyId,
|
|
269
|
+
currency: currencyJson,
|
|
270
|
+
grant_count: aggregate.grant_count,
|
|
271
|
+
total_granted: totalGranted,
|
|
272
|
+
total_remaining: totalRemaining,
|
|
273
|
+
total_consumed: totalConsumed,
|
|
274
|
+
};
|
|
275
|
+
|
|
276
|
+
return {
|
|
277
|
+
stats: statsWithConsumed,
|
|
278
|
+
daily_stats: dailyStats,
|
|
279
|
+
};
|
|
280
|
+
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { nanoid } from 'nanoid';
|
|
2
|
+
|
|
3
|
+
import logger from '../libs/logger';
|
|
4
|
+
import createQueue from '../libs/queue';
|
|
5
|
+
import { runArchiveJob } from '../libs/archive/executor';
|
|
6
|
+
|
|
7
|
+
/** Payload describing one archive run submitted to the archive queue. */
export type ArchiveQueueJob = {
  // Restrict the run to these tables; omit to process every configured table.
  tables?: string[];
  // Presumably a no-write preview run — confirm against runArchiveJob in executor.ts.
  dryRun?: boolean;
  // Origin of the job, recorded for logging/auditing.
  triggeredBy: 'cron' | 'manual';
  // DID of the admin who triggered a manual run (unset for cron).
  triggeredByUserId?: string;
};

// concurrency: 1 serializes archive runs so they never overlap;
// maxRetries: 1 limits re-execution of a failed run (per createQueue semantics).
export const archiveQueue = createQueue<ArchiveQueueJob>({
  name: 'archive',
  onJob: (job) => runArchiveJob(job),
  options: {
    concurrency: 1,
    maxRetries: 1,
  },
});
|
|
22
|
+
|
|
23
|
+
export const enqueueArchiveJob = (job: ArchiveQueueJob) => {
|
|
24
|
+
const id = nanoid();
|
|
25
|
+
archiveQueue.push({ id, job, persist: false });
|
|
26
|
+
logger.info('archive job queued', { id, job });
|
|
27
|
+
return id;
|
|
28
|
+
};
|
|
29
|
+
|
|
30
|
+
// Surface failed archive jobs in the logs so operators can spot broken runs.
archiveQueue.on('failed', ({ id, job, error }) => {
  logger.error('archive job failed', { id, job, error });
});
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
import { Router } from 'express';
|
|
2
|
+
import Joi from 'joi';
|
|
3
|
+
|
|
4
|
+
import dayjs from '../libs/dayjs';
|
|
5
|
+
import { authenticate } from '../libs/security';
|
|
6
|
+
import logger from '../libs/logger';
|
|
7
|
+
import { createFlexibleEvent } from '../libs/audit';
|
|
8
|
+
import { getRetentionConfig } from '../libs/archive/config';
|
|
9
|
+
import { previewArchive } from '../libs/archive/executor';
|
|
10
|
+
import { queryArchive } from '../libs/archive/query';
|
|
11
|
+
import { enqueueArchiveJob } from '../queues/archive';
|
|
12
|
+
import { ArchiveMetadata } from '../store/models/archive-metadata';
|
|
13
|
+
import { listArchiveFiles, getFileSize } from '../libs/archive/store';
|
|
14
|
+
|
|
15
|
+
const router = Router();
// Every archive endpoint is restricted to owner/admin (component auth enabled).
const authAdmin = authenticate({ component: true, roles: ['owner', 'admin'] });

// Query params for GET /:table. `from`/`to` are integer time bounds —
// presumably unix seconds on created_at; confirm against queryArchive.
const querySchema = Joi.object({
  id: Joi.string().optional(),
  customer_id: Joi.string().optional(),
  from: Joi.number().integer().required(),
  to: Joi.number().integer().optional(),
  page: Joi.number().integer().min(1).default(1),
  limit: Joi.number().integer().min(1).max(100).default(20),
});
|
|
26
|
+
|
|
27
|
+
router.get('/status', authAdmin, async (_req, res) => {
|
|
28
|
+
try {
|
|
29
|
+
const config = getRetentionConfig();
|
|
30
|
+
const lastRun = await ArchiveMetadata.findOne({ order: [['created_at', 'DESC']] });
|
|
31
|
+
const oldest = await ArchiveMetadata.findOne({ order: [['created_at', 'ASC']] });
|
|
32
|
+
const archives = await listArchiveFiles();
|
|
33
|
+
const totalSize = archives.reduce((sum, file) => sum + getFileSize(file), 0);
|
|
34
|
+
|
|
35
|
+
// Calculate next Wednesday at scheduled hour (cron: 0 0 ${hour} * * 3)
|
|
36
|
+
const scheduleHour = config.schedule.hour;
|
|
37
|
+
let nextRun = dayjs().day(3).hour(scheduleHour).minute(0).second(0).millisecond(0);
|
|
38
|
+
if (nextRun.isBefore(dayjs())) {
|
|
39
|
+
nextRun = nextRun.add(1, 'week');
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
res.json({
|
|
43
|
+
enabled: config.enabled,
|
|
44
|
+
config,
|
|
45
|
+
lastRun: lastRun
|
|
46
|
+
? {
|
|
47
|
+
id: lastRun.id,
|
|
48
|
+
timestamp: lastRun.created_at,
|
|
49
|
+
duration_ms: lastRun.duration_ms,
|
|
50
|
+
status: lastRun.status,
|
|
51
|
+
tables_processed: Object.keys(lastRun.tables || {}).length,
|
|
52
|
+
total_archived: lastRun.total_records,
|
|
53
|
+
total_failed: Object.values(lastRun.tables || {}).reduce(
|
|
54
|
+
(sum: number, x: any) => sum + (x.failed_count || 0),
|
|
55
|
+
0
|
|
56
|
+
),
|
|
57
|
+
}
|
|
58
|
+
: undefined,
|
|
59
|
+
nextScheduledRun: config.schedule.enabled ? nextRun.unix() : undefined,
|
|
60
|
+
archives: {
|
|
61
|
+
count: archives.length,
|
|
62
|
+
totalSize,
|
|
63
|
+
oldestArchive: oldest?.archive_file,
|
|
64
|
+
newestArchive: lastRun?.archive_file,
|
|
65
|
+
},
|
|
66
|
+
});
|
|
67
|
+
} catch (error: any) {
|
|
68
|
+
logger.error('archive status error', error);
|
|
69
|
+
res.status(500).json({ error: error.message });
|
|
70
|
+
}
|
|
71
|
+
});
|
|
72
|
+
|
|
73
|
+
/**
 * GET /:table — query rows that have been moved into archive files.
 * Requires `from`; results are paginated and every query is recorded as an
 * audit event so access to archived (potentially sensitive) data is traceable.
 */
router.get('/:table', authAdmin, async (req, res) => {
  try {
    const config = getRetentionConfig();
    if (!config.enabled) {
      return res.status(400).json({ error: 'Archive is disabled' });
    }

    const { value, error } = querySchema.validate(req.query, { stripUnknown: true, convert: true });
    if (error) {
      return res.status(400).json({ error: error.message });
    }

    const { table } = req.params;
    if (!table) {
      return res.status(400).json({ error: 'table is required' });
    }
    // Only tables present in the retention config may be queried.
    if (!Object.prototype.hasOwnProperty.call(config.tables, table)) {
      return res.status(400).json({ error: `Unsupported table: ${table}` });
    }

    // NOTE(review): defensive only — querySchema marks `from` as .required(),
    // so validation above already rejects a missing value.
    if (value.from === undefined) {
      return res.status(400).json({ error: 'from is required' });
    }

    const result = await queryArchive(
      {
        table,
        id: value.id,
        customer_id: value.customer_id,
        from: value.from,
        to: value.to,
        page: value.page,
        limit: value.limit,
      },
      req.user?.did
    );

    // Audit trail: who queried which table/time range and what came back.
    await createFlexibleEvent(
      'archive.query',
      'Archive',
      table,
      {
        table,
        time_range: { from: value.from, to: value.to },
        result_count: result.data.length,
        archive_files: result.archiveFiles,
      },
      { requestedBy: req.user?.did }
    );

    return res.json({
      data: result.data,
      pagination: {
        page: value.page,
        limit: value.limit,
        total: result.total,
        hasMore: value.page * value.limit < result.total,
      },
      source: {
        // Which archive files were consulted to answer this query.
        archive_files: result.archiveFiles,
      },
    });
  } catch (error: any) {
    logger.error('archive query error', error);
    return res.status(500).json({ error: error.message });
  }
});
|
|
140
|
+
|
|
141
|
+
const runSchema = Joi.object({
|
|
142
|
+
tables: Joi.array().items(Joi.string()).optional(),
|
|
143
|
+
dryRun: Joi.boolean().optional(),
|
|
144
|
+
});
|
|
145
|
+
|
|
146
|
+
router.post('/run', authAdmin, async (req, res) => {
|
|
147
|
+
try {
|
|
148
|
+
const config = getRetentionConfig();
|
|
149
|
+
if (!config.enabled) {
|
|
150
|
+
return res.status(400).json({ error: 'Archive is disabled' });
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
const { value, error } = runSchema.validate(req.body, { stripUnknown: true });
|
|
154
|
+
if (error) {
|
|
155
|
+
return res.status(400).json({ error: error.message });
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
if (value?.dryRun) {
|
|
159
|
+
const preview = await previewArchive({ tables: value.tables });
|
|
160
|
+
return res.json({ status: 'dry_run_complete', preview });
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
const jobId = enqueueArchiveJob({
|
|
164
|
+
tables: value.tables,
|
|
165
|
+
triggeredBy: 'manual',
|
|
166
|
+
triggeredByUserId: req.user?.did,
|
|
167
|
+
});
|
|
168
|
+
|
|
169
|
+
return res.json({ status: 'started', jobId });
|
|
170
|
+
} catch (error: any) {
|
|
171
|
+
logger.error('archive run error', error);
|
|
172
|
+
return res.status(500).json({ error: error.message });
|
|
173
|
+
}
|
|
174
|
+
});
|
|
175
|
+
|
|
176
|
+
export default router;
|
|
@@ -20,7 +20,7 @@ import {
|
|
|
20
20
|
Product,
|
|
21
21
|
Subscription,
|
|
22
22
|
} from '../store/models';
|
|
23
|
-
import { createCreditGrant } from '../libs/credit-grant';
|
|
23
|
+
import { createCreditGrant, getCreditGrantStats } from '../libs/credit-grant';
|
|
24
24
|
import { expireGrant } from '../queues/credit-grant';
|
|
25
25
|
import { getMeterPriceIdsFromSubscription } from '../libs/subscription';
|
|
26
26
|
import { blocklet } from '../libs/auth';
|
|
@@ -48,7 +48,7 @@ const authPortal = authenticate<CreditGrant>({
|
|
|
48
48
|
const creditGrantSchema = Joi.object({
|
|
49
49
|
amount: Joi.number().required(),
|
|
50
50
|
currency_id: Joi.string().max(15).optional(),
|
|
51
|
-
customer_id: Joi.string().max(
|
|
51
|
+
customer_id: Joi.string().max(45).required(),
|
|
52
52
|
name: Joi.string().max(255).optional(),
|
|
53
53
|
category: Joi.string().valid('paid', 'promotional').required(),
|
|
54
54
|
priority: Joi.number().integer().min(0).max(100).default(50),
|
|
@@ -661,6 +661,59 @@ router.get('/verify-availability', authMine, async (req, res) => {
|
|
|
661
661
|
}
|
|
662
662
|
});
|
|
663
663
|
|
|
664
|
+
// Schema for stats endpoint
const statsSchema = Joi.object({
  // Credit granted by did.
  // The did that grants the credit is not necessarily the component that sends the request.
  granted_by: Joi.string().optional(),
  category: Joi.string().valid('paid', 'promotional').optional(),
  currency_id: Joi.string().required(),
  // Unix-second range bounds for created_at filtering.
  start_date: Joi.number().integer().required(),
  end_date: Joi.number().integer().required(),
  // Offset from UTC in minutes (UTC-12:00 .. UTC+14:00), used for daily bucketing.
  timezone_offset: Joi.number()
    .integer()
    .min(-12 * 60)
    .max(14 * 60)
    .optional(),
});

// Get credit grant statistics with flexible filtering.
// NOTE: registered before GET /:id so the literal path '/stats' is matched first.
router.get('/stats', auth, async (req, res) => {
  try {
    const { error, value } = statsSchema.validate(req.query, { stripUnknown: true });
    if (error) {
      return res.status(400).json({ error: error.message });
    }

    const {
      granted_by: grantedBy,
      category,
      currency_id: currencyId,
      start_date: startDate,
      end_date: endDate,
      timezone_offset: timezoneOffset,
    } = value;

    if (startDate > endDate) {
      return res.status(400).json({ error: 'start_date must be less than or equal to end_date' });
    }

    const result = await getCreditGrantStats({
      grantedBy,
      category,
      currencyId,
      startDate,
      endDate,
      timezoneOffset,
    });

    return res.json(result);
  } catch (err: any) {
    // NOTE(review): unexpected failures respond 400 — this matches the file's
    // existing error convention, though 500 would arguably be more accurate.
    logger.error('Error getting credit grant stats', { error: err.message, query: req.query });
    return res.status(400).json({ error: err.message });
  }
});
|
|
716
|
+
|
|
664
717
|
router.get('/:id', authPortal, async (req, res) => {
|
|
665
718
|
const creditGrant = (await CreditGrant.findByPk(req.params.id, {
|
|
666
719
|
include: [
|
|
@@ -745,7 +798,7 @@ router.post('/', auth, async (req, res) => {
|
|
|
745
798
|
const creditGrant = await createCreditGrant({
|
|
746
799
|
amount: unitAmount,
|
|
747
800
|
currency_id: currencyId,
|
|
748
|
-
customer_id:
|
|
801
|
+
customer_id: customer.id,
|
|
749
802
|
name: req.body.name,
|
|
750
803
|
category: req.body.category,
|
|
751
804
|
priority: req.body.priority,
|
|
@@ -764,7 +817,7 @@ router.post('/', auth, async (req, res) => {
|
|
|
764
817
|
paymentCurrency,
|
|
765
818
|
});
|
|
766
819
|
} catch (err: any) {
|
|
767
|
-
logger.error('create credit grant failed', { error: err
|
|
820
|
+
logger.error('create credit grant failed', { error: err, request: req.body });
|
|
768
821
|
return res.status(400).json({ error: err.message });
|
|
769
822
|
}
|
|
770
823
|
});
|
package/api/src/routes/index.ts
CHANGED
|
@@ -38,6 +38,7 @@ import webhookAttempts from './webhook-attempts';
|
|
|
38
38
|
import webhookEndpoints from './webhook-endpoints';
|
|
39
39
|
import vendor from './vendor';
|
|
40
40
|
import tool from './tool';
|
|
41
|
+
import archive from './archive';
|
|
41
42
|
|
|
42
43
|
const router = Router();
|
|
43
44
|
|
|
@@ -97,5 +98,6 @@ router.use('/webhook-attempts', webhookAttempts);
|
|
|
97
98
|
router.use('/webhook-endpoints', webhookEndpoints);
|
|
98
99
|
router.use('/vendors', vendor);
|
|
99
100
|
router.use('/tool', tool);
|
|
101
|
+
router.use('/archive', archive);
|
|
100
102
|
|
|
101
103
|
export default router;
|