@mzedstudio/uploadthingtrack 0.1.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +176 -0
- package/README.md +264 -0
- package/dist/client/index.d.ts +157 -0
- package/dist/client/index.d.ts.map +1 -0
- package/dist/client/index.js +121 -0
- package/dist/client/index.js.map +1 -0
- package/dist/component/_generated/api.d.ts +48 -0
- package/dist/component/_generated/api.d.ts.map +1 -0
- package/dist/component/_generated/api.js +31 -0
- package/dist/component/_generated/api.js.map +1 -0
- package/dist/component/_generated/component.d.ts +119 -0
- package/dist/component/_generated/component.d.ts.map +1 -0
- package/dist/component/_generated/component.js +11 -0
- package/dist/component/_generated/component.js.map +1 -0
- package/dist/component/_generated/dataModel.d.ts +46 -0
- package/dist/component/_generated/dataModel.d.ts.map +1 -0
- package/dist/component/_generated/dataModel.js +11 -0
- package/dist/component/_generated/dataModel.js.map +1 -0
- package/dist/component/_generated/server.d.ts +121 -0
- package/dist/component/_generated/server.d.ts.map +1 -0
- package/dist/component/_generated/server.js +78 -0
- package/dist/component/_generated/server.js.map +1 -0
- package/dist/component/access.d.ts +9 -0
- package/dist/component/access.d.ts.map +1 -0
- package/dist/component/access.js +31 -0
- package/dist/component/access.js.map +1 -0
- package/dist/component/callbacks.d.ts +16 -0
- package/dist/component/callbacks.d.ts.map +1 -0
- package/dist/component/callbacks.js +147 -0
- package/dist/component/callbacks.js.map +1 -0
- package/dist/component/cleanup.d.ts +9 -0
- package/dist/component/cleanup.d.ts.map +1 -0
- package/dist/component/cleanup.js +32 -0
- package/dist/component/cleanup.js.map +1 -0
- package/dist/component/config.d.ts +42 -0
- package/dist/component/config.d.ts.map +1 -0
- package/dist/component/config.js +87 -0
- package/dist/component/config.js.map +1 -0
- package/dist/component/convex.config.d.ts +3 -0
- package/dist/component/convex.config.d.ts.map +1 -0
- package/dist/component/convex.config.js +3 -0
- package/dist/component/convex.config.js.map +1 -0
- package/dist/component/files.d.ts +72 -0
- package/dist/component/files.d.ts.map +1 -0
- package/dist/component/files.js +153 -0
- package/dist/component/files.js.map +1 -0
- package/dist/component/queries.d.ts +43 -0
- package/dist/component/queries.d.ts.map +1 -0
- package/dist/component/queries.js +113 -0
- package/dist/component/queries.js.map +1 -0
- package/dist/component/schema.d.ts +97 -0
- package/dist/component/schema.d.ts.map +1 -0
- package/dist/component/schema.js +42 -0
- package/dist/component/schema.js.map +1 -0
- package/dist/component/stats.d.ts +7 -0
- package/dist/component/stats.d.ts.map +1 -0
- package/dist/component/stats.js +20 -0
- package/dist/component/stats.js.map +1 -0
- package/dist/component/types.d.ts +78 -0
- package/dist/component/types.d.ts.map +1 -0
- package/dist/component/types.js +34 -0
- package/dist/component/types.js.map +1 -0
- package/package.json +84 -0
- package/src/client/index.ts +277 -0
- package/src/component/_generated/api.ts +64 -0
- package/src/component/_generated/component.ts +159 -0
- package/src/component/_generated/dataModel.ts +60 -0
- package/src/component/_generated/server.ts +156 -0
- package/src/component/access.ts +39 -0
- package/src/component/callbacks.ts +173 -0
- package/src/component/cleanup.ts +40 -0
- package/src/component/config.ts +115 -0
- package/src/component/convex.config.ts +2 -0
- package/src/component/files.ts +186 -0
- package/src/component/queries.ts +121 -0
- package/src/component/schema.ts +44 -0
- package/src/component/stats.ts +23 -0
- package/src/component/types.ts +49 -0
- package/src/test.ts +21 -0
package/src/component/access.ts
@@ -0,0 +1,39 @@
+import type { AccessRule } from "./types";
+
+export function canAccess(params: {
+  ownerId: string;
+  viewerId?: string | null;
+  fileRule?: AccessRule;
+  folderRule?: AccessRule;
+}): boolean {
+  const { ownerId, viewerId } = params;
+  if (viewerId && viewerId === ownerId) return true;
+
+  const rule = params.fileRule ?? params.folderRule;
+  if (!rule) return false;
+
+  if (viewerId && rule.denyUserIds?.includes(viewerId)) return false;
+
+  if (rule.visibility === "public") return true;
+
+  if (!viewerId) return false;
+
+  if (rule.visibility === "private") return false;
+
+  if (rule.visibility === "restricted") {
+    return Boolean(rule.allowUserIds?.includes(viewerId));
+  }
+
+  return false;
+}
+
+export function sanitizeAccessRule(rule?: AccessRule): AccessRule | undefined {
+  if (!rule) return undefined;
+  const unique = (list?: string[]) =>
+    list ? Array.from(new Set(list.filter((value) => value.length > 0))) : undefined;
+  return {
+    visibility: rule.visibility,
+    allowUserIds: unique(rule.allowUserIds),
+    denyUserIds: unique(rule.denyUserIds),
+  };
+}
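A minimal sketch of how canAccess and sanitizeAccessRule behave; the rule objects below are illustrative values, not taken from the package.

import { canAccess, sanitizeAccessRule } from "./access";

// The owner always passes, regardless of any rule.
canAccess({ ownerId: "user_1", viewerId: "user_1" }); // true

// Public files are visible even to anonymous viewers.
canAccess({ ownerId: "user_1", viewerId: null, fileRule: { visibility: "public" } }); // true

// Restricted files require the viewer to be on the allow list.
canAccess({
  ownerId: "user_1",
  viewerId: "user_2",
  fileRule: { visibility: "restricted", allowUserIds: ["user_2"] },
}); // true

// An explicit deny wins over everything except ownership.
canAccess({
  ownerId: "user_1",
  viewerId: "user_2",
  fileRule: { visibility: "public", denyUserIds: ["user_2"] },
}); // false

// sanitizeAccessRule dedupes ids and drops empty strings before storage.
sanitizeAccessRule({ visibility: "restricted", allowUserIds: ["a", "a", ""] });
// => { visibility: "restricted", allowUserIds: ["a"], denyUserIds: undefined }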
package/src/component/callbacks.ts
@@ -0,0 +1,173 @@
+import { action } from "./_generated/server";
+import { v } from "convex/values";
+import { internal } from "./_generated/api";
+
+function normalizeSignature(signature: string): string {
+  const trimmed = signature.trim();
+  if (trimmed.startsWith("hmac-sha256=")) {
+    return trimmed.slice("hmac-sha256=".length);
+  }
+  return trimmed;
+}
+
+function hexToBytes(hex: string): Uint8Array | null {
+  if (hex.length % 2 !== 0) return null;
+  const bytes = new Uint8Array(hex.length / 2);
+  for (let i = 0; i < bytes.length; i += 1) {
+    const chunk = hex.slice(i * 2, i * 2 + 2);
+    const value = Number.parseInt(chunk, 16);
+    if (Number.isNaN(value)) return null;
+    bytes[i] = value;
+  }
+  return bytes;
+}
+
+function timingSafeEqualBytes(a: Uint8Array, b: Uint8Array) {
+  if (a.length !== b.length) return false;
+  let diff = 0;
+  for (let i = 0; i < a.length; i += 1) {
+    diff |= a[i] ^ b[i];
+  }
+  return diff === 0;
+}
+
+async function hmacSha256Hex(key: string, message: string) {
+  if (!globalThis.crypto?.subtle) {
+    throw new Error("WebCrypto unavailable in this runtime.");
+  }
+  const encoder = new TextEncoder();
+  const cryptoKey = await globalThis.crypto.subtle.importKey(
+    "raw",
+    encoder.encode(key),
+    { name: "HMAC", hash: "SHA-256" },
+    false,
+    ["sign"],
+  );
+  const signature = await globalThis.crypto.subtle.sign(
+    "HMAC",
+    cryptoKey,
+    encoder.encode(message),
+  );
+  const bytes = new Uint8Array(signature);
+  return Array.from(bytes, (value) => value.toString(16).padStart(2, "0")).join("");
+}
+
+async function verifySignature(rawBody: string, signature: string, apiKey: string) {
+  const expected = await hmacSha256Hex(apiKey, rawBody);
+  const actual = normalizeSignature(signature);
+
+  const expectedBytes = hexToBytes(expected);
+  const actualBytes = hexToBytes(actual);
+  if (!expectedBytes || !actualBytes) return false;
+
+  return timingSafeEqualBytes(expectedBytes, actualBytes);
+}
+
+function extractUserId(metadata: any): string | undefined {
+  if (!metadata) return undefined;
+  return (
+    metadata.userId ??
+    metadata.ownerId ??
+    metadata.uploadedBy ??
+    metadata.user
+  );
+}
+
+function extractTags(metadata: any): string[] | undefined {
+  if (!metadata) return undefined;
+  if (Array.isArray(metadata.tags)) return metadata.tags;
+  if (typeof metadata.tags === "string") {
+    return metadata.tags
+      .split(",")
+      .map((tag: string) => tag.trim())
+      .filter(Boolean);
+  }
+  return undefined;
+}
+
+function toNumber(value: any): number | undefined {
+  if (typeof value === "number") return value;
+  if (typeof value === "string") {
+    const parsed = Number(value);
+    return Number.isNaN(parsed) ? undefined : parsed;
+  }
+  return undefined;
+}
+
+type CallbackResult =
+  | { ok: true; fileId: string; hook: string }
+  | { ok: false; error: string };
+
+export const handleUploadthingCallback = action({
+  args: {
+    rawBody: v.string(),
+    signature: v.string(),
+    hook: v.string(),
+    apiKey: v.optional(v.string()),
+  },
+  handler: async (ctx, args): Promise<CallbackResult> => {
+    const globals = await ctx.runQuery(internal.config.getGlobalsInternal, {});
+    const apiKey = args.apiKey ?? globals.uploadthingApiKey;
+    if (!apiKey) {
+      throw new Error("UploadThing API key not configured.");
+    }
+
+    const isValid = await verifySignature(args.rawBody, args.signature, apiKey);
+    if (!isValid) {
+      return { ok: false, error: "invalid_signature" };
+    }
+
+    let payload: any;
+    try {
+      payload = JSON.parse(args.rawBody);
+    } catch (error) {
+      return { ok: false, error: "invalid_json" };
+    }
+
+    const file = payload.file ?? payload;
+    const metadata = payload.metadata ?? file.metadata ?? {};
+
+    const key = file.key ?? file.fileKey ?? file.id;
+    const url = file.url ?? file.fileUrl;
+    const name = file.name ?? file.filename ?? file.fileName;
+    const size = toNumber(file.size ?? file.fileSize);
+    const mimeType = file.type ?? file.mimeType ?? file.contentType;
+    const customId = file.customId ?? file.customID;
+    const fileType = file.fileType ?? metadata.fileType;
+
+    if (!key || !url || !name || size === undefined || !mimeType) {
+      return { ok: false, error: "missing_file_fields" };
+    }
+
+    const userId = extractUserId(metadata) ?? payload.userId;
+    if (!userId) {
+      return { ok: false, error: "missing_user_id" };
+    }
+
+    const options = {
+      tags: extractTags(metadata),
+      folder: metadata.folder,
+      access: metadata.access,
+      metadata,
+      expiresAt: toNumber(metadata.expiresAt),
+      ttlMs: toNumber(metadata.ttlMs),
+      fileType,
+    };
+
+    const fileId: string = await ctx.runMutation(internal.files.internalUpsertFile, {
+      file: {
+        key,
+        url,
+        name,
+        size,
+        mimeType,
+        customId,
+        fileType,
+      },
+      userId,
+      options,
+    });
+
+    return { ok: true, fileId, hook: args.hook };
+  },
+});
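A sketch of how a host app could forward the UploadThing webhook to handleUploadthingCallback from a Convex HTTP route. The component mount name (uploadthingtrack) and the x-uploadthing-signature header name are assumptions not confirmed by this diff; the hook argument is simply echoed back in the result.

// convex/http.ts in the host app (illustrative)
import { httpRouter } from "convex/server";
import { httpAction } from "./_generated/server";
import { components } from "./_generated/api";

const http = httpRouter();

http.route({
  path: "/uploadthing/callback",
  method: "POST",
  handler: httpAction(async (ctx, request) => {
    const rawBody = await request.text();
    // Assumed header name; the action accepts both raw and "hmac-sha256="-prefixed values.
    const signature = request.headers.get("x-uploadthing-signature") ?? "";
    const result = await ctx.runAction(
      components.uploadthingtrack.callbacks.handleUploadthingCallback, // assumed mount name
      { rawBody, signature, hook: "upload-complete" },
    );
    return new Response(JSON.stringify(result), {
      status: result.ok ? 200 : 400,
      headers: { "Content-Type": "application/json" },
    });
  }),
});

export default http;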
package/src/component/cleanup.ts
@@ -0,0 +1,40 @@
+import { action } from "./_generated/server";
+import { v } from "convex/values";
+import { internal } from "./_generated/api";
+
+export const cleanupExpired = action({
+  args: {
+    batchSize: v.optional(v.number()),
+    dryRun: v.optional(v.boolean()),
+  },
+  returns: v.object({
+    deletedCount: v.number(),
+    keys: v.array(v.string()),
+    hasMore: v.boolean(),
+  }),
+  handler: async (ctx, args): Promise<{
+    deletedCount: number;
+    keys: string[];
+    hasMore: boolean;
+  }> => {
+    const globals = await ctx.runQuery(internal.config.getGlobalsInternal, {});
+    const limit = args.batchSize ?? (globals as any).deleteBatchSize ?? 100;
+
+    const expired = (await ctx.runQuery(internal.queries.expiredBatch, {
+      now: Date.now(),
+      limit,
+    })) as { key: string; _id: string }[];
+
+    const keys = expired.map((item) => item.key);
+
+    if (!args.dryRun && keys.length > 0) {
+      await ctx.runMutation(internal.files.deleteFilesByKey, { keys });
+    }
+
+    return {
+      deletedCount: args.dryRun ? 0 : keys.length,
+      keys,
+      hasMore: expired.length >= limit,
+    };
+  },
+});
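Since cleanupExpired is a public action that reports hasMore, a host app can drive it from its own scheduled function. A sketch under the same assumed mount name; the cron cadence and the maintenance module name are illustrative, not part of the package.

// convex/crons.ts in the host app (illustrative)
import { cronJobs } from "convex/server";
import { internal } from "./_generated/api";

const crons = cronJobs();
crons.interval("purge expired uploads", { hours: 1 }, internal.maintenance.purgeExpiredUploads);
export default crons;

// convex/maintenance.ts in the host app (illustrative)
import { internalAction } from "./_generated/server";
import { components } from "./_generated/api";

export const purgeExpiredUploads = internalAction({
  args: {},
  handler: async (ctx) => {
    let hasMore = true;
    while (hasMore) {
      const result = await ctx.runAction(
        components.uploadthingtrack.cleanup.cleanupExpired, // assumed mount name
        { batchSize: 100 },
      );
      hasMore = result.hasMore;
    }
  },
});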
package/src/component/config.ts
@@ -0,0 +1,115 @@
+import { internalQuery, mutation, query } from "./_generated/server";
+import { v } from "convex/values";
+import { configUpdateValidator } from "./types";
+
+const GLOBALS_ID = "globals" as const;
+
+export type Globals = {
+  uploadthingApiKey?: string;
+  defaultTtlMs?: number;
+  ttlByMimeType?: Record<string, number>;
+  ttlByFileType?: Record<string, number>;
+  deleteRemoteOnExpire?: boolean;
+  deleteBatchSize?: number;
+};
+
+async function readGlobals(db: any): Promise<Globals> {
+  const record = await db
+    .query("globals")
+    .withIndex("by_singleton", (q: any) => q.eq("singleton", GLOBALS_ID))
+    .unique();
+
+  if (!record) return {};
+
+  return {
+    uploadthingApiKey: record.uploadthingApiKey,
+    defaultTtlMs: record.defaultTtlMs,
+    ttlByMimeType: record.ttlByMimeType,
+    ttlByFileType: record.ttlByFileType,
+    deleteRemoteOnExpire: record.deleteRemoteOnExpire,
+    deleteBatchSize: record.deleteBatchSize,
+  };
+}
+
+export async function loadGlobals(ctx: { db: any }): Promise<Globals> {
+  return await readGlobals(ctx.db);
+}
+
+export const getGlobalsInternal = internalQuery({
+  args: {},
+  handler: async (ctx) => {
+    return await readGlobals(ctx.db);
+  },
+});
+
+export function computeExpiresAt(params: {
+  now: number;
+  mimeType?: string;
+  fileType?: string;
+  expiresAt?: number;
+  ttlMs?: number;
+  globals?: Globals;
+}): number | undefined {
+  if (params.expiresAt !== undefined) return params.expiresAt;
+  if (params.ttlMs !== undefined) return params.now + params.ttlMs;
+
+  const globals = params.globals;
+  if (!globals) return undefined;
+
+  if (params.fileType && globals.ttlByFileType?.[params.fileType] !== undefined) {
+    return params.now + globals.ttlByFileType[params.fileType];
+  }
+
+  if (params.mimeType && globals.ttlByMimeType?.[params.mimeType] !== undefined) {
+    return params.now + globals.ttlByMimeType[params.mimeType];
+  }
+
+  if (globals.defaultTtlMs !== undefined) {
+    return params.now + globals.defaultTtlMs;
+  }
+
+  return undefined;
+}
+
+export const setConfig = mutation({
+  args: {
+    config: configUpdateValidator,
+    replace: v.optional(v.boolean()),
+  },
+  handler: async (ctx, args) => {
+    const existing = await ctx.db
+      .query("globals")
+      .withIndex("by_singleton", (q) => q.eq("singleton", GLOBALS_ID))
+      .unique();
+
+    if (!existing) {
+      const record = { singleton: GLOBALS_ID, ...args.config };
+      await ctx.db.insert("globals", record);
+      return { created: true };
+    }
+
+    const update = args.replace
+      ? { singleton: GLOBALS_ID, ...args.config }
+      : { ...existing, ...args.config };
+    delete (update as any)._id;
+    delete (update as any)._creationTime;
+
+    await ctx.db.patch(existing._id, update);
+    return { created: false };
+  },
+});
+
+export const getConfig = query({
+  args: {},
+  handler: async (ctx) => {
+    const globals = await loadGlobals(ctx);
+    return {
+      defaultTtlMs: globals.defaultTtlMs,
+      ttlByMimeType: globals.ttlByMimeType,
+      ttlByFileType: globals.ttlByFileType,
+      deleteRemoteOnExpire: globals.deleteRemoteOnExpire,
+      deleteBatchSize: globals.deleteBatchSize,
+      hasApiKey: Boolean(globals.uploadthingApiKey),
+    };
+  },
+});
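A sketch of seeding the singleton config from the host app. The exact shape accepted by configUpdateValidator lives in types.ts (not shown in this diff); the fields below mirror the Globals type above, and the mount name is again assumed.

// Illustrative host-side mutation.
import { mutation } from "./_generated/server";
import { components } from "./_generated/api";

export const configureUploadTracking = mutation({
  args: {},
  handler: async (ctx) => {
    await ctx.runMutation(components.uploadthingtrack.config.setConfig, {
      config: {
        uploadthingApiKey: process.env.UPLOADTHING_SECRET, // assumes the key is set as a Convex env var
        defaultTtlMs: 1000 * 60 * 60 * 24 * 30, // 30 days unless overridden
        ttlByFileType: { avatar: 1000 * 60 * 60 * 24 * 365 },
        ttlByMimeType: { "application/pdf": 1000 * 60 * 60 * 24 * 7 },
        deleteBatchSize: 200,
      },
      // replace: true would overwrite the stored record instead of merging into it.
    });
  },
});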
package/src/component/files.ts
@@ -0,0 +1,186 @@
+import { mutation, internalMutation } from "./_generated/server";
+import { v } from "convex/values";
+import { computeExpiresAt, loadGlobals } from "./config";
+import { accessRuleValidator, fileInfoValidator, fileUpsertOptionsValidator } from "./types";
+import { sanitizeAccessRule } from "./access";
+
+async function upsertFileRecord(ctx: any, params: {
+  file: {
+    key: string;
+    url: string;
+    name: string;
+    size: number;
+    mimeType: string;
+    uploadedAt?: number;
+    fileType?: string;
+    customId?: string;
+  };
+  userId: string;
+  options?: {
+    tags?: string[];
+    folder?: string;
+    access?: any;
+    metadata?: any;
+    expiresAt?: number;
+    ttlMs?: number;
+    fileType?: string;
+  };
+}) {
+  const now = Date.now();
+  const globals = await loadGlobals(ctx);
+  const uploadedAt = params.file.uploadedAt ?? now;
+  const fileType = params.options?.fileType ?? params.file.fileType;
+  const expiresAt = computeExpiresAt({
+    now: uploadedAt,
+    mimeType: params.file.mimeType,
+    fileType,
+    expiresAt: params.options?.expiresAt,
+    ttlMs: params.options?.ttlMs,
+    globals,
+  });
+
+  const existing = await ctx.db
+    .query("files")
+    .withIndex("by_key", (q: any) => q.eq("key", params.file.key))
+    .unique();
+
+  const patch: any = {
+    url: params.file.url,
+    name: params.file.name,
+    size: params.file.size,
+    mimeType: params.file.mimeType,
+    uploadedAt,
+    userId: params.userId,
+  };
+
+  if (params.file.customId !== undefined) patch.customId = params.file.customId;
+  if (fileType !== undefined) patch.fileType = fileType;
+
+  if (params.options?.tags !== undefined) patch.tags = params.options.tags;
+  if (params.options?.folder !== undefined) patch.folder = params.options.folder;
+  if (params.options?.metadata !== undefined) patch.metadata = params.options.metadata;
+  if (params.options?.access !== undefined) {
+    patch.access = sanitizeAccessRule(params.options.access);
+  }
+  if (expiresAt !== undefined) patch.expiresAt = expiresAt;
+
+  if (existing) {
+    patch.replacedAt = now;
+    await ctx.db.patch(existing._id, patch);
+    return existing._id;
+  }
+
+  return await ctx.db.insert("files", {
+    key: params.file.key,
+    ...patch,
+  });
+}
+
+export const upsertFile = mutation({
+  args: {
+    file: fileInfoValidator,
+    userId: v.string(),
+    options: v.optional(fileUpsertOptionsValidator),
+  },
+  handler: async (ctx, args) => {
+    return await upsertFileRecord(ctx, {
+      file: args.file,
+      userId: args.userId,
+      options: args.options,
+    });
+  },
+});
+
+export const internalUpsertFile = internalMutation({
+  args: {
+    file: fileInfoValidator,
+    userId: v.string(),
+    options: v.optional(fileUpsertOptionsValidator),
+  },
+  handler: async (ctx, args) => {
+    return await upsertFileRecord(ctx, {
+      file: args.file,
+      userId: args.userId,
+      options: args.options,
+    });
+  },
+});
+
+export const setFileAccess = mutation({
+  args: {
+    key: v.string(),
+    access: v.optional(v.union(accessRuleValidator, v.null())),
+  },
+  handler: async (ctx, args) => {
+    const existing = await ctx.db
+      .query("files")
+      .withIndex("by_key", (q) => q.eq("key", args.key))
+      .unique();
+
+    if (!existing) return null;
+
+    await ctx.db.patch(existing._id, {
+      access: args.access === null ? undefined : sanitizeAccessRule(args.access),
+    });
+
+    return existing._id;
+  },
+});
+
+export const setFolderAccess = mutation({
+  args: {
+    folder: v.string(),
+    access: v.optional(v.union(accessRuleValidator, v.null())),
+  },
+  handler: async (ctx, args) => {
+    const existing = await ctx.db
+      .query("folderRules")
+      .withIndex("by_folder", (q) => q.eq("folder", args.folder))
+      .unique();
+
+    if (args.access === null) {
+      if (existing) {
+        await ctx.db.delete(existing._id);
+      }
+      return null;
+    }
+
+    const access = sanitizeAccessRule(args.access);
+    if (!access) {
+      return null;
+    }
+
+    const updatedAt = Date.now();
+
+    if (existing) {
+      await ctx.db.patch(existing._id, {
+        access,
+        updatedAt,
+      });
+      return existing._id;
+    }
+
+    return await ctx.db.insert("folderRules", {
+      folder: args.folder,
+      access,
+      updatedAt,
+    });
+  },
+});
+
+export const deleteFilesByKey = internalMutation({
+  args: {
+    keys: v.array(v.string()),
+  },
+  handler: async (ctx, args) => {
+    for (const key of args.keys) {
+      const existing = await ctx.db
+        .query("files")
+        .withIndex("by_key", (q) => q.eq("key", key))
+        .unique();
+      if (existing) {
+        await ctx.db.delete(existing._id);
+      }
+    }
+  },
+});
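For uploads tracked outside the webhook path, the public upsertFile mutation can be called from the host app directly. The field names below are inferred from upsertFileRecord's parameter type rather than from types.ts (not shown), and the mount name is assumed.

// Illustrative host-side mutation recording an upload result.
import { mutation } from "./_generated/server";
import { v } from "convex/values";
import { components } from "./_generated/api";

export const recordUpload = mutation({
  args: {
    key: v.string(),
    url: v.string(),
    name: v.string(),
    size: v.number(),
    mimeType: v.string(),
  },
  handler: async (ctx, args) => {
    return await ctx.runMutation(components.uploadthingtrack.files.upsertFile, {
      file: args,
      userId: "user_123", // in practice, derive this from ctx.auth
      options: {
        folder: "invoices",
        tags: ["billing"],
        access: { visibility: "restricted", allowUserIds: ["user_456"] },
        ttlMs: 1000 * 60 * 60 * 24 * 7, // keep the record for one week
      },
    });
  },
});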
package/src/component/queries.ts
@@ -0,0 +1,121 @@
+import { query, internalQuery } from "./_generated/server";
+import { v } from "convex/values";
+import { canAccess } from "./access";
+
+async function getFolderRule(ctx: any, folder?: string) {
+  if (!folder) return undefined;
+  const rule = await ctx.db
+    .query("folderRules")
+    .withIndex("by_folder", (q: any) => q.eq("folder", folder))
+    .unique();
+  return rule?.access;
+}
+
+export const getFileByKey = query({
+  args: {
+    key: v.string(),
+    viewerUserId: v.optional(v.string()),
+  },
+  handler: async (ctx, args) => {
+    const file = await ctx.db
+      .query("files")
+      .withIndex("by_key", (q) => q.eq("key", args.key))
+      .unique();
+
+    if (!file) return null;
+
+    const folderRule = await getFolderRule(ctx, file.folder);
+    const allowed = canAccess({
+      ownerId: file.userId,
+      viewerId: args.viewerUserId,
+      fileRule: file.access,
+      folderRule,
+    });
+
+    if (!allowed) return null;
+
+    return file;
+  },
+});
+
+export const listFiles = query({
+  args: {
+    ownerUserId: v.string(),
+    viewerUserId: v.optional(v.string()),
+    mimeType: v.optional(v.string()),
+    tag: v.optional(v.string()),
+    folder: v.optional(v.string()),
+    includeExpired: v.optional(v.boolean()),
+    limit: v.optional(v.number()),
+  },
+  handler: async (ctx, args) => {
+    const query = ctx.db
+      .query("files")
+      .withIndex("by_user_uploadedAt", (q: any) => q.eq("userId", args.ownerUserId))
+      .order("desc");
+
+    const results = [] as any[];
+    const now = Date.now();
+    const limit = args.limit ?? 50;
+    const folderCache = new Map<string, any>();
+
+    for await (const file of query) {
+      if (!args.includeExpired && file.expiresAt !== undefined && file.expiresAt <= now) {
+        continue;
+      }
+      if (args.mimeType && file.mimeType !== args.mimeType) {
+        continue;
+      }
+      if (args.tag && !file.tags?.includes(args.tag)) {
+        continue;
+      }
+      if (args.folder && file.folder !== args.folder) {
+        continue;
+      }
+
+      let folderRule;
+      if (file.folder) {
+        if (folderCache.has(file.folder)) {
+          folderRule = folderCache.get(file.folder);
+        } else {
+          folderRule = await getFolderRule(ctx, file.folder);
+          folderCache.set(file.folder, folderRule);
+        }
+      }
+      const allowed = canAccess({
+        ownerId: file.userId,
+        viewerId: args.viewerUserId,
+        fileRule: file.access,
+        folderRule,
+      });
+
+      if (!allowed) continue;
+
+      results.push(file);
+      if (results.length >= limit) break;
+    }
+
+    return results;
+  },
+});
+
+export const expiredBatch = internalQuery({
+  args: {
+    now: v.number(),
+    limit: v.number(),
+  },
+  handler: async (ctx, args) => {
+    const query = ctx.db
+      .query("files")
+      .withIndex("by_expiresAt", (q: any) => q.lte("expiresAt", args.now))
+      .order("asc");
+
+    const expired = [] as { key: string; _id: string }[];
+    for await (const file of query) {
+      expired.push({ key: file.key, _id: file._id });
+      if (expired.length >= args.limit) break;
+    }
+
+    return expired;
+  },
+});
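Reading back goes through these access-aware queries. A sketch of listing a user's files as the signed-in viewer from a host query, with the same assumed mount name.

// Illustrative host-side query.
import { query } from "./_generated/server";
import { v } from "convex/values";
import { components } from "./_generated/api";

export const visibleFiles = query({
  args: { ownerUserId: v.string() },
  handler: async (ctx, args) => {
    const identity = await ctx.auth.getUserIdentity();
    return await ctx.runQuery(components.uploadthingtrack.queries.listFiles, {
      ownerUserId: args.ownerUserId,
      viewerUserId: identity?.subject, // undefined for anonymous viewers
      includeExpired: false,
      limit: 25,
    });
  },
});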