@m5kdev/backend 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +1 -1
- package/CHANGELOG.md +18 -0
- package/dist/src/lib/posthog.js +7 -0
- package/dist/src/lib/sentry.js +9 -0
- package/dist/src/modules/access/access.repository.js +32 -0
- package/dist/src/modules/access/access.service.js +51 -0
- package/dist/src/modules/access/access.test.js +182 -0
- package/dist/src/modules/access/access.utils.js +20 -0
- package/dist/src/modules/ai/ai.db.js +39 -0
- package/dist/src/modules/ai/ai.prompt.js +30 -0
- package/dist/src/modules/ai/ai.repository.js +26 -0
- package/dist/src/modules/ai/ai.router.js +132 -0
- package/dist/src/modules/ai/ai.service.js +207 -0
- package/dist/src/modules/ai/ai.trpc.d.ts +5 -5
- package/dist/src/modules/ai/ai.trpc.js +20 -0
- package/dist/src/modules/ai/ideogram/ideogram.constants.js +167 -0
- package/dist/src/modules/ai/ideogram/ideogram.dto.js +49 -0
- package/dist/src/modules/ai/ideogram/ideogram.prompt.js +860 -0
- package/dist/src/modules/ai/ideogram/ideogram.repository.js +46 -0
- package/dist/src/modules/ai/ideogram/ideogram.service.js +11 -0
- package/dist/src/modules/auth/auth.db.js +215 -0
- package/dist/src/modules/auth/auth.dto.js +38 -0
- package/dist/src/modules/auth/auth.lib.d.ts +4 -4
- package/dist/src/modules/auth/auth.lib.js +284 -0
- package/dist/src/modules/auth/auth.middleware.js +52 -0
- package/dist/src/modules/auth/auth.repository.js +541 -0
- package/dist/src/modules/auth/auth.service.js +201 -0
- package/dist/src/modules/auth/auth.trpc.d.ts +18 -18
- package/dist/src/modules/auth/auth.trpc.js +157 -0
- package/dist/src/modules/auth/auth.utils.js +97 -0
- package/dist/src/modules/base/base.abstract.js +53 -0
- package/dist/src/modules/base/base.dto.js +112 -0
- package/dist/src/modules/base/base.grants.js +123 -0
- package/dist/src/modules/base/base.grants.test.js +668 -0
- package/dist/src/modules/base/base.repository.js +307 -0
- package/dist/src/modules/base/base.service.js +109 -0
- package/dist/src/modules/base/base.types.js +2 -0
- package/dist/src/modules/billing/billing.db.js +29 -0
- package/dist/src/modules/billing/billing.repository.js +235 -0
- package/dist/src/modules/billing/billing.router.js +56 -0
- package/dist/src/modules/billing/billing.service.js +147 -0
- package/dist/src/modules/billing/billing.trpc.d.ts +5 -5
- package/dist/src/modules/billing/billing.trpc.js +17 -0
- package/dist/src/modules/clay/clay.repository.js +26 -0
- package/dist/src/modules/clay/clay.service.js +24 -0
- package/dist/src/modules/connect/connect.db.js +30 -0
- package/dist/src/modules/connect/connect.dto.js +36 -0
- package/dist/src/modules/connect/connect.linkedin.js +53 -0
- package/dist/src/modules/connect/connect.oauth.js +198 -0
- package/dist/src/modules/connect/connect.repository.d.ts +7 -7
- package/dist/src/modules/connect/connect.repository.js +54 -0
- package/dist/src/modules/connect/connect.router.js +54 -0
- package/dist/src/modules/connect/connect.service.d.ts +14 -14
- package/dist/src/modules/connect/connect.service.js +114 -0
- package/dist/src/modules/connect/connect.trpc.d.ts +10 -10
- package/dist/src/modules/connect/connect.trpc.js +21 -0
- package/dist/src/modules/connect/connect.types.js +2 -0
- package/dist/src/modules/crypto/crypto.db.js +17 -0
- package/dist/src/modules/crypto/crypto.repository.js +10 -0
- package/dist/src/modules/crypto/crypto.service.js +52 -0
- package/dist/src/modules/email/email.service.js +107 -0
- package/dist/src/modules/file/file.repository.js +79 -0
- package/dist/src/modules/file/file.router.js +99 -0
- package/dist/src/modules/file/file.service.js +150 -0
- package/dist/src/modules/recurrence/recurrence.db.js +66 -0
- package/dist/src/modules/recurrence/recurrence.repository.js +39 -0
- package/dist/src/modules/recurrence/recurrence.service.js +70 -0
- package/dist/src/modules/recurrence/recurrence.trpc.d.ts +15 -15
- package/dist/src/modules/recurrence/recurrence.trpc.js +65 -0
- package/dist/src/modules/social/social.dto.js +18 -0
- package/dist/src/modules/social/social.linkedin.js +427 -0
- package/dist/src/modules/social/social.linkedin.test.js +235 -0
- package/dist/src/modules/social/social.service.js +76 -0
- package/dist/src/modules/social/social.types.js +2 -0
- package/dist/src/modules/tag/tag.db.js +42 -0
- package/dist/src/modules/tag/tag.dto.js +9 -0
- package/dist/src/modules/tag/tag.repository.js +154 -0
- package/dist/src/modules/tag/tag.service.js +31 -0
- package/dist/src/modules/tag/tag.trpc.d.ts +5 -5
- package/dist/src/modules/tag/tag.trpc.js +47 -0
- package/dist/src/modules/utils/applyPagination.js +16 -0
- package/dist/src/modules/utils/applySorting.js +18 -0
- package/dist/src/modules/utils/getConditionsFromFilters.js +200 -0
- package/dist/src/modules/video/video.service.js +84 -0
- package/dist/src/modules/webhook/webhook.constants.js +10 -0
- package/dist/src/modules/webhook/webhook.db.js +17 -0
- package/dist/src/modules/webhook/webhook.dto.js +7 -0
- package/dist/src/modules/webhook/webhook.repository.js +56 -0
- package/dist/src/modules/webhook/webhook.router.js +30 -0
- package/dist/src/modules/webhook/webhook.service.js +68 -0
- package/dist/src/modules/workflow/workflow.db.js +30 -0
- package/dist/src/modules/workflow/workflow.repository.js +105 -0
- package/dist/src/modules/workflow/workflow.service.js +37 -0
- package/dist/src/modules/workflow/workflow.trpc.d.ts +5 -5
- package/dist/src/modules/workflow/workflow.trpc.js +21 -0
- package/dist/src/modules/workflow/workflow.types.js +2 -0
- package/dist/src/modules/workflow/workflow.utils.js +173 -0
- package/dist/src/test/stubs/utils.js +5 -0
- package/dist/src/trpc/context.d.ts +5 -5
- package/dist/src/trpc/context.js +17 -0
- package/dist/src/trpc/index.js +6 -0
- package/dist/src/trpc/procedures.d.ts +56 -56
- package/dist/src/trpc/procedures.js +32 -0
- package/dist/src/trpc/utils.js +20 -0
- package/dist/src/types.d.ts +33 -33
- package/dist/src/types.js +13 -0
- package/dist/src/utils/errors.js +104 -0
- package/dist/src/utils/logger.js +11 -0
- package/dist/src/utils/posthog.js +31 -0
- package/dist/src/utils/types.js +2 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +3 -3
- package/tsconfig.json +2 -0
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.uploadRouter = void 0;
const tslib_1 = require("tslib");
const node_path_1 = tslib_1.__importDefault(require("node:path"));
const file_constants_1 = require("@m5kdev/commons/modules/file/file.constants");
const body_parser_1 = tslib_1.__importDefault(require("body-parser"));
const express_1 = tslib_1.__importDefault(require("express"));
const multer_1 = tslib_1.__importDefault(require("multer"));
const uuid_1 = require("uuid");
const file_repository_1 = require("#modules/file/file.repository");
const file_service_1 = require("#modules/file/file.service");
const fileRepository = new file_repository_1.FileRepository();
const fileService = new file_service_1.FileService({ file: fileRepository });
/**
 * Returns true when `file`'s reported mimetype is allowed for the upload
 * category `type` (looked up in the shared fileTypes constants).
 */
function validateMimeType(type, file) {
    return file_constants_1.fileTypes[type]?.mimetypes.includes(file.mimetype);
}
/**
 * Extracts the extension from the uploaded file's original name.
 * NOTE: for a name without a dot this returns the whole name; callers only
 * use it to suffix a uuid-generated filename, so that is harmless here.
 */
function getFileExtension(file) {
    return file.originalname.split(".").pop();
}
// Local disk storage: every upload is renamed to a fresh uuid so user-supplied
// names never reach the filesystem.
const storage = multer_1.default.diskStorage({
    destination: (_req, _file, cb) => {
        cb(null, node_path_1.default.join(__dirname, "..", "uploads"));
    },
    filename: (_req, file, cb) => {
        cb(null, `${(0, uuid_1.v4)()}.${getFileExtension(file)}`);
    },
});
// Reject any upload whose mimetype is not allowed for the :type route param.
const fileFilter = (req, file, cb) => {
    const { type } = req.params;
    if (type && validateMimeType(type, file)) {
        cb(null, true);
    }
    else {
        cb(new Error("Invalid file type"));
    }
};
const upload = (0, multer_1.default)({ storage, fileFilter });
const uploadRouter = express_1.default.Router();
exports.uploadRouter = uploadRouter;
// POST /file/:type — store a single multipart "file" field on local disk and
// return its public URL plus basic metadata.
uploadRouter.post("/file/:type", upload.single("file"), (req, res) => {
    const { file } = req;
    if (!file) {
        return res.status(400).json({ error: "No file uploaded" });
    }
    return res.json({
        url: `${process.env.VITE_SERVER_URL}/upload/file/${file.filename}`,
        mimetype: file.mimetype,
        size: file.size,
    });
});
// GET /file/:filename — serve a previously uploaded file from local disk.
// SECURITY FIX: Express percent-decodes route params, so an encoded "../"
// (e.g. "%2e%2e%2f") in :filename could previously escape the uploads
// directory. Reducing the param to its basename confines lookups to the
// uploads folder; stored names are uuid-based and contain no separators.
uploadRouter.get("/file/:filename", (req, res) => {
    const safeName = node_path_1.default.basename(req.params.filename);
    res.sendFile(node_path_1.default.join(__dirname, "..", "uploads", safeName));
});
// GET /files/:path — return a short-lived presigned S3 download URL for the
// given object path.
uploadRouter.get("/files/:path", async (req, res) => {
    try {
        const url = await fileService.getS3DownloadUrl(req.params.path);
        if (url.isErr()) {
            console.error(url.error);
            return res.status(500).json({ error: url.error.message });
        }
        return res.json({ url: url.value });
    }
    catch (err) {
        console.error(err);
        return res.status(500).json({ error: err.message || "Failed to generate presigned URL" });
    }
});
// POST /s3-presigned-url — return a presigned S3 upload URL for a
// client-side direct upload. Body: { filename, filetype }.
uploadRouter.post("/s3-presigned-url", body_parser_1.default.json(), async (req, res) => {
    const { filename, filetype } = req.body;
    if (!filename || !filetype) {
        return res.status(400).json({ error: "Missing filename or filetype" });
    }
    try {
        const url = await fileService.getS3UploadUrl(filename, filetype);
        if (url.isErr()) {
            return res.status(500).json({ error: url.error.message });
        }
        return res.json({ url: url.value });
    }
    catch (err) {
        console.error(err);
        return res.status(500).json({ error: err.message || "Failed to generate presigned URL" });
    }
});
// DELETE /files/:path(*) — delete an S3 object; "(*)" lets the param span
// slashes so nested keys are accepted.
uploadRouter.delete("/files/:path(*)", async (req, res) => {
    try {
        const result = await fileService.deleteS3Object(req.params.path);
        if (result.isErr()) {
            console.error(result.error);
            return res.status(500).json({ error: result.error.message });
        }
        return res.json({ success: true });
    }
    catch (err) {
        console.error(err);
        return res.status(500).json({ error: err.message || "Failed to delete S3 object" });
    }
});
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FileService = void 0;
const tslib_1 = require("tslib");
const node_fs_1 = require("node:fs");
const promises_1 = require("node:fs/promises");
const node_os_1 = require("node:os");
const node_path_1 = tslib_1.__importStar(require("node:path"));
const node_stream_1 = require("node:stream");
const promises_2 = require("node:stream/promises");
const file_constants_1 = require("@m5kdev/commons/modules/file/file.constants");
const neverthrow_1 = require("neverthrow");
const uuid_1 = require("uuid");
const base_service_1 = require("#modules/base/base.service");
/**
 * File handling service: wraps the file repository's S3 operations and adds
 * local-file upload/download helpers. S3 locations are encoded as
 * "s3::<bucket>//<path>" strings.
 */
class FileService extends base_service_1.BaseService {
    /** True when `path` uses the internal "s3::" location encoding. */
    isS3Path(path) {
        return path.startsWith("s3::");
    }
    /**
     * Parses "s3::<bucket>//<path>" into { bucket, path }.
     * Returns BAD_REQUEST when the prefix or separator is missing.
     */
    parseS3Path(S3Path) {
        if (!this.isS3Path(S3Path)) {
            return this.error("BAD_REQUEST", "Invalid S3 path");
        }
        // FIX: split only at the FIRST "//" separator. The previous
        // String.split("//") also split on "//" sequences inside the object
        // key (truncating it), and silently yielded an undefined path when
        // no separator was present at all.
        const rest = S3Path.slice("s3::".length);
        const sep = rest.indexOf("//");
        if (sep === -1) {
            return this.error("BAD_REQUEST", "Invalid S3 path");
        }
        return (0, neverthrow_1.ok)({ bucket: rest.slice(0, sep), path: rest.slice(sep + 2) });
    }
    /** Encodes a bucket + object path into the "s3::" location format. */
    wrapS3Path(path, bucket) {
        return `s3::${bucket}//${path}`;
    }
    /** Presigned PUT URL for `filename`; expires in `expiresIn` seconds (default 5 min). */
    getS3UploadUrl(filename, filetype, expiresIn = 60 * 5) {
        return this.repository.file.getS3UploadUrl(filename, filetype, expiresIn);
    }
    /** Presigned GET URL for `path`; expires in `expiresIn` seconds (default 5 min). */
    getS3DownloadUrl(path, expiresIn = 60 * 5) {
        return this.repository.file.getS3DownloadUrl(path, expiresIn);
    }
    /** Fetches the raw S3 object for `path`. */
    getS3Object(path) {
        return this.repository.file.getS3Object(path);
    }
    /** Deletes the S3 object at `path`. */
    deleteS3Object(path) {
        return this.repository.file.deleteS3Object(path);
    }
    /**
     * Uploads a local file to S3 under a fresh uuid-based key, inferring the
     * Content-Type from the file extension.
     * @returns ok(filename), or ok(downloadUrl) when `returnDownloadUrl` is true.
     */
    async uploadFileToS3(localPath, returnDownloadUrl = false) {
        return this.throwableAsync(async () => {
            const extension = localPath.split(".").pop()?.toLowerCase();
            const filename = `${(0, uuid_1.v4)()}${extension ? `.${extension}` : ""}`;
            // FIX: several entries previously used non-standard MIME types
            // ("video/mov", "video/avi", "video/mkv", "audio/mp3", "audio/m4a");
            // corrected to the registered IANA media types.
            const mimeByExt = {
                jpg: "image/jpeg",
                jpeg: "image/jpeg",
                png: "image/png",
                webp: "image/webp",
                mp4: "video/mp4",
                mov: "video/quicktime",
                avi: "video/x-msvideo",
                mkv: "video/x-matroska",
                webm: "video/webm",
                mp3: "audio/mpeg",
                wav: "audio/wav",
                m4a: "audio/mp4",
            };
            const filetype = (extension && mimeByExt[extension]) || "application/octet-stream";
            const presigned = await this.getS3UploadUrl(filename, filetype);
            if (presigned.isErr())
                return (0, neverthrow_1.err)(presigned.error);
            const file = await (0, promises_1.readFile)(localPath);
            const res = await fetch(presigned.value, {
                method: "PUT",
                body: file,
                headers: { "Content-Type": filetype },
            });
            if (!res.ok) {
                return this.error("INTERNAL_SERVER_ERROR", `Failed to upload to S3: ${res.status}`);
            }
            if (returnDownloadUrl) {
                const downloadUrl = await this.getS3DownloadUrl(filename);
                if (downloadUrl.isErr())
                    return (0, neverthrow_1.err)(downloadUrl.error);
                return (0, neverthrow_1.ok)(downloadUrl.value);
            }
            return (0, neverthrow_1.ok)(filename);
        });
    }
    /**
     * Downloads an S3 object to a temp file and returns its local path.
     * Handles the several body shapes the AWS SDK may return (SdkStream,
     * Node Readable, web ReadableStream, Blob-like, arrayBuffer-capable).
     */
    async downloadS3ToFile(s3Path) {
        return this.throwableAsync(async () => {
            const extension = s3Path.split(".").pop();
            const destinationPath = node_path_1.default.join((0, node_os_1.tmpdir)(), "s3-downloads", `${(0, uuid_1.v4)()}${extension ? `.${extension}` : ""}`);
            const result = await this.repository.file.getS3Object(s3Path);
            if (result.isErr())
                return (0, neverthrow_1.err)(result.error);
            const body = result.value.Body;
            if (!body)
                return this.error("NOT_FOUND", "S3 object body is empty");
            await (0, promises_1.mkdir)((0, node_path_1.dirname)(destinationPath), { recursive: true });
            // AWS SDK v3 SdkStream has transformToByteArray method - use it for reliable handling
            if (typeof body === "object" &&
                "transformToByteArray" in body &&
                typeof body.transformToByteArray === "function") {
                const bytes = await body.transformToByteArray();
                await (0, promises_1.writeFile)(destinationPath, bytes);
                return (0, neverthrow_1.ok)(destinationPath);
            }
            // Fallback: try streaming approaches
            const writeStream = (0, node_fs_1.createWriteStream)(destinationPath);
            let input = null;
            const unknownBody = body;
            if (typeof unknownBody === "object" &&
                unknownBody !== null &&
                "pipe" in unknownBody &&
                typeof unknownBody.pipe === "function") {
                // Node Readable stream.
                input = unknownBody;
            }
            else if (typeof unknownBody === "object" &&
                unknownBody !== null &&
                "getReader" in unknownBody &&
                typeof unknownBody.getReader === "function") {
                // Web ReadableStream — adapt to a Node Readable.
                input = node_stream_1.Readable.fromWeb(unknownBody);
            }
            else if (typeof unknownBody === "object" &&
                unknownBody !== null &&
                "stream" in unknownBody &&
                typeof unknownBody.stream === "function") {
                // Blob-like object exposing .stream().
                input = node_stream_1.Readable.fromWeb(unknownBody.stream());
            }
            if (input) {
                await (0, promises_2.pipeline)(input, writeStream);
                return (0, neverthrow_1.ok)(destinationPath);
            }
            // Last resort: buffer the whole body in memory.
            if (typeof unknownBody === "object" &&
                unknownBody !== null &&
                "arrayBuffer" in unknownBody &&
                typeof unknownBody.arrayBuffer === "function") {
                const buffer = Buffer.from(await unknownBody.arrayBuffer());
                await (0, promises_2.pipeline)(node_stream_1.Readable.from(buffer), writeStream);
                return (0, neverthrow_1.ok)(destinationPath);
            }
            return this.error("INTERNAL_SERVER_ERROR", "Unsupported S3 body type");
        });
    }
    /**
     * Maps a path's extension to its configured file-type category.
     * @returns { fileType, extension } or undefined when unknown.
     */
    getFileType(path) {
        // determine the type of the file
        const extension = path.split(".").pop();
        if (!extension)
            return undefined;
        for (const [key, value] of Object.entries(file_constants_1.fileTypes)) {
            if (value.extensions.includes(extension)) {
                return { fileType: key, extension };
            }
        }
        return undefined;
    }
}
exports.FileService = FileService;
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.recurrenceRules = exports.recurrence = void 0;
const sqlite_core_1 = require("drizzle-orm/sqlite-core");
const uuid_1 = require("uuid");
const auth_db_1 = require("#modules/auth/auth.db");
// Drizzle SQLite schema for recurrences. A `recurrence` row is the named
// container (scoped to a user/organization/team); its RFC 5545-style RRULE
// parameters live in one or more `recurrence_rules` rows.
exports.recurrence = (0, sqlite_core_1.sqliteTable)("recurrence", {
    id: (0, sqlite_core_1.text)("id").primaryKey().$default(uuid_1.v4),
    // Ownership scope; all three cascade-delete with their parent row.
    userId: (0, sqlite_core_1.text)("user_id").references(() => auth_db_1.users.id, { onDelete: "cascade" }),
    organizationId: (0, sqlite_core_1.text)("organization_id").references(() => auth_db_1.organizations.id, {
        onDelete: "cascade",
    }),
    teamId: (0, sqlite_core_1.text)("team_id").references(() => auth_db_1.teams.id, { onDelete: "cascade" }),
    name: (0, sqlite_core_1.text)("name"),
    // Free-form discriminator for what kind of thing recurs.
    kind: (0, sqlite_core_1.text)("kind"),
    enabled: (0, sqlite_core_1.integer)("enabled", { mode: "boolean" }).notNull().default(true),
    createdAt: (0, sqlite_core_1.integer)("created_at", { mode: "timestamp" })
        .notNull()
        .$default(() => new Date()),
    updatedAt: (0, sqlite_core_1.integer)("updated_at", { mode: "timestamp" })
        .notNull()
        .$default(() => new Date()),
    // Arbitrary JSON payload attached by callers.
    metadata: (0, sqlite_core_1.text)("metadata", { mode: "json" }).$type(),
});
// One RRULE parameter set belonging to a recurrence (cascade-deleted with it).
exports.recurrenceRules = (0, sqlite_core_1.sqliteTable)("recurrence_rules", {
    id: (0, sqlite_core_1.text)("id").primaryKey().$default(uuid_1.v4),
    createdAt: (0, sqlite_core_1.integer)("created_at", { mode: "timestamp" })
        .notNull()
        .$default(() => new Date()),
    updatedAt: (0, sqlite_core_1.integer)("updated_at", { mode: "timestamp" })
        .notNull()
        .$default(() => new Date()),
    recurrenceId: (0, sqlite_core_1.text)("recurrence_id").references(() => exports.recurrence.id, {
        onDelete: "cascade",
    }),
    // Required: frequency
    freq: (0, sqlite_core_1.integer)("freq").notNull(),
    // Start date; base for recurrence and source for missing instance params
    dtstart: (0, sqlite_core_1.integer)("dtstart", { mode: "timestamp" }),
    // Interval between each freq iteration (default 1)
    interval: (0, sqlite_core_1.integer)("interval").notNull().default(1),
    // Week start: MO, TU, WE, ... or integer 0–6
    wkst: (0, sqlite_core_1.integer)("wkst"),
    // How many occurrences to generate
    count: (0, sqlite_core_1.integer)("count"),
    // Last occurrence date (inclusive)
    until: (0, sqlite_core_1.integer)("until", { mode: "timestamp" }),
    // IANA timezone string (Intl API)
    tzid: (0, sqlite_core_1.text)("tzid"),
    // BYSETPOS: occurrence number(s) in the frequency period (e.g. -1 = last)
    bysetpos: (0, sqlite_core_1.text)("bysetpos", { mode: "json" }).$type(),
    // BYMONTH: month(s) 1–12
    bymonth: (0, sqlite_core_1.text)("bymonth", { mode: "json" }).$type(),
    // BYMONTHDAY: day(s) of month
    bymonthday: (0, sqlite_core_1.text)("bymonthday", { mode: "json" }).$type(),
    // BYYEARDAY: day(s) of year
    byyearday: (0, sqlite_core_1.text)("byyearday", { mode: "json" }).$type(),
    // BYWEEKNO: week number(s) (ISO8601)
    byweekno: (0, sqlite_core_1.text)("byweekno", { mode: "json" }).$type(),
    // BYWEEKDAY: weekday(s) 0–6, or nth e.g. { weekday: 4, n: 1 } for first Friday
    byweekday: (0, sqlite_core_1.text)("byweekday", { mode: "json" }).$type(),
    // BYHOUR, BYMINUTE, BYSECOND
    byhour: (0, sqlite_core_1.text)("byhour", { mode: "json" }).$type(),
    byminute: (0, sqlite_core_1.text)("byminute", { mode: "json" }).$type(),
    bysecond: (0, sqlite_core_1.text)("bysecond", { mode: "json" }).$type(),
});
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RecurrenceRulesRepository = exports.RecurrenceRepository = void 0;
const tslib_1 = require("tslib");
const neverthrow_1 = require("neverthrow");
const base_repository_1 = require("#modules/base/base.repository");
const recurrence = tslib_1.__importStar(require("#modules/recurrence/recurrence.db"));
const schema = { ...recurrence };
/** Repository for the `recurrence` table. */
class RecurrenceRepository extends base_repository_1.BaseTableRepository {
    /**
     * Atomically inserts a recurrence row together with its rule rows.
     * Runs inside `tx` when provided, otherwise on the repository's own ORM.
     * @returns ok({ recurrence, rules }) on success.
     */
    async createWithRules(recurrenceData, rulesData, tx) {
        return this.throwableAsync(async () => {
            const executor = tx ?? this.orm;
            return executor.transaction(async (txn) => {
                const inserted = await txn
                    .insert(this.table)
                    .values(recurrenceData)
                    .returning();
                const parent = inserted[0];
                if (!parent)
                    throw new Error("Failed to create recurrence");
                // Stamp each rule with the freshly created parent id.
                const linkedRules = rulesData.map((rule) => ({
                    ...rule,
                    recurrenceId: parent.id,
                }));
                let savedRules = [];
                if (linkedRules.length > 0) {
                    savedRules = await txn
                        .insert(this.schema.recurrenceRules)
                        .values(linkedRules)
                        .returning();
                }
                return (0, neverthrow_1.ok)({ recurrence: parent, rules: savedRules });
            });
        });
    }
}
exports.RecurrenceRepository = RecurrenceRepository;
/** Repository for the `recurrence_rules` table (base CRUD only). */
class RecurrenceRulesRepository extends base_repository_1.BaseTableRepository {
}
exports.RecurrenceRulesRepository = RecurrenceRulesRepository;
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RecurrenceService = void 0;
const neverthrow_1 = require("neverthrow");
const base_service_1 = require("#modules/base/base.service");
// Whitelist of rule fields accepted on insert; anything else in the incoming
// payload (ids, timestamps, unknown keys) is dropped.
const RECURRENCE_RULE_INSERT_KEYS = [
    "freq",
    "dtstart",
    "interval",
    "wkst",
    "count",
    "until",
    "tzid",
    "bysetpos",
    "bymonth",
    "bymonthday",
    "byyearday",
    "byweekno",
    "byweekday",
    "byhour",
    "byminute",
    "bysecond",
];
/**
 * Picks only the insertable RRULE fields from `rule`, skipping keys that are
 * absent or explicitly undefined.
 */
function mapRuleToInsert(rule) {
    const out = {};
    for (const key of RECURRENCE_RULE_INSERT_KEYS) {
        if (key in rule && rule[key] !== undefined) {
            out[key] = rule[key];
        }
    }
    return out;
}
/** CRUD service for recurrences and their rule rows. */
class RecurrenceService extends base_service_1.BaseService {
    /**
     * Creates a recurrence (scoped to the caller's user/org/team) together
     * with its rules in one transaction.
     */
    async create(data, ctx) {
        const recurrenceData = {
            name: data.name,
            kind: data.kind,
            enabled: data.enabled,
            metadata: data.metadata ?? null,
            userId: ctx.user?.id ?? null,
            // FIX: `ctx.session` was dereferenced unguarded while `ctx.user`
            // used optional chaining — a context without a session crashed
            // here instead of falling back to null scope.
            organizationId: ctx.session?.activeOrganizationId ?? null,
            teamId: ctx.session?.activeTeamId ?? null,
        };
        const rulesData = data.recurrenceRules.map(mapRuleToInsert);
        return this.repository.recurrence.createWithRules(recurrenceData, rulesData);
    }
    /** Lists recurrences, restricted to the calling user when one is present. */
    async list(query, ctx) {
        const queryWithUser = ctx?.user ? this.addUserFilter(ctx.user.id, query, "userId") : query;
        return this.repository.recurrence.queryList(queryWithUser);
    }
    /** Looks up a recurrence by id; ok(null) when not found. */
    async findById(id) {
        const result = await this.repository.recurrence.findById(id);
        if (result.isErr())
            return (0, neverthrow_1.err)(result.error);
        return (0, neverthrow_1.ok)(result.value ?? null);
    }
    /** Updates a recurrence row. */
    async update(data) {
        return this.repository.recurrence.update(data);
    }
    /** Updates a single rule row. */
    async updateRule(data) {
        return this.repository.recurrenceRules.update(data);
    }
    /** Deletes a recurrence by id (rules cascade in the DB). */
    async delete(data) {
        return this.repository.recurrence.deleteById(data.id);
    }
    /** Deletes a single rule by id. */
    async deleteRule(data) {
        return this.repository.recurrenceRules.deleteById(data.id);
    }
}
exports.RecurrenceService = RecurrenceService;
|
|
@@ -4,9 +4,9 @@ export declare function createRecurrenceTRPC(recurrenceService: RecurrenceServic
|
|
|
4
4
|
session: {
|
|
5
5
|
id: string;
|
|
6
6
|
userId: string;
|
|
7
|
-
expiresAt: Date;
|
|
8
|
-
createdAt: Date;
|
|
9
7
|
updatedAt: Date;
|
|
8
|
+
createdAt: Date;
|
|
9
|
+
expiresAt: Date;
|
|
10
10
|
token: string;
|
|
11
11
|
ipAddress: string | null;
|
|
12
12
|
userAgent: string | null;
|
|
@@ -18,13 +18,12 @@ export declare function createRecurrenceTRPC(recurrenceService: RecurrenceServic
|
|
|
18
18
|
};
|
|
19
19
|
user: {
|
|
20
20
|
name: string;
|
|
21
|
-
image: string | null;
|
|
22
21
|
id: string;
|
|
23
|
-
createdAt: Date;
|
|
24
22
|
updatedAt: Date;
|
|
25
23
|
email: string;
|
|
26
|
-
metadata: Record<string, unknown>;
|
|
27
24
|
emailVerified: boolean;
|
|
25
|
+
image: string | null;
|
|
26
|
+
createdAt: Date;
|
|
28
27
|
role: string | null;
|
|
29
28
|
banned: boolean | null;
|
|
30
29
|
banReason: string | null;
|
|
@@ -34,6 +33,7 @@ export declare function createRecurrenceTRPC(recurrenceService: RecurrenceServic
|
|
|
34
33
|
paymentPlanTier: string | null;
|
|
35
34
|
paymentPlanExpiresAt: Date | null;
|
|
36
35
|
preferences: string | null;
|
|
36
|
+
metadata: Record<string, unknown>;
|
|
37
37
|
onboarding: number | null;
|
|
38
38
|
flags: string | null;
|
|
39
39
|
};
|
|
@@ -82,11 +82,6 @@ export declare function createRecurrenceTRPC(recurrenceService: RecurrenceServic
|
|
|
82
82
|
recurrenceRules: {
|
|
83
83
|
interval: number;
|
|
84
84
|
freq: number;
|
|
85
|
-
count?: number | null | undefined;
|
|
86
|
-
dtstart?: Date | null | undefined;
|
|
87
|
-
wkst?: number | null | undefined;
|
|
88
|
-
until?: Date | null | undefined;
|
|
89
|
-
tzid?: string | null | undefined;
|
|
90
85
|
bysetpos?: number | number[] | null | undefined;
|
|
91
86
|
bymonth?: number | number[] | null | undefined;
|
|
92
87
|
bymonthday?: number | number[] | null | undefined;
|
|
@@ -96,6 +91,11 @@ export declare function createRecurrenceTRPC(recurrenceService: RecurrenceServic
|
|
|
96
91
|
byhour?: number | number[] | null | undefined;
|
|
97
92
|
byminute?: number | number[] | null | undefined;
|
|
98
93
|
bysecond?: number | number[] | null | undefined;
|
|
94
|
+
dtstart?: Date | null | undefined;
|
|
95
|
+
wkst?: number | null | undefined;
|
|
96
|
+
count?: number | null | undefined;
|
|
97
|
+
until?: Date | null | undefined;
|
|
98
|
+
tzid?: string | null | undefined;
|
|
99
99
|
}[];
|
|
100
100
|
metadata?: Record<string, unknown> | null | undefined;
|
|
101
101
|
};
|
|
@@ -182,11 +182,6 @@ export declare function createRecurrenceTRPC(recurrenceService: RecurrenceServic
|
|
|
182
182
|
id: string;
|
|
183
183
|
interval: number;
|
|
184
184
|
freq: number;
|
|
185
|
-
count?: number | null | undefined;
|
|
186
|
-
dtstart?: Date | null | undefined;
|
|
187
|
-
wkst?: number | null | undefined;
|
|
188
|
-
until?: Date | null | undefined;
|
|
189
|
-
tzid?: string | null | undefined;
|
|
190
185
|
bysetpos?: number | number[] | null | undefined;
|
|
191
186
|
bymonth?: number | number[] | null | undefined;
|
|
192
187
|
bymonthday?: number | number[] | null | undefined;
|
|
@@ -196,6 +191,11 @@ export declare function createRecurrenceTRPC(recurrenceService: RecurrenceServic
|
|
|
196
191
|
byhour?: number | number[] | null | undefined;
|
|
197
192
|
byminute?: number | number[] | null | undefined;
|
|
198
193
|
bysecond?: number | number[] | null | undefined;
|
|
194
|
+
dtstart?: Date | null | undefined;
|
|
195
|
+
wkst?: number | null | undefined;
|
|
196
|
+
count?: number | null | undefined;
|
|
197
|
+
until?: Date | null | undefined;
|
|
198
|
+
tzid?: string | null | undefined;
|
|
199
199
|
};
|
|
200
200
|
output: {
|
|
201
201
|
id: string;
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createRecurrenceTRPC = createRecurrenceTRPC;
const recurrence_schema_1 = require("@m5kdev/commons/modules/recurrence/recurrence.schema");
const query_schema_1 = require("@m5kdev/commons/modules/schemas/query.schema");
const zod_1 = require("zod");
const _trpc_1 = require("#trpc");
// Shape returned by `create`: the new recurrence plus its inserted rules.
const createdRecurrenceSchema = zod_1.z.object({
    recurrence: recurrence_schema_1.recurrenceSchema,
    rules: zod_1.z.array(recurrence_schema_1.recurrenceRulesSchema),
});
// Paginated list shape.
const recurrenceListSchema = zod_1.z.object({
    rows: zod_1.z.array(recurrence_schema_1.recurrenceSchema),
    total: zod_1.z.number(),
});
// Update payload must carry the target id on top of the update fields.
const recurrenceUpdateInput = recurrence_schema_1.updateRecurrenceSchema.extend({
    id: zod_1.z.string(),
});
// Both delete endpoints echo back the deleted id.
const deletedIdSchema = zod_1.z.object({ id: zod_1.z.string() });
/**
 * Builds the tRPC router exposing CRUD operations for recurrences and their
 * rules, delegating all work to `recurrenceService`.
 */
function createRecurrenceTRPC(recurrenceService) {
    const listProcedure = _trpc_1.procedure
        .input(query_schema_1.querySchema.optional())
        .output(recurrenceListSchema)
        .query(async ({ ctx, input }) => (0, _trpc_1.handleTRPCResult)(await recurrenceService.list(input, ctx)));
    const createProcedure = _trpc_1.procedure
        .input(recurrence_schema_1.createRecurrenceSchema)
        .output(createdRecurrenceSchema)
        .mutation(async ({ ctx, input }) => (0, _trpc_1.handleTRPCResult)(await recurrenceService.create(input, ctx)));
    const findByIdProcedure = _trpc_1.procedure
        .input(zod_1.z.object({ id: zod_1.z.string() }))
        .output(recurrence_schema_1.recurrenceSchema.nullable())
        .query(async ({ input }) => (0, _trpc_1.handleTRPCResult)(await recurrenceService.findById(input.id)));
    const updateProcedure = _trpc_1.procedure
        .input(recurrenceUpdateInput)
        .output(recurrence_schema_1.recurrenceSchema)
        .mutation(async ({ input }) => (0, _trpc_1.handleTRPCResult)(await recurrenceService.update(input)));
    const updateRuleProcedure = _trpc_1.procedure
        .input(recurrence_schema_1.updateRecurrenceRulesSchema)
        .output(recurrence_schema_1.recurrenceRulesSchema)
        .mutation(async ({ input }) => (0, _trpc_1.handleTRPCResult)(await recurrenceService.updateRule(input)));
    const deleteProcedure = _trpc_1.procedure
        .input(recurrence_schema_1.deleteRecurrenceSchema)
        .output(deletedIdSchema)
        .mutation(async ({ input }) => (0, _trpc_1.handleTRPCResult)(await recurrenceService.delete(input)));
    const deleteRuleProcedure = _trpc_1.procedure
        .input(recurrence_schema_1.deleteRecurrenceRulesSchema)
        .output(deletedIdSchema)
        .mutation(async ({ input }) => (0, _trpc_1.handleTRPCResult)(await recurrenceService.deleteRule(input)));
    return (0, _trpc_1.router)({
        list: listProcedure,
        create: createProcedure,
        findById: findByIdProcedure,
        update: updateProcedure,
        updateRule: updateRuleProcedure,
        delete: deleteProcedure,
        deleteRule: deleteRuleProcedure,
    });
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.socialPostOutputSchema = exports.socialPostInputSchema = exports.socialMediaInputSchema = void 0;
const zod_1 = require("zod");
// One media attachment for a social post, referenced by its S3 path.
exports.socialMediaInputSchema = zod_1.z.object({
    s3Path: zod_1.z.string().min(1, "Media S3 path is required"),
    mediaType: zod_1.z.enum(["image", "video", "document"]).optional(),
    // Length caps (300 / 2000) presumably mirror the target network's limits
    // — TODO confirm against the publishing API.
    title: zod_1.z.string().max(300).optional(),
    description: zod_1.z.string().max(2000).optional(),
});
// Payload for publishing a social post: required text, up to four media
// attachments, and a visibility that defaults to PUBLIC.
exports.socialPostInputSchema = zod_1.z.object({
    text: zod_1.z.string().min(1, "Post text is required"),
    media: zod_1.z.array(exports.socialMediaInputSchema).max(4).optional(),
    visibility: zod_1.z.enum(["PUBLIC", "CONNECTIONS"]).default("PUBLIC"),
});
// Publish result: the created share's URN when the network returns one.
exports.socialPostOutputSchema = zod_1.z.object({
    shareUrn: zod_1.z.string().optional(),
});
|