mogobase 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/client/hooks/useMutation.js +18 -0
- package/lib/client/hooks/usePaginatedQuery.js +104 -0
- package/lib/client/hooks/useQuery.js +32 -0
- package/lib/db/buildMongoFilters.js +189 -0
- package/lib/db/index.js +86 -0
- package/lib/dev/index.js +20 -0
- package/lib/dev/start.js +41 -0
- package/lib/index.js +4 -0
- package/lib/server/handlers.js +93 -0
- package/lib/server/hono.js +51 -0
- package/lib/server/index.js +10 -0
- package/lib/server/start.js +20 -0
- package/lib/server/ws.js +133 -0
- package/package.json +2 -2
- package/types/client/hooks/useMutation.d.ts +2 -0
- package/types/client/hooks/usePaginatedQuery.d.ts +14 -0
- package/types/client/hooks/useQuery.d.ts +2 -0
- package/types/db/buildMongoFilters.d.ts +2 -0
- package/types/db/index.d.ts +24 -0
- package/types/dev/index.d.ts +2 -0
- package/types/dev/start.d.ts +1 -0
- package/types/index.d.ts +4 -0
- package/types/server/handlers.d.ts +43 -0
- package/types/server/hono.d.ts +3 -0
- package/types/server/index.d.ts +10 -0
- package/types/server/start.d.ts +1 -0
- package/types/server/ws.d.ts +19 -0
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// Base URL of the Mogobase HTTP handler endpoint; configurable via env vars.
const apiUrl = `${process.env.NEXT_MOGOBASE_URL || process.env.MOGOBASE_URL || "http://localhost:4000"}/api/handlers`;

/**
 * Returns an async function that invokes the named server-side mutation
 * over HTTP POST and resolves with the parsed JSON response body.
 */
function useMutation(name) {
    return async (args) => {
        const payload = JSON.stringify({
            name,
            args,
        });
        const response = await fetch(apiUrl, {
            method: "POST",
            headers: {
                "Content-Type": "application/json",
            },
            body: payload,
        });
        return response.json();
    };
}
export default useMutation;
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { hc } from "hono/client";
|
|
2
|
+
import { useCallback, useEffect, useRef, useState } from "react";
|
|
3
|
+
const client = hc(process.env.NEXT_MOGOBASE_URL || process.env.MOGOBASE_URL || "http://localhost:4000");
|
|
4
|
+
/**
 * Upserts `incoming` items into a copy of `base`, matching on `key`.
 * Matching items are replaced in place; unmatched items are appended
 * only when `insert` is true. `base` is never mutated.
 */
const mergeArray = (base, incoming, key, insert = false) => {
    const merged = [...base];
    incoming.forEach((item) => {
        const idx = merged.findIndex((existing) => existing[key] === item[key]);
        if (idx !== -1) {
            merged[idx] = item;
        } else if (insert) {
            merged.push(item);
        }
    });
    return merged;
};
|
|
18
|
+
/**
 * React hook: cursor-paginated, live-updating query over WebSocket.
 *
 * Sends a "paginated-query" message, accumulates pages into `results`,
 * and merges "UpdateDoc" pushes from the server's change stream in place.
 *
 * @param name           handler name registered on the server
 * @param args           extra handler args (serializable)
 * @param paginationData page size and server-side sort options
 * @returns { results, hasNext, loadNext, hasPrevious, loadPrevious, isLoading }
 */
function usePaginatedQuery(name, args, paginationData = {
    pageSize: 10,
}) {
    const [data, setData] = useState([]);
    const [loading, setLoading] = useState(false);
    // Opaque server cursors; empty string / undefined means "no such page".
    const nextPage = useRef("");
    const previousPage = useRef("");
    const ws = useRef(null);
    // Bug fix: `args` and `paginationData` are typically inline object
    // literals (and the default is a fresh literal every render), so using
    // them directly as dependencies recreated fetchNextPage on every render,
    // which re-ran the effect below and reconnected the WebSocket in a loop.
    // Compare by serialized value instead.
    const argsKey = JSON.stringify(args);
    const paginationKey = JSON.stringify(paginationData);
    const fetchNextPage = useCallback(() => {
        setLoading(true);
        ws.current?.send(JSON.stringify({
            type: "paginated-query",
            name,
            args: {
                ...(args || {}),
                paginationArgs: {
                    limit: paginationData.pageSize,
                    next: nextPage.current || undefined,
                    sortAscending: paginationData.sortAscending ?? true,
                    sortCaseInsensitive: paginationData.sortCaseInsensitive ?? false,
                },
            },
        }));
    }, [name, argsKey, paginationKey]);
    const fetchPreviousPage = useCallback(() => {
        setLoading(true);
        ws.current?.send(JSON.stringify({
            type: "paginated-query",
            name,
            args: {
                ...(args || {}),
                paginationArgs: {
                    limit: paginationData.pageSize,
                    previous: previousPage.current || undefined,
                    sortAscending: paginationData.sortAscending ?? true,
                    sortCaseInsensitive: paginationData.sortCaseInsensitive ?? false,
                },
            },
        }));
    }, [name, argsKey, paginationKey]);
    useEffect(() => {
        ws.current = client.ws.$ws(0);
        ws.current.addEventListener("open", () => {
            fetchNextPage();
        });
        ws.current.addEventListener("message", (event) => {
            setLoading(false);
            const rs = JSON.parse(event.data);
            if (rs.type === "PaginatedQueryResult") {
                if (rs.success) {
                    const { results, previous, hasPrevious, next, hasNext } = rs.data;
                    nextPage.current = hasNext ? next : undefined;
                    previousPage.current = hasPrevious ? previous : undefined;
                    // Upsert the new page into the accumulated list.
                    setData((d) => mergeArray(d, results, "_id", true));
                }
                else {
                    console.error(rs.error);
                }
            }
            else if (rs.type === "UpdateDoc") {
                // Live change pushed by the server: replace the matching doc.
                setData((d) => mergeArray(d, [rs.data], "_id"));
            }
        });
        return () => {
            ws.current?.close();
        };
    }, [fetchNextPage]);
    const loadNext = useCallback(() => {
        if (nextPage.current) {
            fetchNextPage();
        }
    }, [fetchNextPage]);
    const loadPrevious = useCallback(() => {
        if (previousPage.current) {
            fetchPreviousPage();
        }
    }, [fetchPreviousPage]);
    return {
        results: data,
        hasNext: !!nextPage.current,
        loadNext,
        hasPrevious: !!previousPage.current,
        loadPrevious,
        isLoading: loading,
    };
}
export default usePaginatedQuery;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { hc } from "hono/client";
|
|
2
|
+
import { useEffect, useState } from "react";
|
|
3
|
+
const client = hc(process.env.NEXT_MOGOBASE_URL || process.env.MOGOBASE_URL || "http://localhost:4000");
|
|
4
|
+
/**
 * React hook: subscribes to a server-side query over WebSocket and returns
 * its latest result (null until the first "QueryResult" message arrives).
 */
function useQuery(name, args) {
    const [result, setResult] = useState(null);
    useEffect(() => {
        const socket = client.ws.$ws(0);
        const handleOpen = () => {
            socket.send(JSON.stringify({
                type: "query",
                name,
                args,
            }));
        };
        const handleMessage = (event) => {
            const message = JSON.parse(event.data);
            if (message.type !== "QueryResult") {
                return;
            }
            if (message.success) {
                setResult(message.data);
            } else {
                console.error(message.error);
            }
        };
        socket.addEventListener("open", handleOpen);
        socket.addEventListener("message", handleMessage);
        return () => socket.close();
        // args is compared by serialized value so equivalent inline literals
        // do not re-run the effect.
    }, [name, JSON.stringify(args)]);
    return result;
}
export default useQuery;
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
import { ObjectId } from "mongodb";
|
|
2
|
+
import merge from "lodash.merge";
|
|
3
|
+
/**
 * Normalizes a filter value: Date-like values (anything with a getMonth
 * method) become epoch milliseconds; everything else passes through.
 */
const processValue = (value) => {
    const isDateLike = value && typeof value.getMonth === "function";
    return isDateLike ? new Date(value).valueOf() : value;
};
|
|
9
|
+
/**
 * Expands the array form of an OR/AND filter into a list of individually
 * processed clauses.
 *
 * input:  [ { email: 'a@x.com' }, { email: 'b@x.com' } ]
 * output: [ processFilter({email: 'a@x.com'}), processFilter({email: 'b@x.com'}) ]
 */
const processOrAndFilter = (filter) => {
    const clauses = [];
    // for..in over the array keeps the original index-key semantics.
    for (const key in filter) {
        const clause = filter[key];
        for (const field of Object.keys(clause)) {
            if (key && field) {
                clauses.push(processFilter({ [field]: clause[field] }));
            }
        }
    }
    return clauses;
};
|
|
25
|
+
/**
 * Maps a DSL field key to a MongoDB path. By default, underscores become
 * dots (nested-path notation); opts.allowSnakeCase keeps the key verbatim.
 * "_id" is always preserved as-is.
 */
const processKey = (key, opts) => {
    if (key === "_id") {
        return key;
    }
    if (opts?.allowSnakeCase) {
        return key;
    }
    return key.replace(/_/g, ".");
};
|
|
35
|
+
// "_<op>" key suffixes and the MongoDB comparison operator each encodes.
// Bug fix: matching now uses endsWith instead of includes, so a plain field
// whose name merely CONTAINS a suffix (e.g. "admin_info" contains "_in") is
// no longer misparsed as an operator filter. With suffix matching, no entry
// here can shadow another (no suffix is a suffix of another key form).
const SUFFIX_OPERATORS = [
    ["_eq", "$eq"],   // equal:          { amount_eq: 20 }  -> { amount: { $eq: 20 } }
    ["_ne", "$ne"],   // not equal:      { amount_ne: 20 }  -> { amount: { $ne: 20 } }
    ["_lte", "$lte"], // less or equal:  { amount_lte: 20 } -> { amount: { $lte: 20 } }
    ["_gte", "$gte"], // greater/equal:  { amount_gte: 20 } -> { amount: { $gte: 20 } }
    ["_lt", "$lt"],   // less than:      { amount_lt: 20 }  -> { amount: { $lt: 20 } }
    ["_gt", "$gt"],   // greater than:   { amount_gt: 20 }  -> { amount: { $gt: 20 } }
    ["_in", "$in"],   // in list:        { email_in: [...] }  -> { email: { $in: [...] } }
    ["_all", "$all"], // matches all:    { tags_all: [...] }  -> { tags: { $all: [...] } }
    ["_nin", "$nin"], // not in list:    { email_nin: [...] } -> { email: { $nin: [...] } }
];

/**
 * Translates one level of the filter DSL into a native MongoDB filter.
 * Empty-string values are skipped (treated as "no constraint").
 */
const processFilter = (filter, opts) => {
    let newFilters = {};
    for (const key in filter) {
        const value = filter[key];
        let filters = {};
        if (value === "") {
            // No constraint for empty-string values.
            continue;
        }
        else if (key == "OR" || key == "or") {
            // { OR: [{email: "a"}, {email: "b"}] } -> { $or: [ ...processed clauses ] }
            filters = { $or: processOrAndFilter(value) };
        }
        else if (key == "AND" || key == "and") {
            // { AND: [{email: "a"}, {email: "b"}] } -> { $and: [ ...processed clauses ] }
            filters = { $and: processOrAndFilter(value) };
        }
        else if (key.indexOf("_id") > 0 && key.includes("Id")) {
            // ObjectId reference fields, e.g. { accountId_id: "..." } ->
            // { accountId: new ObjectId("...") }.
            // NOTE(review): the "contains both '_id' and 'Id'" heuristic is
            // kept verbatim from the original — confirm the intended naming
            // convention before tightening it.
            filters = { [key.replace("_id", "")]: new ObjectId(value) };
        }
        else if (key.endsWith("_between")) {
            // { amount_between: [20, 30] } -> { amount: { $gte: 20, $lte: 30 } }
            filters = {
                [processKey(key.slice(0, -"_between".length), opts)]: {
                    $gte: processValue(value[0]),
                    $lte: processValue(value[1]),
                },
            };
        }
        else if (key.endsWith("_contains")) {
            // SQL LIKE: { email_contains: "char" } -> { email: { $regex: ".*char.*" } }
            filters = {
                [processKey(key.slice(0, -"_contains".length), opts)]: {
                    $regex: `.*${value}.*`,
                },
            };
        }
        else if (key.endsWith("_regex")) {
            // { email_regex: "^abc.*" } -> { email: { $regex: "^abc.*", $options: "i" } }
            filters = {
                [processKey(key.slice(0, -"_regex".length), opts)]: {
                    $regex: `${value}`,
                    $options: "i",
                },
            };
        }
        else {
            const match = SUFFIX_OPERATORS.find(([suffix]) => key.endsWith(suffix));
            if (match) {
                const [suffix, op] = match;
                filters = {
                    [processKey(key.slice(0, -suffix.length), opts)]: {
                        [op]: processValue(value),
                    },
                };
            }
            else {
                // Plain equality: { email: "a@x.com" } -> { email: "a@x.com" }
                filters = { [processKey(key, opts)]: processValue(value) };
            }
        }
        newFilters = merge(newFilters, filters);
    }
    return newFilters;
};
|
|
186
|
+
// Final DSL -> MongoDB translation step. Soft-delete aware: every produced
// filter additionally requires deletedAt to be null, so soft-deleted
// documents never match.
export default (filter, opts) => ({
    ...processFilter(filter, opts),
    deletedAt: null,
});
|
package/lib/db/index.js
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import { MongoClient, ObjectId } from "mongodb";
|
|
2
|
+
import DataLoader from "dataloader";
|
|
3
|
+
import buildMongoFilters from "./buildMongoFilters.js";
|
|
4
|
+
const MONGO_URI = process.env.MONGO_URI || "mongodb://localhost:27017";
|
|
5
|
+
const MONGO_DB = process.env.MONGO_DB || "mogobase";
|
|
6
|
+
console.log("MONGO_URI", MONGO_URI);
|
|
7
|
+
console.log("MONGO_DB", MONGO_DB);
|
|
8
|
+
/**
 * Singleton MongoDB connection manager. Every `new MogobaseDB()` returns
 * the same instance; connection parameters come from MONGO_URI / MONGO_DB.
 */
class MogobaseDB {
    static _instance;
    _mongoClient;
    _db;
    // JSON schemas registered via defineModel, keyed by collection name.
    _schemas = new Map();
    constructor() {
        if (!MogobaseDB._instance) {
            MogobaseDB._instance = this;
        }
        return MogobaseDB._instance;
    }
    /**
     * Connects to MongoDB (idempotent) and returns the Db handle.
     */
    async connect() {
        if (this._mongoClient && this._db) {
            return this._db;
        }
        const client = await MongoClient.connect(MONGO_URI);
        this._mongoClient = client;
        this._db = client.db(MONGO_DB);
        return this._db;
    }
    /**
     * Closes the client connection.
     * Bug fix: also clears the cached client/db handles — previously a
     * connect() after disconnect() returned the stale, closed Db.
     */
    async disconnect() {
        if (!this._mongoClient)
            return;
        await this._mongoClient.close();
        this._mongoClient = undefined;
        this._db = undefined;
    }
    /** Connected MongoClient; throws if connect() has not been called. */
    get client() {
        if (!this._mongoClient) {
            throw new Error("Call connect() first");
        }
        return this._mongoClient;
    }
    /** Connected Db; throws if connect() has not been called. */
    get db() {
        if (!this._db) {
            throw new Error("Call connect() first");
        }
        return this._db;
    }
    /** Returns the named collection; throws if connect() has not been called. */
    model(name) {
        if (!this._db) {
            throw new Error("Call connect() first");
        }
        return this._db.collection(name);
    }
    /**
     * Ensures a collection handle exists, optionally registering a schema
     * and creating indexes. Connects lazily if needed.
     */
    async defineModel(name, schema, indexes) {
        if (!this._db) {
            this._db = await this.connect();
        }
        // Db#collection always returns a Collection handle (the server
        // creates the collection lazily on first write), so the previous
        // `if (!collection) createCollection(...)` fallback was dead code.
        const collection = this._db.collection(name);
        if (schema) {
            this._schemas.set(name, schema);
        }
        if (indexes) {
            await collection.createIndexes(indexes.indexSpecs, indexes.options);
        }
        return collection;
    }
}
|
|
69
|
+
// Re-exported MongoDB ObjectId, the canonical document id type.
export const Id = ObjectId;
// Translates the package's filter DSL into native MongoDB filters.
export const buildFilters = buildMongoFilters;
/**
 * Builds a DataLoader that batches lookups against `model` by `key`,
 * preserving request order (missing documents resolve to undefined).
 * Bug fix: removed a leftover debug console.log that ran on every batch.
 *
 * NOTE(review): every id is wrapped in ObjectId regardless of `key`, which
 * assumes the keyed field always stores ObjectId values — confirm before
 * using with non-ObjectId keys.
 */
export const DataLoaderGenerate = (model, key = "_id") => {
    const DB = new MogobaseDB();
    return new DataLoader(async (ids) => {
        const data = await DB.model(model)
            .find({
                [key]: {
                    $in: ids.map((id) => new Id(id)),
                },
            })
            .toArray();
        // Map results back into the order the ids were requested in.
        return ids.map((id) => data.find((item) => `${item[key]}` === id));
    });
};
// Export singleton
export default new MogobaseDB();
|
package/lib/dev/index.js
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
#!/usr/bin/env node
import { spawn } from "child_process";
import makeCli from "make-cli";
// Keep in sync with package.json (was stale at 1.0.0).
const version = "1.0.2";
// CLI entry point: `mogobase dev` runs the dev server under tsx watch mode.
makeCli({
    version,
    name: "mogobase",
    usage: "mogobase dev",
    arguments: "[command] [options]",
    options: [],
    action: (command = "dev", options) => {
        if (command === "dev") {
            // Bug fix: pass the executable and its arguments separately.
            // The original embedded "tsx watch" inside the command string,
            // which only worked because shell: true re-parsed it.
            const child = spawn("npx", ["tsx", "watch", "./node_modules/mogobase/lib/dev/start.js"], {
                shell: true,
                stdio: "inherit",
            });
            // code is null when the child was killed by a signal; exit 0 then.
            child.on("exit", (code) => process.exit(code ?? 0));
        }
    },
});
|
package/lib/dev/start.js
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { serve } from "@hono/node-server";
import app from "../server/hono.js";
import path from "path";
import ws from "../server/ws.js";
import fs from "fs";
// Dev entry point: loads every .ts handler module from ./mogobase in the
// directory the CLI is run from, then serves the Hono app with WebSocket
// support on MOGOBASE_PORT (default 4000).
const cwd = process.cwd();
const mogobaseFolder = path.resolve(cwd, "./mogobase");
let server;
const port = Number(process.env.MOGOBASE_PORT) || 4000;
// Import each TypeScript file; a default-exported function is treated as a
// setup hook and invoked with the Hono app (registration happens as a side
// effect of the import itself for handler modules).
const files = fs.readdirSync(mogobaseFolder);
for (const file of files) {
    if (!file.endsWith(".ts"))
        continue;
    const filePath = path.join(mogobaseFolder, file);
    const module = (await import(filePath));
    if (typeof module.default === "function") {
        module.default(app);
    }
}
server = serve({
    fetch: app.fetch,
    port,
});
console.log(`Mogobase dev server running on port ${port}`);
// Must run after serve(): attaches the WS upgrade handling to the node server.
ws.injectWebSocket(server);
// graceful shutdown
process.on("SIGINT", () => {
    server.close();
    process.exit(0);
});
process.on("SIGTERM", () => {
    if (!server)
        return;
    // Close gracefully; surface close errors through the exit code.
    server.close((err) => {
        if (err) {
            console.error(err);
            process.exit(1);
        }
        process.exit(0);
    });
});
|
package/lib/index.js
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import z4 from "zod/v4";
|
|
2
|
+
import DB from "../db/index.js";
|
|
3
|
+
/**
 * Singleton registry and dispatcher for query/mutation handlers.
 * Public handlers live in `queries`/`mutations`; server-only handlers live
 * in `_queries`/`_mutations` under keys prefixed "internal.".
 */
class Handlers {
    static _instance;
    queries = new Map();
    mutations = new Map();
    _queries = new Map();
    _mutations = new Map();
    constructor() {
        // Module-wide singleton: every `new Handlers()` returns the same instance.
        if (!Handlers._instance) {
            Handlers._instance = this;
        }
        return Handlers._instance;
    }
    /**
     * Looks `name` up in the public registry, falling back to the internal
     * registry for names starting with "internal". Throws when no handler
     * matches. (Factored out of the previously duplicated lookup logic in
     * _runQuery/_runMutation.)
     */
    _resolve(publicMap, internalMap, kind, name) {
        let handler = publicMap.get(name);
        if (!handler && name.startsWith("internal")) {
            handler = internalMap.get(name);
        }
        if (!handler) {
            throw new Error(`${kind} ${name} not found`);
        }
        return handler;
    }
    /**
     * Validates `args` against the handler's zod schema and runs the query
     * handler with a context exposing db, runQuery/runMutation and watch.
     * @throws Error when the handler is missing or validation fails.
     */
    async _runQuery(name, args, ctx = {}) {
        const handler = this._resolve(this.queries, this._queries, "Query", name);
        const validated = await handler.args.safeParseAsync(args);
        if (!validated.success) {
            throw new Error(`Invalid args: ${validated.error.issues[0].message}`);
        }
        return await handler.handler(validated.data, {
            db: ctx.db || DB,
            runQuery: this._runQuery.bind(this),
            runMutation: this._runMutation.bind(this),
            // watch is only meaningful for WS-driven queries; default no-op.
            watch: ctx.watch || (() => { }),
        });
    }
    /**
     * Same as _runQuery but for mutations; the mutation context deliberately
     * has no `watch` capability.
     */
    async _runMutation(name, args, ctx = {}) {
        const handler = this._resolve(this.mutations, this._mutations, "Mutation", name);
        const validated = await handler.args.safeParseAsync(args);
        if (!validated.success) {
            throw new Error(`Invalid args: ${validated.error.issues[0].message}`);
        }
        return await handler.handler(validated.data, {
            db: ctx.db || DB,
            runQuery: this._runQuery.bind(this),
            runMutation: this._runMutation.bind(this),
        });
    }
}
|
|
67
|
+
// filename: callsite.ts
/** Registers a public query handler; rejects duplicate names. */
export function query(name, c) {
    if (Handlers._instance.queries.has(name)) {
        throw new Error(`Handler ${name} already exists`);
    }
    Handlers._instance.queries.set(name, c);
}
/** Registers a public mutation handler; rejects duplicate names. */
export function mutation(name, c) {
    if (Handlers._instance.mutations.has(name)) {
        throw new Error(`Handler ${name} already exists`);
    }
    Handlers._instance.mutations.set(name, c);
}
/** Registers a server-only query under the "internal." prefix. */
export function internalQuery(name, c) {
    const key = `internal.${name}`;
    // Bug fix: the duplicate check previously looked up the UNprefixed name
    // while the handler was stored under the prefixed key, so the guard
    // never fired. Check the same key we store under.
    if (Handlers._instance._queries.has(key)) {
        throw new Error(`Handler ${name} already exists`);
    }
    Handlers._instance._queries.set(key, c);
}
/** Registers a server-only mutation under the "internal." prefix. */
export function internalMutation(name, c) {
    const key = `internal.${name}`;
    // Bug fix: same prefixed-key duplicate check as internalQuery.
    if (Handlers._instance._mutations.has(key)) {
        throw new Error(`Handler ${name} already exists`);
    }
    Handlers._instance._mutations.set(key, c);
}
// Validation namespace (zod v4) re-exported for handler arg schemas.
export const v = z4;
export default new Handlers();
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { cors } from "hono/cors";
|
|
3
|
+
import ws from "../server/ws.js";
|
|
4
|
+
import handlers from "../server/handlers.js";
|
|
5
|
+
import DB from "../db/index.js";
|
|
6
|
+
const app = new Hono();
// Bind @hono/node-ws before any route uses ws.upgradeWebSocket().
ws.createNodeWebSocket(app);
// Health/info endpoint.
app.get("/", (c) => {
    return c.json({
        apiVersion: "1.0.0",
    });
});
app.get("/ws", ws.upgradeWebSocket());
app.use("/api/handlers", cors());
// GET runs queries; `args` arrives as a JSON-encoded query-string parameter.
app.get("/api/handlers", async (c) => {
    const body = c.req.query();
    const { name, args } = body;
    if (!name) {
        return c.text("Name is required", 400);
    }
    try {
        await DB.connect();
        // Bug fix: JSON.parse(undefined) used to throw (-> spurious 400)
        // when no args were passed; treat a missing `args` parameter as {} and
        // let the handler's schema validation report missing fields instead.
        const rs = await handlers._runQuery(name, args ? JSON.parse(args) : {}, {
            db: DB,
        });
        // Connection is deliberately kept open across requests.
        // await DB.disconnect();
        return c.json(rs);
    }
    catch (error) {
        return c.text(`${error}`, 400);
    }
});
// POST runs mutations; `args` arrives as part of the JSON body.
app.post("/api/handlers", async (c) => {
    const body = await c.req.json();
    const { name, args } = body;
    if (!name) {
        return c.text("Name is required", 400);
    }
    try {
        await DB.connect();
        const rs = await handlers._runMutation(name, args, {
            db: DB,
        });
        // Connection is deliberately kept open across requests.
        // await DB.disconnect();
        return c.json(rs);
    }
    catch (error) {
        return c.text(`${error}`, 400);
    }
});
export default app;
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { query, mutation, internalQuery, internalMutation, v } from "../server/handlers.js";
|
|
2
|
+
const PaginationQueryArgs = v.object({
|
|
3
|
+
limit: v.number(),
|
|
4
|
+
paginatedField: v.string().optional(),
|
|
5
|
+
sortAscending: v.boolean().optional(),
|
|
6
|
+
sortCaseInsensitive: v.boolean().optional(),
|
|
7
|
+
previous: v.string().optional(),
|
|
8
|
+
next: v.string().optional(),
|
|
9
|
+
});
|
|
10
|
+
export { query, mutation, internalQuery, internalMutation, v, PaginationQueryArgs };
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { serve } from "@hono/node-server";
|
|
2
|
+
import app from "../server/hono.js";
|
|
3
|
+
// Production entry point: serves the Hono app on MOGOBASE_PORT (default 4000).
// NOTE(review): unlike lib/dev/start.js, this entry never calls
// ws.injectWebSocket(server) — confirm whether WebSocket endpoints are
// expected to be unavailable when started this way.
const server = serve({
    fetch: app.fetch,
    port: Number(process.env.MOGOBASE_PORT) || 4000,
});
// graceful shutdown
process.on("SIGINT", () => {
    server.close();
    process.exit(0);
});
process.on("SIGTERM", () => {
    // Close gracefully; surface close errors through the exit code.
    server.close((err) => {
        if (err) {
            console.error(err);
            process.exit(1);
        }
        process.exit(0);
    });
});
|
package/lib/server/ws.js
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
// import { upgradeWebSocket } from "hono/cloudflare-workers";
|
|
2
|
+
import { createNodeWebSocket } from "@hono/node-ws";
|
|
3
|
+
import handlers from "./handlers.js";
|
|
4
|
+
import DB from "../db/index.js";
|
|
5
|
+
/**
 * Singleton WebSocket endpoint handling "query", "paginated-query" and
 * "mutation" messages. Each connection gets a UUID-keyed entry in _state
 * holding its socket and at most one MongoDB change stream, which is used
 * to re-run watched queries and push live updates to the client.
 * NOTE: this class shadows the global WebSocket name within this module.
 */
class WebSocket {
    static _instance;
    _nodeWebSocket;
    _changeStream;
    // connection id -> { ws, changeStream? }
    _state = new Map();
    constructor() {
        if (!WebSocket._instance) {
            WebSocket._instance = this;
        }
        return WebSocket._instance;
    }
    /**
     * Dispatches one incoming message ({ type, name, args }) for the
     * connection identified by `id`.
     */
    async _handleEvent(event, socket, id) {
        const data = JSON.parse(event.data);
        const { type, name, args } = data;
        if (!name) {
            // Bug fix: the payload must be JSON-serialized; the original
            // passed a raw object to socket.send, which is not valid
            // WebSocket data.
            return socket.send(JSON.stringify({
                success: false,
                error: "Name is required",
            }));
        }
        let rs;
        if (type === "query") {
            const func = async (noWatch) => {
                await DB.connect();
                rs = await handlers._runQuery(name, args, {
                    db: DB,
                    watch: (modelName, pipeline, options) => {
                        if (noWatch) {
                            return;
                        }
                        const state = this._state.get(id);
                        // Resume from the previous stream's token so no
                        // events are lost when the stream is replaced.
                        const resumeToken = state.changeStream?.resumeToken;
                        if (state.changeStream) {
                            state.changeStream.close();
                        }
                        const changeStream = DB.model(modelName).watch(pipeline, {
                            ...(options || {}),
                            fullDocument: "updateLookup",
                            resumeAfter: resumeToken,
                        });
                        this._state.set(id, { ...state, changeStream: changeStream });
                        changeStream.on("change", (change) => {
                            // Re-run the query (without re-registering the
                            // watch) and push the fresh result.
                            func(true);
                        });
                    },
                });
                socket.send(JSON.stringify({ type: "QueryResult", success: true, data: rs }));
            };
            // Robustness: report handler failures to the client instead of
            // leaving an unhandled promise rejection. Clients already branch
            // on rs.success for this message type.
            func().catch((error) => {
                socket.send(JSON.stringify({ type: "QueryResult", success: false, error: `${error}` }));
            });
        }
        else if (type === "paginated-query") {
            const func = async (noWatch) => {
                await DB.connect();
                rs = await handlers._runQuery(name, args, {
                    db: DB,
                    watch: (modelName, pipeline, options) => {
                        if (noWatch) {
                            return;
                        }
                        const state = this._state.get(id);
                        const resumeToken = state.changeStream?.resumeToken;
                        if (state.changeStream) {
                            state.changeStream.close();
                        }
                        const changeStream = DB.model(modelName).watch(pipeline, {
                            ...(options || {}),
                            fullDocument: "updateLookup",
                            resumeAfter: resumeToken,
                        });
                        this._state.set(id, { ...state, changeStream: changeStream });
                        changeStream.on("change", (change) => {
                            if (change.operationType === "update") {
                                // Push the updated document so clients can
                                // merge it into their result list in place.
                                const { fullDocument } = change;
                                socket.send(JSON.stringify({ type: "UpdateDoc", success: true, data: fullDocument }));
                            }
                            func(true);
                        });
                    },
                });
                if (!rs.results) {
                    throw new Error("Invalid paginated result. Make sure the return value is from MongoPaging.find");
                }
                socket.send(JSON.stringify({ type: "PaginatedQueryResult", success: true, data: rs }));
            };
            func().catch((error) => {
                socket.send(JSON.stringify({ type: "PaginatedQueryResult", success: false, error: `${error}` }));
            });
        }
        else if (type === "mutation") {
            try {
                await DB.connect();
                rs = await handlers._runMutation(name, args, {
                    db: DB,
                });
                socket.send(JSON.stringify({ type: "MutationResult", success: true, data: rs }));
            }
            catch (error) {
                // Robustness: surface mutation failures to the client.
                socket.send(JSON.stringify({ type: "MutationResult", success: false, error: `${error}` }));
            }
        }
    }
    /** Binds @hono/node-ws to the Hono app; must run before upgradeWebSocket(). */
    createNodeWebSocket(app) {
        this._nodeWebSocket = createNodeWebSocket({ app });
    }
    /** Returns the Hono handler that upgrades requests to WebSocket. */
    upgradeWebSocket() {
        if (!this._nodeWebSocket) {
            throw new Error("Call createNodeWebSocket() first");
        }
        return this._nodeWebSocket.upgradeWebSocket((c) => {
            const id = crypto.randomUUID();
            return {
                onMessage: (event, ws) => {
                    // Lazily register per-connection state on first message.
                    const state = this._state.get(id);
                    if (!state) {
                        this._state.set(id, { ws });
                    }
                    this._handleEvent(event, ws, id);
                },
                onClose: async () => {
                    // Tear down the connection's change stream, if any.
                    const state = this._state.get(id);
                    if (state && state.changeStream) {
                        await state.changeStream.close();
                        this._state.delete(id);
                    }
                },
            };
        });
    }
    /** Attaches the upgrade handler to the running node server. */
    injectWebSocket(server) {
        if (!this._nodeWebSocket) {
            throw new Error("Call createNodeWebSocket() first");
        }
        this._nodeWebSocket.injectWebSocket(server);
    }
}
export default new WebSocket();
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mogobase",
|
|
3
3
|
"type": "module",
|
|
4
|
-
"version": "1.0.
|
|
4
|
+
"version": "1.0.2",
|
|
5
5
|
"description": "",
|
|
6
6
|
"main": "lib/index.js",
|
|
7
7
|
"types": "types/index.d.ts",
|
|
@@ -44,7 +44,7 @@
|
|
|
44
44
|
"hono": "^4.11.3",
|
|
45
45
|
"lodash.merge": "^4.6.2",
|
|
46
46
|
"make-cli": "^6.0.0",
|
|
47
|
-
"mongodb": "
|
|
47
|
+
"mongodb": "6.8.2",
|
|
48
48
|
"react": "^19.2.3",
|
|
49
49
|
"tsx": "^4.21.0",
|
|
50
50
|
"zod": "^4.3.5"
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/** Options controlling page size and server-side sort for usePaginatedQuery. */
type PaginationData = {
    pageSize: number;
    sortAscending?: boolean;
    sortCaseInsensitive?: boolean;
};
/**
 * React hook for cursor-paginated, live-updating queries over WebSocket.
 * Accumulates pages into `results`; loadNext/loadPrevious are no-ops when
 * the corresponding cursor is absent.
 */
declare function usePaginatedQuery(name: string, args?: any, paginationData?: PaginationData): {
    results: any[];
    hasNext: boolean;
    loadNext: () => void;
    hasPrevious: boolean;
    loadPrevious: () => void;
    isLoading: boolean;
};
export default usePaginatedQuery;
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { Collection, CreateIndexesOptions, Db, IndexDescription, MongoClient, ObjectId } from "mongodb";
import DataLoader from "dataloader";
/** Singleton MongoDB connection manager (implementation: lib/db/index.js). */
declare class MogobaseDB {
    static _instance: MogobaseDB;
    _mongoClient?: MongoClient;
    _db?: Db;
    // Schemas registered via defineModel, keyed by collection name.
    _schemas: Map<string, any>;
    constructor();
    /** Connects (idempotent) and returns the Db handle. */
    connect(): Promise<Db>;
    /** Closes the underlying client, if connected. */
    disconnect(): Promise<void>;
    /** Connected MongoClient; throws if connect() has not been called. */
    get client(): MongoClient;
    /** Connected Db; throws if connect() has not been called. */
    get db(): Db;
    /** Returns the named collection; throws if connect() has not been called. */
    model(name: string): Collection;
    /** Ensures a collection handle, optionally registering a schema and indexes. */
    defineModel(name: string, schema?: any, indexes?: {
        indexSpecs: IndexDescription[];
        options?: CreateIndexesOptions;
    }): Promise<Collection>;
}
export type { MogobaseDB };
/** Alias for mongodb's ObjectId. */
export declare const Id: typeof ObjectId;
/** Translates the package's filter DSL into a native MongoDB filter object. */
export declare const buildFilters: (filter: any, opts?: any) => any;
/** Creates a DataLoader batching lookups on `model` by `key` (default "_id"). */
export declare const DataLoaderGenerate: (model: string, key?: string) => DataLoader<string, import("mongodb").WithId<import("bson").Document> | undefined, string>;
declare const _default: MogobaseDB;
export default _default;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/types/index.d.ts
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import z4 from "zod/v4";
|
|
2
|
+
import type { MogobaseDB } from "../db";
|
|
3
|
+
import { ChangeStreamOptions, Document } from "mongodb";
|
|
4
|
+
export type Context = {
|
|
5
|
+
db?: MogobaseDB;
|
|
6
|
+
runQuery?: (name: string, args: any, ctx?: Context) => Promise<any>;
|
|
7
|
+
runMutation?: (name: string, args: any, ctx?: Context) => Promise<any>;
|
|
8
|
+
watch?: (modelName: string, pipeline?: Document[], options?: ChangeStreamOptions) => void;
|
|
9
|
+
};
|
|
10
|
+
export type QueryHandler = {
|
|
11
|
+
args: z4.ZodType;
|
|
12
|
+
handler: (args: any, ctx: {
|
|
13
|
+
db: MogobaseDB;
|
|
14
|
+
runQuery: (name: string, args: any, ctx?: Context) => Promise<any>;
|
|
15
|
+
runMutation: (name: string, args: any, ctx?: Context) => Promise<any>;
|
|
16
|
+
watch: (modelName: string, pipeline?: Document[], options?: ChangeStreamOptions) => void;
|
|
17
|
+
}) => Promise<any>;
|
|
18
|
+
};
|
|
19
|
+
export type MutationHandler = {
|
|
20
|
+
args: z4.ZodType;
|
|
21
|
+
handler: (args: any, ctx: {
|
|
22
|
+
db: MogobaseDB;
|
|
23
|
+
runQuery: (name: string, args: any, ctx?: Context) => Promise<any>;
|
|
24
|
+
runMutation: (name: string, args: any, ctx?: Context) => Promise<any>;
|
|
25
|
+
}) => Promise<any>;
|
|
26
|
+
};
|
|
27
|
+
declare class Handlers {
|
|
28
|
+
static _instance: Handlers;
|
|
29
|
+
queries: Map<string, QueryHandler>;
|
|
30
|
+
mutations: Map<string, MutationHandler>;
|
|
31
|
+
_queries: Map<string, QueryHandler>;
|
|
32
|
+
_mutations: Map<string, MutationHandler>;
|
|
33
|
+
constructor();
|
|
34
|
+
_runQuery(name: string, args: any, ctx?: Context): Promise<any>;
|
|
35
|
+
_runMutation(name: string, args: any, ctx?: Context): Promise<any>;
|
|
36
|
+
}
|
|
37
|
+
export declare function query(name: string, c: QueryHandler): void;
|
|
38
|
+
export declare function mutation(name: string, c: MutationHandler): void;
|
|
39
|
+
export declare function internalQuery(name: string, c: QueryHandler): void;
|
|
40
|
+
export declare function internalMutation(name: string, c: MutationHandler): void;
|
|
41
|
+
export declare const v: typeof z4;
|
|
42
|
+
declare const _default: Handlers;
|
|
43
|
+
export default _default;
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { query, mutation, internalQuery, internalMutation, v } from "../server/handlers";
|
|
2
|
+
declare const PaginationQueryArgs: import("zod/v4").ZodObject<{
|
|
3
|
+
limit: import("zod/v4").ZodNumber;
|
|
4
|
+
paginatedField: import("zod/v4").ZodOptional<import("zod/v4").ZodString>;
|
|
5
|
+
sortAscending: import("zod/v4").ZodOptional<import("zod/v4").ZodBoolean>;
|
|
6
|
+
sortCaseInsensitive: import("zod/v4").ZodOptional<import("zod/v4").ZodBoolean>;
|
|
7
|
+
previous: import("zod/v4").ZodOptional<import("zod/v4").ZodString>;
|
|
8
|
+
next: import("zod/v4").ZodOptional<import("zod/v4").ZodString>;
|
|
9
|
+
}, import("zod/v4/core").$strip>;
|
|
10
|
+
export { query, mutation, internalQuery, internalMutation, v, PaginationQueryArgs };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { NodeWebSocket } from "@hono/node-ws";
|
|
2
|
+
import { Hono } from "hono";
|
|
3
|
+
import { ServerType } from "@hono/node-server";
|
|
4
|
+
import { ChangeStream } from "mongodb";
|
|
5
|
+
declare class WebSocket {
|
|
6
|
+
static _instance: WebSocket;
|
|
7
|
+
_nodeWebSocket?: NodeWebSocket;
|
|
8
|
+
_changeStream?: ChangeStream;
|
|
9
|
+
_state: Map<string, any>;
|
|
10
|
+
constructor();
|
|
11
|
+
_handleEvent(event: any, socket: any, id: string): Promise<any>;
|
|
12
|
+
createNodeWebSocket(app: Hono): void;
|
|
13
|
+
upgradeWebSocket(): import("hono").MiddlewareHandler<any, string, {
|
|
14
|
+
outputFormat: "ws";
|
|
15
|
+
}>;
|
|
16
|
+
injectWebSocket(server: ServerType): void;
|
|
17
|
+
}
|
|
18
|
+
declare const _default: WebSocket;
|
|
19
|
+
export default _default;
|