@rpcbase/client 0.61.0 → 0.63.0
This diff shows the published contents of these package versions as they appear in their public registry and is provided for informational purposes only.
- package/package.json +9 -6
- package/rts/boot.js +0 -2
- package/rts/getUseQuery.js +17 -21
- package/rts/store/index.js +19 -99
- package/rts/store/satisfies_projection.js +32 -0
- package/rts/store/satisfies_projection.test.js +42 -0
package/package.json
CHANGED
@@ -1,20 +1,23 @@
 {
   "name": "@rpcbase/client",
-  "version": "0.61.0",
+  "version": "0.63.0",
   "scripts": {
-    "test": "
+    "test": "jest"
   },
   "dependencies": {
     "axios": "1.4.0",
-    "i18next": "23.
+    "i18next": "23.4.1",
     "i18next-chained-backend": "4.4.0",
     "i18next-resources-to-backend": "1.1.4",
     "lodash": "4.17.21",
+    "posthog-js": "1.75.3",
     "pouchdb-adapter-indexeddb": "8.0.1",
     "pouchdb-core": "8.0.1",
     "pouchdb-find": "8.0.1",
-    "
-    "
-
+    "react-i18next": "13.0.3",
+    "socket.io-client": "4.7.2"
+  },
+  "devDependencies": {
+    "jest": "29.6.2"
   }
 }
package/rts/boot.js
CHANGED
package/rts/getUseQuery.js
CHANGED
@@ -8,7 +8,7 @@ import _omit from "lodash/omit"
 
 import get_uid from "../auth/get_uid"
 
-import cacheStorage from "./cacheStorage"
+// import cacheStorage from "./cacheStorage"
 
 
 const log = debug("rb:rts:useQuery")
@@ -45,7 +45,7 @@ const getUseQuery = (register_query) => (
     sort = {},
   } = options
 
-  const storageKey = `${uid}.${key}.${model_name}.${JSON.stringify(query)}.${JSON.stringify(projection)}.${JSON.stringify(sort)}`
+  // const storageKey = `${uid}.${key}.${model_name}.${JSON.stringify(query)}.${JSON.stringify(projection)}.${JSON.stringify(sort)}`
 
   useEffect(() => {
     if (options.debug) {
@@ -53,27 +53,23 @@ const getUseQuery = (register_query) => (
     }
   }, [])
 
-  useEffect(() => {
-    const load = async() => {
-      const val = await cacheStorage.get(storageKey)
-      // TODO: rm this
-      // always initially apply when first load here
-      if (val) {
-        console.log("Will set val from cache storage")
-        setData(val)
-        setLoading(false)
-      }
-    }
-
-    load()
-  }, [storageKey])
+  // useEffect(() => {
+  // const load = async() => {
+  // const val = await cacheStorage.get(storageKey)
+  // // TODO: rm this
+  // // always initially apply when first load here
+  // if (val) {
+  // console.log("Will set val from cache storage")
+  // setData(val)
+  // setLoading(false)
+  // }
+  // }
+  //
+  // load()
+  // }, [storageKey])
 
   const applyNewData = (newData, context) => {
     setData(newData)
-    // write data to storage
-    // TODO: is there a better way than doing it on every update ?
-    // should this be throttled
-    cacheStorage.set(storageKey, newData)
 
     if (newData?.length > 0) {
       lastDocRef.current = newData[newData.length - 1]
@@ -161,7 +157,7 @@ const getUseQuery = (register_query) => (
       log && log("useQuery cleanup unsubscribe()")
      unsubscribe()
    }
-  }, [JSON.stringify(query), key
+  }, [JSON.stringify(query), key])
 
 
  const loadNextPage = useCallback(() => {
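In getUseQuery.js the local-cache hydration path is disabled: the cacheStorage import, the storageKey, the hydration effect, and the cacheStorage.set on every update are commented out or removed, so the hook now relies solely on live query results. The last hunk also settles the re-subscribe effect's dependency list at [JSON.stringify(query), key]. A minimal sketch of that subscribe/cleanup shape follows; the hook name, the register_query callback signature, and the assumption that register_query returns an unsubscribe function are illustrative, not the package's actual code.

import { useEffect, useRef, useState } from "react"

// Minimal sketch of the subscribe/cleanup shape visible in the hunks above.
// register_query's signature and return value are assumptions based on the
// names that appear in the diff, not the package's API.
const useQuerySketch = (register_query, model_name, query, key) => {
  const [data, setData] = useState(null)
  const [loading, setLoading] = useState(true)
  const lastDocRef = useRef(null)

  useEffect(() => {
    const unsubscribe = register_query({model_name, query, key}, (err, docs) => {
      if (err) return
      setData(docs)
      setLoading(false)
      if (docs?.length > 0) lastDocRef.current = docs[docs.length - 1]
    })

    // Cleanup mirrors the diff: unsubscribe when the query identity changes or on unmount.
    return () => unsubscribe()
    // JSON.stringify(query) keeps a query object rebuilt on every render from
    // re-triggering the effect; only a structural change re-subscribes.
  }, [JSON.stringify(query), key])

  return {data, loading, lastDoc: lastDocRef}
}

export default useQuerySketch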
package/rts/store/index.js
CHANGED
@@ -5,36 +5,15 @@ import "./debug"
 
 import get_collection from "./get_collection"
 import update_docs from "./update_docs"
+import satisfies_projection from "./satisfies_projection"
 
 const log = debug("rb:rts:store")
 
-
-// import {DATABASE_NAME} from "env"
-// let prefix = "rb/"
-// if (DATABASE_NAME) prefix += `${DATABASE_NAME}/`
-//
-// PouchDB.prefix = prefix
-//
-// PouchDB.plugin(IndexedDBAdapter)
-// PouchDB.plugin(FindPlugin)
-//
-//
-// let db = new PouchDB(`db/items`, { adapter: "indexeddb" });
-//
-// // Create a new document
-// let doc = {
-// _id: "001",
-// message: "Hello, World!"
-// }
-// //
-//
-// const run = async() => {
+
 // // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
 // const res = await db.createIndex({
 // index: {fields: ["name"]}
 // })
-//
-//
 // // Listen for changes on the database
 // const fn = db.changes({
 // since: "now",
@@ -53,74 +32,13 @@ const log = debug("rb:rts:store")
 // console.log("GOT FN", fn)
 //
 // console.log("got res", res)
-//
-//
-// selector: {name: "Mario"},
-// sort: ["name"]
-// })
-//
-// console.log("GT doc", doc)
-//
-// const {docs} = await db.find({selector: {}})
-//
-// console.log("got all docs", docs)
-//
-// db.put({
-// _id: "001-" + Date.now(),
-// message: "Hello, World!"
-// })
-// }
-//
-//
-// run()
-
-// // Insert the document into the database
-// db.put(doc, function(err, response) {
-// if (err) {
-// return console.log(err);
-// } else {
-// console.log("Document created successfully");
-// }
+// _cols_store[col_name].getIndexes().then(function (result) {
+// console.log("got indexes", result)
 // })
-//
-//
-// // Insert the document into the database
-// db.put({
-// _id: "hello world",
-// fieldVal: 12,
-// }, {force: true}, async function(err, response) {
-// if (err) {
-// console.log("errrro", err);
-// console.log("Stt", JSON.stringify(err))
-// return
-// } else {
-// console.log("Document created successfully", response);
-// }
-//
-// console.log("will try to find:")
-// const doc = await db.find({
-// selector: {
-// fieldVal: 10
-// }
-// })
-// console.log("LEDOC", doc)
-//
-// });
-//
-// setInterval(() => {
-// // Fetch the document
-// db.get("001", function(err, doc) {
-// if (err) {
-// return console.log(err);
-// } else {
-// console.log(doc);
-// }
-// });
-// }, 2000)
-//
 
-// TODO: should the store be in a worker or the main thread ?
 
+// TODO: implement store in a shared worker
+// TODO: should we filter all docs by projection ? or just the ones where the projection isn't complete ?
 const run_query = async({model_name, query, query_key, options}, callback) => {
   // console.log("run_query", {model_name, query, query_key, options})
   // console.time("store run_query")
@@ -137,20 +55,22 @@ const run_query = async({model_name, query, query_key, options}, callback) => {
     // fields: [""]
   })
 
-
+  let mapped_docs = docs
+    .map(({_rev, ...doc}) => {
+      // TODO: handle projections here
+      const remapped_doc = Object.entries(doc).reduce((new_doc, [key, value]) => {
+        let new_key = key.startsWith('$_') ? key.replace(/^\$_/, "") : key
+        new_doc[new_key] = value
+        return new_doc
+      }, {})
 
-
-
-      const remapped_doc = Object.entries(doc).reduce((new_doc, [key, value]) => {
-        let new_key = key.startsWith('$_') ? key.replace(/^\$_/, "") : key
-        new_doc[new_key] = value
-        return new_doc
-      }, {})
+      return remapped_doc
+    })
 
-      return remapped_doc
-    })
 
-
+  if (options.projection) {
+    mapped_docs = mapped_docs.filter((doc) => satisfies_projection(doc, options.projection))
+  }
 
  callback(null, mapped_docs, {source: "cache"})
 }
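The cached branch of run_query now post-processes the PouchDB results: _rev is dropped, keys stored with a "$_" prefix are mapped back to their original names, and, when a projection was requested, docs whose cached fields do not cover it are filtered out via satisfies_projection (added below). A standalone sketch of that step; the sample docs and the "$_type" field are illustrative, only the "$_" prefix handling comes from the diff.

// Sketch of the cached-docs post-processing added to run_query above.
const satisfies_projection = require("./satisfies_projection")

const postprocess = (docs, projection) => {
  let mapped_docs = docs.map(({_rev, ...doc}) => {
    // Map keys persisted with a leading "$_" back to their original names.
    return Object.entries(doc).reduce((new_doc, [key, value]) => {
      const new_key = key.startsWith("$_") ? key.replace(/^\$_/, "") : key
      new_doc[new_key] = value
      return new_doc
    }, {})
  })

  // Serve a cached doc only if its cached fields cover the requested projection.
  if (projection) {
    mapped_docs = mapped_docs.filter((doc) => satisfies_projection(doc, projection))
  }
  return mapped_docs
}

console.log(postprocess(
  [
    {_id: "a", _rev: "1-x", "$_type": "note", title: "hello"},
    {_id: "b", _rev: "1-y", title: "partial"},
  ],
  {title: 1, type: 1}
))
// → only the first doc is kept; the second lacks the projected "type" field

This matches the second TODO above: every cached doc is filtered by the projection, not only the ones with an incomplete projection.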
package/rts/store/satisfies_projection.js
ADDED
@@ -0,0 +1,32 @@
+/* @flow */
+
+const get_keys = (obj, parent_key = '') => {
+  return Object.keys(obj).reduce((acc, key) => {
+    const new_key = parent_key ? `${parent_key}.${key}` : key
+    if (typeof obj[key] === 'object' && obj[key] !== null && !Array.isArray(obj[key])) {
+      acc.push(...get_keys(obj[key], new_key))
+    } else {
+      acc.push(new_key)
+    }
+    return acc
+  }, [])
+}
+
+const set_contains_set = (set_a, set_b) => {
+  if (set_b.size > set_a.size) return false
+  for (const b of set_b) if (!set_a.has(b)) return false
+  return true
+}
+
+const satisfies_projection = (doc, projection) => {
+  const doc_keys = new Set(get_keys(doc))
+  const projection_keys = new Set(Object.keys(projection).filter(k => projection[k] === 1))
+
+  if (!projection_keys.has('_id')) {
+    doc_keys.delete('_id')
+  }
+
+  return set_contains_set(doc_keys, projection_keys)
+}
+
+module.exports = satisfies_projection
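satisfies_projection flattens the doc into dotted key paths, treats arrays as leaf values, ignores _id unless the projection asks for it, and then checks that every projected key (value 1) is present on the doc. A few illustrative calls against this module; the sample documents are made up.

// Quick behavioural check of satisfies_projection.
const satisfies_projection = require("./satisfies_projection")

// Arrays are leaf values for get_keys, so {tags: [...]} flattens to the single key "tags".
console.log(satisfies_projection({_id: "1", tags: ["a", "b"]}, {tags: 1}))   // true

// Extra fields on the doc are fine; only the projected keys must be present.
console.log(satisfies_projection({_id: "1", tags: [], extra: 2}, {tags: 1})) // true

// _id is ignored unless the projection explicitly includes it.
console.log(satisfies_projection({a: 1}, {_id: 1, a: 1}))                    // false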
package/rts/store/satisfies_projection.test.js
ADDED
@@ -0,0 +1,42 @@
+/* @flow */
+const satisfies_projection = require("./satisfies_projection")
+
+
+let doc = {
+  _id: '1',
+  field1: 'value1',
+  field2: {
+    subField1: 'value2',
+    subField2: 'value3'
+  }
+}
+
+let projection = {
+  field1: 1,
+  'field2.subField1': 1,
+  'field2.subField2': 1
+}
+
+
+
+test("simple", () => {
+  expect(satisfies_projection(doc, projection)).toBe(true)
+})
+
+test("missing field", () => {
+  expect(satisfies_projection(doc, {
+    missing_field: 1,
+    ...projection
+  })).toBe(false)
+})
+
+test("missing nested field", () => {
+  expect(satisfies_projection(doc, {
+    "missing_field.missing_nested": 1,
+    ...projection
+  })).toBe(false)
+})
+
+test("empty", () => {
+  expect(satisfies_projection(doc, {field1: 1})).toBe(true)
+})