@rpcbase/client 0.44.0 → 0.46.0
This diff shows the content of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/package.json +6 -2
- package/rpc_post.js +1 -1
- package/rts/boot.js +5 -0
- package/rts/cacheStorage/index.js +12 -0
- package/rts/cacheStorage/native.js +28 -0
- package/rts/cacheStorage/web.js +85 -0
- package/rts/getUseQuery.js +177 -0
- package/rts/index.js +194 -0
- package/rts/store/debug.js +24 -0
- package/rts/store/get_collection.js +35 -0
- package/rts/store/index.js +161 -0
- package/rts/store/update_docs.js +44 -0
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@rpcbase/client",
-  "version": "0.44.0",
+  "version": "0.46.0",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 0"
   },
@@ -10,6 +10,10 @@
   "i18next-chained-backend": "4.4.0",
   "i18next-resources-to-backend": "1.1.4",
   "lodash": "4.17.21",
-  "
+  "pouchdb-adapter-indexeddb": "8.0.1",
+  "pouchdb-core": "8.0.1",
+  "pouchdb-find": "8.0.1",
+  "react-i18next": "13.0.1",
+  "socket.io-client": "4.7.1"
   }
 }

package/rpc_post.js
CHANGED

@@ -5,7 +5,7 @@ import _get from "lodash/get"
 import _set from "lodash/set"

 import get_txn_id from "@rpcbase/std/get_txn_id"
-import {add_local_txn} from "@rpcbase/
+import {add_local_txn} from "@rpcbase/client/rts"

 import {BASE_URL} from "env"

package/rts/cacheStorage/index.js
ADDED

@@ -0,0 +1,12 @@
+/* @flow */
+import {Platform} from "react-native"
+
+import getWebStorage from "./web"
+import getNativeStorage from "./native"
+
+
+// TODO: add support for storage expiration
+
+const storage = Platform.OS === "web" ? getWebStorage() : getNativeStorage()
+
+export default storage

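Both backends expose the same promise-based get/set contract (the web backend additionally implements delete), so callers such as getUseQuery.js stay platform-agnostic. A minimal usage sketch, assuming the import path follows the `@rpcbase/client/rts` convention used elsewhere in the package; the key and value here are hypothetical:

import cacheStorage from "@rpcbase/client/rts/cacheStorage"

// native: JSON-serialized into AsyncStorage; web: stored as {key, obj} rows in IndexedDB
await cacheStorage.set("open-todos", [{_id: "t1", title: "buy milk"}])
const cached = await cacheStorage.get("open-todos") // -> the array, or undefined on a miss
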
package/rts/cacheStorage/native.js
ADDED

@@ -0,0 +1,28 @@
+/* @flow */
+import AsyncStorage from "@react-native-async-storage/async-storage"
+
+const getNativeStorage = () => {
+  const cacheStorage = {
+    get: async(key) => {
+      try {
+        const res = await AsyncStorage.getItem(key)
+        const val = JSON.parse(res)
+        if (val) return val
+      } catch (error) {
+        // There was an error on the native side
+      }
+    },
+    set: async(key, value) => {
+      try {
+        await AsyncStorage.setItem(key, JSON.stringify(value))
+      } catch (error) {
+        // There was an error on the native side
+      }
+    }
+  }
+
+  return cacheStorage
+}
+
+
+export default getNativeStorage

package/rts/cacheStorage/web.js
ADDED

@@ -0,0 +1,85 @@
+/* @flow */
+const storeName = "store"
+const version = 1
+
+const getWebStorage = (
+  dbName = "rb-query-cache"
+) => {
+  let __db
+
+  const openRequest = indexedDB.open(dbName, version)
+
+  // Handle the creation or upgrade of the database
+  openRequest.onupgradeneeded = (event) => {
+    const db = event.target.result
+    if (!db.objectStoreNames.contains(storeName)) {
+      db.createObjectStore(storeName, { keyPath: "key" })
+    }
+  }
+
+  // Handle errors when opening the database
+  openRequest.onerror = (event) => {
+    console.error("Error opening database:", event.target.errorCode)
+  }
+
+  openRequest.onsuccess = (event) => {
+    const db = event.target.result
+    __db = db
+  }
+
+
+  const cacheStorage = {
+    get: (key) => new Promise((resolve, reject) => {
+      const transaction = __db.transaction([storeName], "readonly")
+      const objectStore = transaction.objectStore(storeName)
+      const request = objectStore.get(key)
+
+      request.onsuccess = (event) => {
+        const obj = event.target.result?.obj
+        resolve(obj)
+      }
+
+      request.onerror = (event) => {
+        console.error("Error reading data:", event.target)
+        reject()
+      }
+
+    }),
+    set: (key, obj) => new Promise((resolve, reject) => {
+
+      const transaction = __db.transaction([storeName], "readwrite")
+      const objectStore = transaction.objectStore(storeName)
+      const request = objectStore.put({obj, key})
+
+      request.onsuccess = (event) => {
+        // console.log("Data written successfully:", event.target.result)
+        resolve()
+      }
+
+      request.onerror = (event) => {
+        // console.error("Error writing data:", event.target.errorCode)
+        console.log("ERRR", event.target)
+        reject()
+      }
+    }),
+    delete: (key) => new Promise((resolve, reject) => {
+      const transaction = __db.transaction([storeName], "readwrite")
+      const objectStore = transaction.objectStore(storeName)
+      const request = objectStore.delete(key)
+
+      request.onsuccess = (event) => {
+        resolve()
+      }
+
+      request.onerror = (event) => {
+        console.error("Error deleting data:", event.target.errorCode)
+        reject()
+      }
+    }),
+  }
+
+  return cacheStorage
+}
+
+
+export default getWebStorage

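Note that `__db` is only assigned inside the open request's `onsuccess` handler, so a `get` or `set` issued before the database finishes opening would throw on `__db.transaction(...)`. One way to close that gap, as a sketch only (not part of the package), is to resolve the handle through a promise and await it in every operation:

const storeName = "store"
const dbReady = new Promise((resolve, reject) => {
  const openRequest = indexedDB.open("rb-query-cache", 1)
  openRequest.onupgradeneeded = (event) => {
    const db = event.target.result
    if (!db.objectStoreNames.contains(storeName)) {
      db.createObjectStore(storeName, {keyPath: "key"})
    }
  }
  openRequest.onsuccess = (event) => resolve(event.target.result)
  openRequest.onerror = (event) => reject(event.target.error)
})

// every operation awaits the handle instead of reading a maybe-unset variable
const get = async(key) => {
  const db = await dbReady
  return new Promise((resolve, reject) => {
    const request = db.transaction([storeName], "readonly").objectStore(storeName).get(key)
    request.onsuccess = () => resolve(request.result?.obj)
    request.onerror = () => reject(request.error)
  })
}
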
package/rts/getUseQuery.js
ADDED

@@ -0,0 +1,177 @@
+/* @flow */
+import assert from "assert"
+import {Platform} from "react-native"
+import {useCallback, useEffect, useState, useMemo, useId, useRef} from "react"
+import debug from "debug"
+import isEqual from "fast-deep-equal/react"
+import _omit from "lodash/omit"
+
+import get_uid from "@rpcbase/client/auth/get_uid"
+
+import cacheStorage from "./cacheStorage"
+
+
+const log = debug("rb:rts:client")
+
+const getUseQuery = (register_query) => (
+  model_name,
+  query = {},
+  options = {},
+) => {
+  const id = useId()
+
+  // TODO: retrieve this from AuthContext in client
+  const uid = useMemo(() => {
+    const _uid = Platform.OS === "web" ? get_uid() : options.userId
+    assert(_uid, "missing uid")
+
+    return _uid
+  }, [])
+
+  const hasFirstReply = useRef(false)
+  const hasNetworkReply = useRef(false)
+  const lastDocRef = useRef(null)
+  // const [page, setPage] = useState(0)
+
+  const [source, setSource] = useState()
+  const [data, setData] = useState()
+  const [error, setError] = useState()
+  const [loading, setLoading] = useState(true)
+
+  const {
+    key = "",
+    projection = {},
+    sort = {},
+  } = options
+
+
+  const storageKey = `${uid}.${key}.${model_name}.${JSON.stringify(query)}.${JSON.stringify(projection)}.${JSON.stringify(sort)}`
+
+
+  useEffect(() => {
+    if (options.debug) {
+      console.log("use query", model_name, query, options)
+    }
+  }, [])
+
+  useEffect(() => {
+    const load = async() => {
+      const val = await cacheStorage.get(storageKey)
+      // console.log("got val from storage", val)
+      // always initially apply when first load here
+      if (val) {
+        setData(val)
+        setLoading(false)
+      }
+    }
+
+    load()
+  }, [storageKey])
+
+  const applyNewData = (newData, context) => {
+    setData(newData)
+    // write data to storage
+    // TODO: is there a better way than doing it on every update ?
+    // should this be throttled
+    cacheStorage.set(storageKey, newData)
+
+    if (newData?.length > 0) {
+      lastDocRef.current = newData[newData.length - 1]
+    }
+  }
+
+  const applyContext = (newContext) => {
+    if (newContext.source !== source) {
+      setSource(newContext.source)
+    }
+  }
+
+  useEffect(() => {
+    const queryKey = key || id
+
+    if (options.debug ) {
+      console.log("register query", model_name, query, options)
+    }
+
+    const start = Date.now()
+    const unsubscribe = register_query(model_name, query, {...options, key: queryKey}, (err, queryResult, context) => {
+      log("callback answer with context", context, queryResult?.length)
+
+      // believe it or not, the network can be faster than indexeddb...
+      if (context.source === "cache" && hasNetworkReply.current) {
+        log("skipping cache arriving later than network")
+        return
+      }
+
+      // mark if we received from network
+      if (context.source === "network" && !hasNetworkReply.current) {
+        hasNetworkReply.current = true
+      }
+
+      if (options.debug) {
+        console.log("query took", Date.now() - start, model_name, query)
+      }
+
+      setLoading(false)
+      if (err) {
+        setError(err)
+        return
+      }
+
+      log("query callback", model_name, queryKey, JSON.stringify(query))
+
+      // return if no data (this should be handled already)
+      if (!queryResult) return
+
+      let newData
+      if (Array.isArray(queryResult)) {
+        newData = queryResult.map((o) => _omit(o, "__txn_id"))
+      } else {
+        newData = _omit(queryResult, "__txn_id")
+      }
+
+      // We return once in any case!
+      if (!hasFirstReply.current) {
+        hasFirstReply.current = true
+
+        // skip if we already have the data
+        if (isEqual(data, newData)) {
+          applyContext(context)
+          return
+        }
+
+        applyContext(context)
+        applyNewData(newData, context)
+        return
+      }
+
+      if (context.is_local && options.skip_local && hasFirstReply.current) {
+        log("skipping local update", key)
+        return
+      }
+
+      if (!isEqual(data, newData)) {
+        applyContext(context)
+        applyNewData(newData, context)
+      }
+    })
+
+    return () => {
+      log && log("useQuery cleanup unsubscribe()")
+      unsubscribe()
+    }
+  }, [JSON.stringify(query), key, storageKey])
+
+
+  const loadNextPage = useCallback(() => {
+    console.log("will load next page after DOC")
+  }, [])
+
+
+  const result = useMemo(() => ({data, source, error, loading}), [data, source, error, loading])
+
+  return result
+}
+
+
+export default getUseQuery

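The hook returns `{data, source, error, loading}` and re-renders as the IndexedDB cache, the local PouchDB store, and network payloads each reply. A hedged usage sketch; the `Todo` model, selector, and the Spinner/ErrorView/List components are hypothetical:

import {useQuery} from "@rpcbase/client/rts"

const TodoList = () => {
  // source stays "cache" until the first network payload arrives
  const {data, source, error, loading} = useQuery("Todo", {done: false}, {key: "open-todos"})

  if (loading) return <Spinner/>
  if (error) return <ErrorView error={error}/>
  return <List items={data} subtitle={`from ${source}`}/>
}
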
package/rts/index.js
ADDED

@@ -0,0 +1,194 @@
+/* @flow */
+import assert from "assert"
+import {io} from "socket.io-client"
+import _get from "lodash/get"
+import _set from "lodash/set"
+import debug from "debug"
+
+import {BASE_URL} from "env"
+
+import store from "./store"
+import getUseQuery from "./getUseQuery"
+
+const log = debug("rb:rts:client")
+
+const MAX_TXN_BUF = 2048
+
+let _socket
+const _callbacks = {}
+const _queries_store = {}
+const _local_txn = []
+
+// TODO: when server disconnects / crashes and loses all server side stored queries
+// the clients must reconnect and re-register those queries, or the page will need to be hard refreshed
+
+
+// add_local_txn
+// when a request is made to the server, we generate (or send if provided) the txn_id
+// if the array becomes longer than the default buffer length we shift the array
+export const add_local_txn = (txn_id) => {
+  assert(txn_id, "add_local_txn trying to add an invalid txn_id")
+  _local_txn.push(txn_id)
+  if (_local_txn.length > MAX_TXN_BUF) {
+    _local_txn.shift()
+  }
+}
+
+
+// TODO: add compression / decompression
+const dispatch_query_payload = (payload) => {
+  log("dispatch_query_payload", payload)
+
+  const {model_name, query_key} = payload
+  // const cb = _get(_callbacks, `${model_name}.${query_key}`)
+  const callbacks = _callbacks[`${model_name}.${query_key}`]
+
+  if (!callbacks || !Array.isArray(callbacks)) {
+    log("dispatch_query_payload", "unable to find callback for query payload", payload)
+    console.error("dispatch_query_payload", "unable to find callback for query payload", payload)
+    return
+  }
+
+  if (payload.error) {
+    console.warn("rts-client, payload error:", payload.error)
+    console.warn("rts-client", "in:", model_name, "query_key:", query_key)
+    callbacks.forEach((cb) => cb(payload.error, undefined, {source: "network"}))
+    return
+  }
+
+  let data
+  try {
+    // TODO: zstd / brotli decompression here
+    data = JSON.parse(payload.data_buf)
+  } catch (err) {
+    console.log("Error", err)
+    log("dispatch_query_payload", "unable to parse or send data from payload:", payload.data_buf)
+  }
+
+  if (!data) {
+    // skipping if data parsing failed
+    return
+  }
+
+  const context = {
+    source: "network",
+    is_local: _local_txn.includes(payload.txn_id),
+    txn_id: payload.txn_id,
+  }
+
+  callbacks.forEach((cb) => cb(null, data, context))
+
+  store.update_docs(model_name, data)
+}
+
+
+export const connect = () => new Promise((resolve) => {
+  log("rts client will connect")
+
+  _socket = io(BASE_URL, {
+    forceNew: true,
+    transports: ["websocket", "polling"],
+    withCredentials: true,
+    // extraHeaders: {},
+    // https://socket.io/docs/v4/client-options/#reconnection
+    reconnection: true,
+    reconnectionAttempts: 128,
+    reconnectionDelay: 400, // ms
+    reconnectionDelayMax: 10 * 1000, // 10s
+  })
+
+  _socket.on("connect", () => {
+    log("socket connected")
+    resolve()
+  })
+
+  _socket.io.on("reconnect", (e) => {
+    log("socked reconnected", e)
+  })
+
+  _socket.on("error", (err) => {
+    log("socket error", err)
+  })
+
+  _socket.on("query_payload", (payload) => {
+    // console.log("socket:query_payload", payload)
+    dispatch_query_payload(payload)
+  })
+
+  _socket.on("delete_doc", (payload) => {
+    log("document deleted", payload)
+  })
+
+  _socket.on("disconnect", (arg) => {
+    log("socket disconnected", arg)
+  })
+})
+
+export const reconnect = () => {
+  log("socket will force reconnect")
+  _socket?.disconnect()
+  setTimeout(() => {
+    _socket?.connect()
+  }, 200)
+}
+
+
+// register a query
+export const register_query = (model_name, query, _options, _callback) => {
+  // left shift args if _options is undefined
+  let options
+  let callback
+  if (_callback) {
+    assert(typeof _callback === "function")
+    options = _options
+    callback = _callback
+  } else {
+    options = {}
+    callback = _options
+  }
+
+  log("register_query", {model_name, query, options, callback})
+
+
+  if (!_socket) {
+    log("register_query: trying to use null socket", {model_name, query})
+    return
+  }
+
+  const key = options.key || ""
+  const query_key = `${key}${JSON.stringify(query)}`
+  log("registering query with key", key, model_name, query, query_key)
+
+  // save callback to update hooks
+  const cb_key = `${model_name}.${query_key}`
+  if (!_callbacks[cb_key] || !Array.isArray(_callbacks[cb_key])) {
+    _callbacks[cb_key] = []
+  }
+  _callbacks[cb_key].push(callback)
+
+  // save query for reconnections and retries
+  if (!_queries_store[model_name]) {
+    _queries_store[model_name] = {}
+  }
+  _queries_store[model_name][query]
+
+  // TODO: why both run and register query here ? the run_query should come straight from register ?
+  _socket.emit("run_query", {model_name, query, query_key, options})
+  _socket.emit("register_query", {model_name, query, query_key, options})
+
+  // run the query from the cache a first time
+  store.run_query({model_name, query, query_key, options}, callback)
+
+  return () => {
+    _socket.emit("remove_query", {model_name, query, query_key, options})
+    // remove callback
+    const cb_index = _callbacks[cb_key].indexOf(callback)
+    if (cb_index > -1) {
+      _callbacks[cb_key].splice(cb_index, 1)
+    } else {
+      console.warn("warning, trying to remove a callback that doesn't exist")
+    }
+  }
+}
+
+export const useQuery = getUseQuery(register_query)

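connect() must resolve before any query is registered, since register_query bails out while the socket is still null. Note also that the bare `_queries_store[model_name][query]` member expression never assigns anything, so queries are not yet persisted for the reconnect scenario described in the TODO at the top of the file. A hedged sketch of the expected call order; the `Todo` model and selector are hypothetical:

import {connect, register_query} from "@rpcbase/client/rts"

await connect() // resolves on the first socket.io "connect" event

// the callback fires once from the local PouchDB cache, then on every network payload
const unsubscribe = register_query("Todo", {done: false}, {key: "open-todos"}, (err, docs, context) => {
  if (err) return console.error(err)
  console.log(`${docs.length} docs from ${context.source}`)
})

// later: removes the server-side query and drops the local callback
unsubscribe()
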
package/rts/store/debug.js
ADDED

@@ -0,0 +1,24 @@
+/* @flow */
+import debug from "debug"
+import PouchDB from "pouchdb-core"
+
+if (debug.enabled("rb:store")) {
+
+  const log = debug("rb:store:pouch")
+
+  log("store debug is enabled")
+
+  const shouldLogAllPouch = debug.enabled("pouch")
+
+  PouchDB.on("debug", (args) => {
+    if (shouldLogAllPouch) {
+      log(...args)
+    }
+    // log find only when not loggin all pouch
+    else {
+      if (args[0] === "find") {
+        log(...args)
+      }
+    }
+  })
+}

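The debug package reads enabled namespaces from localStorage.debug in browsers (and the DEBUG environment variable in Node), so the PouchDB event mirror above only activates when the rb:store namespace is switched on:

// in a browser console, before the app boots:
localStorage.debug = "rb:store,rb:rts:*"  // add "pouch" to mirror every PouchDB event
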
package/rts/store/get_collection.js
ADDED

@@ -0,0 +1,35 @@
+/* @flow */
+import debug from "debug"
+import PouchDB from "pouchdb-core"
+import IndexedDBAdapter from "pouchdb-adapter-indexeddb"
+import FindPlugin from "pouchdb-find"
+
+import {DATABASE_NAME} from "env"
+
+const log = debug("rb:store")
+
+let prefix = "rb/"
+if (DATABASE_NAME) prefix += `${DATABASE_NAME}/`
+
+PouchDB.prefix = prefix
+
+PouchDB.plugin(IndexedDBAdapter)
+PouchDB.plugin(FindPlugin)
+
+
+const _cols_store = Object.create(null)
+
+
+const get_collection = (col_name) => {
+  if (_cols_store[col_name]) {
+    return _cols_store[col_name]
+  } else {
+    const col = new PouchDB(col_name, { adapter: "indexeddb" });
+    _cols_store[col_name] = col
+
+    return col
+  }
+
+}
+
+export default get_collection

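Each model name maps to one memoized PouchDB database, so repeated lookups are cheap. A small sketch inside an async context, with a hypothetical `Todo` model:

import get_collection from "@rpcbase/client/rts/store/get_collection"

const todos = get_collection("Todo")           // opens the PouchDB database on first call
console.log(todos === get_collection("Todo"))  // true: the same memoized instance
const {docs} = await todos.find({selector: {done: false}}) // pouchdb-find query
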
package/rts/store/index.js
ADDED

@@ -0,0 +1,161 @@
+/* @flow */
+import debug from "debug"
+
+import "./debug"
+
+import get_collection from "./get_collection"
+import update_docs from "./update_docs"
+
+const log = debug("rb:store")
+
+// import updateDocument from "./updateDocument"
+// import {DATABASE_NAME} from "env"
+// let prefix = "rb/"
+// if (DATABASE_NAME) prefix += `${DATABASE_NAME}/`
+//
+// PouchDB.prefix = prefix
+//
+// PouchDB.plugin(IndexedDBAdapter)
+// PouchDB.plugin(FindPlugin)
+//
+//
+// let db = new PouchDB(`db/items`, { adapter: "indexeddb" });
+//
+// // Create a new document
+// let doc = {
+//   _id: "001",
+//   message: "Hello, World!"
+// }
+// //
+//
+// const run = async() => {
+//   // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
+//   const res = await db.createIndex({
+//     index: {fields: ["name"]}
+//   })
+//
+//
+//   // Listen for changes on the database
+//   const fn = db.changes({
+//     since: "now",
+//     live: true,
+//     include_docs: true
+//   }).on("change", function(change) {
+//     // handle change
+//     console.log("GOT CHANGE", change)
+//   }).on("error", function (err) {
+//     // handle errors
+//     console.log(err)
+//   })
+//
+//   fn.cancel()
+//
+//   console.log("GOT FN", fn)
+//
+//   console.log("got res", res)
+//
+//   const doc = await db.find({
+//     selector: {name: "Mario"},
+//     sort: ["name"]
+//   })
+//
+//   console.log("GT doc", doc)
+//
+//   const {docs} = await db.find({selector: {}})
+//
+//   console.log("got all docs", docs)
+//
+//   db.put({
+//     _id: "001-" + Date.now(),
+//     message: "Hello, World!"
+//   })
+// }
+//
+//
+// run()

+// // Insert the document into the database
+// db.put(doc, function(err, response) {
+//   if (err) {
+//     return console.log(err);
+//   } else {
+//     console.log("Document created successfully");
+//   }
+// })
+//
+//
+// // Insert the document into the database
+// db.put({
+//   _id: "hello world",
+//   fieldVal: 12,
+// }, {force: true}, async function(err, response) {
+//   if (err) {
+//     console.log("errrro", err);
+//     console.log("Stt", JSON.stringify(err))
+//     return
+//   } else {
+//     console.log("Document created successfully", response);
+//   }
+//
+//   console.log("will try to find:")
+//   const doc = await db.find({
+//     selector: {
+//       fieldVal: 10
+//     }
+//   })
+//   console.log("LEDOC", doc)
+//
+// });
+//
+// setInterval(() => {
+//   // Fetch the document
+//   db.get("001", function(err, doc) {
+//     if (err) {
+//       return console.log(err);
+//     } else {
+//       console.log(doc);
+//     }
+//   });
+// }, 2000)
+//

+// TODO: should the store be in a worker or the main thread ?
+
+const run_query = async({model_name, query, query_key, options}, callback) => {
+  // console.log("ALAAARM")
+  // console.log("run_query", {model_name, query, query_key, options})
+
+  // console.time("store run_query")
+
+  const col = get_collection(model_name)
+
+
+  // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
+  const {docs} = await col.find({
+    // TODO: we should check if the selectors are compatible here
+    selector: query,
+    // DO NOT INCLUDE FIELDS HERE AS IT USES PICK AND WE WOULD GO through the list twice
+    // https://github.com/pouchdb/pouchdb/blob/master/packages/node_modules/pouchdb-find/src/adapters/local/find/index.js
+    // fields: [""]
+  })
+
+
+  const mapped_docs = docs.map(({_rev, ...doc}) => {
+    // TODO: handle projections here
+    const remapped_doc = Object.entries(doc).reduce((new_doc, [key, value]) => {
+      let new_key = key.startsWith('$_') ? key.replace(/^\$_/, "") : key
+      new_doc[new_key] = value
+      return new_doc
+    }, {})
+
+    return remapped_doc
+  })
+
+  // console.log("FOUND INITIAL", mapped_docs)
+  // console.timeEnd("store run_query")
+
+  callback(null, mapped_docs, {source: "cache"})
+}
+
+
+export default {run_query, update_docs}

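run_query drops the PouchDB `_rev` field and reverses the `$_` escaping that update_docs (next file) applies on write, since CouchDB/PouchDB reserves top-level fields beginning with `_`. The round trip, traced on a hypothetical document:

// incoming server doc:     {_id: "a", _ts: 1, name: "x"}
// written to PouchDB:      {_id: "a", $__ts: 1, name: "x", _rev: "1-..."}
// read back by run_query:  {_id: "a", _ts: 1, name: "x"}
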
package/rts/store/update_docs.js
ADDED

@@ -0,0 +1,44 @@
+/* @flow */
+import get_collection from "./get_collection"
+
+
+const update_docs = async(model_name, data) => {
+  const collection = get_collection(model_name)
+
+  const all_ids = data.map((doc) => doc._id)
+
+  // console.log("will update cache", model_name, data, all_ids)
+
+  // console.time("find")
+
+  // TODO:
+  // there is also a bulk get which could have different performance than find, try both
+  // https://pouchdb.com/api.html#bulk_get
+
+  // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
+  const {docs: current_docs} = await collection.find({
+    selector: {_id: {$in: all_ids}},
+    fields: ["_id", "_rev"],
+  })
+
+  // console.log("current_docs", current_docs)
+
+  const revs_map = {}
+  current_docs.forEach((doc) => revs_map[doc._id] = doc._rev)
+
+  const write_docs = data.map((mongo_doc) => {
+    const op = Object.entries(mongo_doc).reduce((new_doc, [key, value]) => {
+      let new_key = key !== "_id" && key.startsWith('_') ? `$_${key}` : key
+      new_doc[new_key] = value
+      return new_doc
+    }, {})
+
+    op._rev = revs_map[mongo_doc._id]
+    return op
+  })
+
+  await collection.bulkDocs(write_docs)
+  // console.timeEnd("find")
+}
+
+export default update_docs
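Before writing, update_docs looks up the current `_rev` for every incoming `_id` so that bulkDocs overwrites existing rows in place instead of raising 409 conflicts; ids not yet in the cache get `_rev: undefined` and are inserted fresh. A hedged call sketch, with a hypothetical `Todo` model and fields:

// docs as delivered by a network payload in dispatch_query_payload
await update_docs("Todo", [
  {_id: "t1", title: "buy milk", done: false},
  {_id: "t2", title: "ship 0.46.0", done: true},
])
// known ids are rewritten at their current _rev; unknown ids become new documents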