@rpcbase/client 0.44.0 → 0.45.0
- package/package.json +6 -2
- package/rpc_post.js +1 -1
- package/rts/boot.js +5 -0
- package/rts/cacheStorage/index.js +12 -0
- package/rts/cacheStorage/native.js +28 -0
- package/rts/cacheStorage/web.js +85 -0
- package/rts/getUseQuery.js +153 -0
- package/rts/index.js +184 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@rpcbase/client",
-  "version": "0.44.0",
+  "version": "0.45.0",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 0"
   },
@@ -10,6 +10,10 @@
     "i18next-chained-backend": "4.4.0",
     "i18next-resources-to-backend": "1.1.4",
     "lodash": "4.17.21",
-    "
+    "pouchdb-adapter-indexeddb": "8.0.1",
+    "pouchdb-core": "8.0.1",
+    "pouchdb-find": "8.0.1",
+    "react-i18next": "13.0.1",
+    "socket.io-client": "4.7.1"
   }
 }
package/rpc_post.js
CHANGED
@@ -5,7 +5,7 @@ import _get from "lodash/get"
 import _set from "lodash/set"
 
 import get_txn_id from "@rpcbase/std/get_txn_id"
-import {add_local_txn} from "@rpcbase/
+import {add_local_txn} from "@rpcbase/client/rts"
 
 import {BASE_URL} from "env"
 
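The only change here is the import path for add_local_txn, which now resolves from the new package/rts/index.js. As a hedged sketch (the caller code below is illustrative, not part of the package), the apparent pattern is that rpc_post obtains a transaction id via get_txn_id and records it with add_local_txn before posting, so the rts layer can later mark echoed query payloads with context.is_local:

import get_txn_id from "@rpcbase/std/get_txn_id"
import {add_local_txn} from "@rpcbase/client/rts"

// hypothetical helper: generate a txn id, remember it locally, send it with the request
const post_with_txn = async (url, body) => {
  const txn_id = get_txn_id()
  add_local_txn(txn_id)
  return fetch(url, {
    method: "POST",
    headers: {"Content-Type": "application/json"},
    body: JSON.stringify({...body, txn_id}),
  })
}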
package/rts/boot.js
ADDED
package/rts/cacheStorage/index.js
ADDED
@@ -0,0 +1,12 @@
+/* @flow */
+import {Platform} from "react-native"
+
+import getWebStorage from "./web"
+import getNativeStorage from "./native"
+
+
+// TODO: add support for storage expiration
+
+const storage = Platform.OS === "web" ? getWebStorage() : getNativeStorage()
+
+export default storage
package/rts/cacheStorage/native.js
ADDED
@@ -0,0 +1,28 @@
+/* @flow */
+import AsyncStorage from "@react-native-async-storage/async-storage"
+
+const getNativeStorage = () => {
+  const cacheStorage = {
+    get: async(key) => {
+      try {
+        const res = await AsyncStorage.getItem(key)
+        const val = JSON.parse(res)
+        if (val) return val
+      } catch (error) {
+        // There was an error on the native side
+      }
+    },
+    set: async(key, value) => {
+      try {
+        await AsyncStorage.setItem(key, JSON.stringify(value))
+      } catch (error) {
+        // There was an error on the native side
+      }
+    }
+  }
+
+  return cacheStorage
+}
+
+
+export default getNativeStorage
package/rts/cacheStorage/web.js
ADDED
@@ -0,0 +1,85 @@
+/* @flow */
+const storeName = "store"
+const version = 1
+
+const getWebStorage = (
+  dbName = "rb-query-cache"
+) => {
+  let __db
+
+  const openRequest = indexedDB.open(dbName, version)
+
+  // Handle the creation or upgrade of the database
+  openRequest.onupgradeneeded = (event) => {
+    const db = event.target.result
+    if (!db.objectStoreNames.contains(storeName)) {
+      db.createObjectStore(storeName, { keyPath: "key" })
+    }
+  }
+
+  // Handle errors when opening the database
+  openRequest.onerror = (event) => {
+    console.error("Error opening database:", event.target.errorCode)
+  }
+
+  openRequest.onsuccess = (event) => {
+    const db = event.target.result
+    __db = db
+  }
+
+
+  const cacheStorage = {
+    get: (key) => new Promise((resolve, reject) => {
+      const transaction = __db.transaction([storeName], "readonly")
+      const objectStore = transaction.objectStore(storeName)
+      const request = objectStore.get(key)
+
+      request.onsuccess = (event) => {
+        const obj = event.target.result?.obj
+        resolve(obj)
+      }
+
+      request.onerror = (event) => {
+        console.error("Error reading data:", event.target)
+        reject()
+      }
+
+    }),
+    set: (key, obj) => new Promise((resolve, reject) => {
+
+      const transaction = __db.transaction([storeName], "readwrite")
+      const objectStore = transaction.objectStore(storeName)
+      const request = objectStore.put({obj, key})
+
+      request.onsuccess = (event) => {
+        // console.log("Data written successfully:", event.target.result)
+        resolve()
+      }
+
+      request.onerror = (event) => {
+        // console.error("Error writing data:", event.target.errorCode)
+        console.log("ERRR", event.target)
+        reject()
+      }
+    }),
+    delete: (key) => new Promise((resolve, reject) => {
+      const transaction = __db.transaction([storeName], "readwrite")
+      const objectStore = transaction.objectStore(storeName)
+      const request = objectStore.delete(key)
+
+      request.onsuccess = (event) => {
+        resolve()
+      }
+
+      request.onerror = (event) => {
+        console.error("Error deleting data:", event.target.errorCode)
+        reject()
+      }
+    }),
+  }
+
+  return cacheStorage
+}
+
+
+export default getWebStorage
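Both storage adapters return an object with the same promise-based get/set surface (the web adapter additionally implements delete), which is what lets getUseQuery.js stay platform-agnostic. A rough usage sketch, assuming the module resolves as @rpcbase/client/rts/cacheStorage and, on web, that the IndexedDB open request has already succeeded:

import cacheStorage from "@rpcbase/client/rts/cacheStorage"

const warmCache = async () => {
  await cacheStorage.set("example-key", {hello: "world"})
  const cached = await cacheStorage.get("example-key")
  console.log(cached) // {hello: "world"}
}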
package/rts/getUseQuery.js
ADDED
@@ -0,0 +1,153 @@
+/* @flow */
+import assert from "assert"
+import {Platform} from "react-native"
+import {useCallback, useEffect, useState, useMemo, useId, useRef} from "react"
+import debug from "debug"
+import isEqual from "fast-deep-equal/react"
+import _omit from "lodash/omit"
+
+import get_uid from "@rpcbase/client/auth/get_uid"
+
+import cacheStorage from "./cacheStorage"
+
+
+const log = debug("rb:rts:client")
+
+const getUseQuery = (register_query) => (
+  model_name,
+  query = {},
+  options = {},
+) => {
+  const id = useId()
+
+  // TODO: retrieve this from AuthContext in client
+  const uid = useMemo(() => {
+    const _uid = Platform.OS === "web" ? get_uid() : options.userId
+    assert(_uid, "missing uid")
+
+    return _uid
+  }, [])
+
+  const hasFirstReply = useRef(false)
+  const lastDocRef = useRef(null)
+  // const [page, setPage] = useState(0)
+
+  const [data, setData] = useState()
+  const [error, setError] = useState()
+  const [loading, setLoading] = useState(true)
+
+  const {
+    key = "",
+    projection = {},
+    sort = {},
+  } = options
+
+
+  const storageKey = `${uid}.${key}.${model_name}.${JSON.stringify(query)}.${JSON.stringify(projection)}.${JSON.stringify(sort)}`
+
+
+  useEffect(() => {
+    if (options.debug) {
+      console.log("use query", model_name, query, options)
+    }
+  }, [])
+
+  useEffect(() => {
+    const load = async() => {
+      const val = await cacheStorage.get(storageKey)
+      // always initially apply when first load here
+      if (val) {
+        setData(val)
+        setLoading(false)
+      }
+    }
+
+    load()
+  }, [storageKey])
+
+  const applyNewData = (newData) => {
+    setData(newData)
+
+    // write data to storage
+    // TODO: is there a better way than doing it on every update ?
+    // should this be throttled
+    cacheStorage.set(storageKey, newData)
+
+    if (newData?.length > 0) {
+      lastDocRef.current = newData[newData.length - 1]
+    }
+  }
+
+  useEffect(() => {
+    const queryKey = key || id
+
+    if (options.debug ) {
+      console.log("register query", model_name, query, options)
+    }
+
+    const start = Date.now()
+    const unsubscribe = register_query(model_name, query, {...options, key: queryKey}, (err, queryResult, context) => {
+
+      if (options.debug ) {
+        console.log("query took", Date.now() - start, model_name, query)
+      }
+
+      setLoading(false)
+      if (err) {
+        setError(err)
+        return
+      }
+
+      log("query callback", model_name, queryKey, JSON.stringify(query))
+
+      // return if no data (this should be handled already)
+      if (!queryResult) return
+
+      let newData
+      if (Array.isArray(queryResult)) {
+        newData = queryResult.map((o) => _omit(o, "__txn_id"))
+      } else {
+        newData = _omit(queryResult, "__txn_id")
+      }
+
+      // We return once in any case!
+      if (!hasFirstReply.current) {
+        hasFirstReply.current = true
+
+        // skip if we already have the data
+        if (isEqual(data, newData)) return
+
+        applyNewData(newData)
+        return
+      }
+
+      if (context.is_local && options.skip_local && hasFirstReply.current) {
+        // console.log("skipping local update", key)
+        return
+      }
+
+      if (!isEqual(data, newData)) {
+        applyNewData(newData)
+      }
+    })
+
+    return () => {
+      log && log("useQuery cleanup unsubscribe()")
+      unsubscribe()
+    }
+  }, [JSON.stringify(query), key, storageKey])
+
+
+  const loadNextPage = useCallback(() => {
+    console.log("will load next page after DOC")
+  }, [])
+
+
+  const result = useMemo(() => ({data, error, loading}), [data, error, loading])
+
+  return result
+}
+
+
+
+export default getUseQuery
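getUseQuery only builds the hook; package/rts/index.js below binds it to register_query and exports it as useQuery. A hedged usage sketch of the resulting hook (model name, query, fields, and options are illustrative, not taken from the package):

import {Text} from "react-native"
import {useQuery} from "@rpcbase/client/rts"

const OpenTodos = () => {
  // subscribes via register_query and caches results under the derived storageKey
  const {data, error, loading} = useQuery(
    "todos",                                                    // model_name
    {done: false},                                              // query
    {key: "open-todos", sort: {created_at: -1}, userId: "u1"},  // options (userId is only read on native)
  )

  if (loading) return null
  if (error) return <Text>query failed</Text>
  return <>{(data || []).map((t) => <Text key={t._id}>{t.title}</Text>)}</>
}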
package/rts/index.js
ADDED
@@ -0,0 +1,184 @@
+/* @flow */
+import assert from "assert"
+import {io} from "socket.io-client"
+import _get from "lodash/get"
+import _set from "lodash/set"
+import debug from "debug"
+
+import {BASE_URL} from "env"
+
+import getUseQuery from "./getUseQuery"
+
+const log = debug("rb:rts:client")
+
+const MAX_TXN_BUF = 2048
+
+let _socket
+const _callbacks = {}
+const _queries_storage = {}
+const _local_txn = []
+
+// TODO: when server disconnects / crashes and loses all server side stored queries
+// the clients must reconnect and re-register those queries, or the page will need to be hard refreshed
+
+
+// add_local_txn
+// when a request is made to the server, we generate (or send if provided) the txn_id
+// if the array becomes longer than the default buffer length we shift the array
+export const add_local_txn = (txn_id) => {
+  assert(txn_id, "add_local_txn trying to add an invalid txn_id")
+  _local_txn.push(txn_id)
+  if (_local_txn.length > MAX_TXN_BUF) {
+    _local_txn.shift()
+  }
+}
+
+
+// TODO: add compression / decompression
+const dispatch_query_payload = (payload) => {
+  log("dispatch_query_payload", payload)
+
+  const {model_name, query_key} = payload
+  // const cb = _get(_callbacks, `${model_name}.${query_key}`)
+  const callbacks = _callbacks[`${model_name}.${query_key}`]
+
+  if (!callbacks || !Array.isArray(callbacks)) {
+    log("dispatch_query_payload", "unable to find callback for query payload", payload)
+    console.error("dispatch_query_payload", "unable to find callback for query payload", payload)
+    return
+  }
+
+  if (payload.error) {
+    console.warn("rts-client, payload error:", payload.error)
+    console.warn("rts-client", "in:", model_name, "query_key:", query_key)
+    callbacks.forEach((cb) => cb(payload.error))
+    return
+  }
+
+  let data
+  try {
+    // TODO: zstd decompression here
+    data = JSON.parse(payload.data_buf)
+  } catch (err) {
+    console.log("Error", err)
+    log("dispatch_query_payload", "unable to parse or send data from payload:", payload.data_buf)
+  }
+
+  if (!data) {
+    // skipping if data parsing failed
+    return
+  }
+
+  const context = {
+    is_local: _local_txn.includes(payload.txn_id),
+    txn_id: payload.txn_id,
+  }
+
+  callbacks.forEach((cb) => cb(null, data, context))
+}
+
+
+export const connect = () => new Promise((resolve) => {
+  log("rts client will connect")
+
+  _socket = io(BASE_URL, {
+    forceNew: true,
+    transports: ["websocket", "polling"],
+    withCredentials: true,
+    // extraHeaders: {},
+    // https://socket.io/docs/v4/client-options/#reconnection
+    reconnection: true,
+    reconnectionAttempts: 128,
+    reconnectionDelay: 400, // ms
+    reconnectionDelayMax: 10 * 1000, // 10s
+  })
+
+  _socket.on("connect", () => {
+    log("socket connected")
+    resolve()
+  })
+
+  _socket.io.on("reconnect", (e) => {
+    log("socked reconnected", e)
+  })
+
+  _socket.on("error", (err) => {
+    log("socket error", err)
+  })
+
+  _socket.on("query_payload", (payload) => {
+    // console.log("socket:query_payload", payload)
+    dispatch_query_payload(payload)
+  })
+
+  _socket.on("delete_doc", (payload) => {
+    log("document deleted", payload)
+  })
+
+  _socket.on("disconnect", (arg) => {
+    log("socket disconnected", arg)
+  })
+})
+
+export const reconnect = () => {
+  log("socket will force reconnect")
+  _socket?.disconnect()
+  setTimeout(() => {
+    _socket?.connect()
+  }, 200)
+}
+
+
+// register a query
+export const register_query = (model_name, query, _options, _callback) => {
+  // left shift args if _options is undefined
+  let options
+  let callback
+  if (_callback) {
+    assert(typeof _callback === "function")
+    options = _options
+    callback = _callback
+  } else {
+    options = {}
+    callback = _options
+  }
+
+
+  if (!_socket) {
+    log("trying to use null socket", {model_name, query})
+    return
+  }
+
+  const key = options.key || ""
+  log("registering query with key", key, model_name, query)
+  const query_key = `${key}${JSON.stringify(query)}`
+
+  // save callback to update hooks
+  const cb_key = `${model_name}.${query_key}`
+  if (!_callbacks[cb_key] || !Array.isArray(_callbacks[cb_key])) {
+    _callbacks[cb_key] = []
+  }
+  _callbacks[cb_key].push(callback)
+
+  // save query for reconnections and retries
+  if (!_queries_storage[model_name]) {
+    _queries_storage[model_name] = {}
+  }
+  _queries_storage[model_name][query]
+
+  _socket.emit("run_query", {model_name, query, query_key, options})
+  _socket.emit("register_query", {model_name, query, query_key, options})
+
+  return () => {
+    _socket.emit("remove_query", {model_name, query})
+    // remove callback
+    const cb_index = _callbacks[cb_key].indexOf(callback)
+    if (cb_index > -1) {
+      _callbacks[cb_key].splice(cb_index, 1)
+    } else {
+      console.warn("warning, trying to remove a callback that doesn't exist")
+    }
+  }
+}
+
+export const useQuery = getUseQuery(register_query)