@rpcbase/client 0.45.0 → 0.47.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@rpcbase/client",
- "version": "0.45.0",
+ "version": "0.47.0",
  "scripts": {
  "test": "echo \"Error: no test specified\" && exit 0"
  },
package/rts/getUseQuery.js CHANGED
@@ -29,9 +29,11 @@ const getUseQuery = (register_query) => (
  }, [])

  const hasFirstReply = useRef(false)
+ const hasNetworkReply = useRef(false)
  const lastDocRef = useRef(null)
  // const [page, setPage] = useState(0)

+ const [source, setSource] = useState()
  const [data, setData] = useState()
  const [error, setError] = useState()
  const [loading, setLoading] = useState(true)
@@ -55,6 +57,7 @@ const getUseQuery = (register_query) => (
  useEffect(() => {
  const load = async() => {
  const val = await cacheStorage.get(storageKey)
+ // console.log("got val from storage", val)
  // always initially apply when first load here
  if (val) {
  setData(val)
@@ -65,9 +68,8 @@ const getUseQuery = (register_query) => (
  load()
  }, [storageKey])

- const applyNewData = (newData) => {
+ const applyNewData = (newData, context) => {
  setData(newData)
-
  // write data to storage
  // TODO: is there a better way than doing it on every update ?
  // should this be throttled
@@ -78,6 +80,12 @@ const getUseQuery = (register_query) => (
  }
  }

+ const applyContext = (newContext) => {
+ if (newContext.source !== source) {
+ setSource(newContext.source)
+ }
+ }
+
  useEffect(() => {
  const queryKey = key || id

@@ -87,8 +95,20 @@ const getUseQuery = (register_query) => (

  const start = Date.now()
  const unsubscribe = register_query(model_name, query, {...options, key: queryKey}, (err, queryResult, context) => {
+ log("callback answer with context", context, queryResult?.length)
+
+ // believe it or not, the network can be faster than indexeddb...
+ if (context.source === "cache" && hasNetworkReply.current) {
+ log("skipping cache arriving later than network")
+ return
+ }

- if (options.debug ) {
+ // mark if we received from network
+ if (context.source === "network" && !hasNetworkReply.current) {
+ hasNetworkReply.current = true
+ }
+
+ if (options.debug) {
  console.log("query took", Date.now() - start, model_name, query)
  }

@@ -115,19 +135,24 @@ const getUseQuery = (register_query) => (
  hasFirstReply.current = true

  // skip if we already have the data
- if (isEqual(data, newData)) return
+ if (isEqual(data, newData)) {
+ applyContext(context)
+ return
+ }

- applyNewData(newData)
+ applyContext(context)
+ applyNewData(newData, context)
  return
  }

  if (context.is_local && options.skip_local && hasFirstReply.current) {
- // console.log("skipping local update", key)
+ log("skipping local update", key)
  return
  }

  if (!isEqual(data, newData)) {
- applyNewData(newData)
+ applyContext(context)
+ applyNewData(newData, context)
  }
  })

@@ -143,11 +168,10 @@ const getUseQuery = (register_query) => (
  }, [])


- const result = useMemo(() => ({data, error, loading}), [data, error, loading])
+ const result = useMemo(() => ({data, source, error, loading}), [data, source, error, loading])

  return result
  }


-
  export default getUseQuery
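The hunks above change the query hook (imported by package/rts/index.js as "./getUseQuery") so that its result now also reports where the latest data came from, via a source field taken from the callback context ("cache" or "network"), and so that a stale cache reply arriving after a network reply is ignored. A minimal sketch of how a consumer might read that field; the exact hook signature is not visible in this diff, so the (model_name, query, options) call shape, the import paths, the "items" model and the item fields below are assumptions:

// sketch only: import paths and the "items" model are assumptions
import React from "react"
import {register_query} from "@rpcbase/client"              // exact export path assumed
import getUseQuery from "@rpcbase/client/rts/getUseQuery"   // exact export path assumed

// the factory wires the socket client into the hook, as in the diff above
const useQuery = getUseQuery(register_query)

const ItemList = () => {
  // exact hook arguments are assumed; the diff only shows key/id, debug and skip_local options
  const {data, source, error, loading} = useQuery("items", {done: false}, {key: "todo"})

  if (loading) return <span>loading...</span>
  if (error) return <span>query failed</span>

  return (
    <div>
      {/* source is "cache" until a network reply has been applied, then "network" */}
      <small>{source === "cache" ? "showing cached data" : "live"}</small>
      <ul>{(data || []).map((item) => <li key={item._id}>{item.name}</li>)}</ul>
    </div>
  )
}

export default ItemList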
package/rts/index.js CHANGED
@@ -7,6 +7,7 @@ import debug from "debug"

  import {BASE_URL} from "env"

+ import store from "./store"
  import getUseQuery from "./getUseQuery"

  const log = debug("rb:rts:client")
@@ -15,7 +16,7 @@ const MAX_TXN_BUF = 2048

  let _socket
  const _callbacks = {}
- const _queries_storage = {}
+ const _queries_store = {}
  const _local_txn = []

  // TODO: when server disconnects / crashes and loses all server side stored queries
@@ -51,13 +52,13 @@ const dispatch_query_payload = (payload) => {
  if (payload.error) {
  console.warn("rts-client, payload error:", payload.error)
  console.warn("rts-client", "in:", model_name, "query_key:", query_key)
- callbacks.forEach((cb) => cb(payload.error))
+ callbacks.forEach((cb) => cb(payload.error, undefined, {source: "network"}))
  return
  }

  let data
  try {
- // TODO: zstd decompression here
+ // TODO: zstd / brotli decompression here
  data = JSON.parse(payload.data_buf)
  } catch (err) {
  console.log("Error", err)
@@ -70,11 +71,14 @@ const dispatch_query_payload = (payload) => {
  }

  const context = {
+ source: "network",
  is_local: _local_txn.includes(payload.txn_id),
  txn_id: payload.txn_id,
  }

  callbacks.forEach((cb) => cb(null, data, context))
+
+ store.update_docs(model_name, data)
  }


@@ -143,15 +147,17 @@ export const register_query = (model_name, query, _options, _callback) => {
  callback = _options
  }

+ log("register_query", {model_name, query, options, callback})
+

  if (!_socket) {
- log("trying to use null socket", {model_name, query})
+ log("register_query: trying to use null socket", {model_name, query})
  return
  }

  const key = options.key || ""
- log("registering query with key", key, model_name, query)
  const query_key = `${key}${JSON.stringify(query)}`
+ log("registering query with key", key, model_name, query, query_key)

  // save callback to update hooks
  const cb_key = `${model_name}.${query_key}`
@@ -161,16 +167,20 @@ export const register_query = (model_name, query, _options, _callback) => {
  _callbacks[cb_key].push(callback)

  // save query for reconnections and retries
- if (!_queries_storage[model_name]) {
- _queries_storage[model_name] = {}
+ if (!_queries_store[model_name]) {
+ _queries_store[model_name] = {}
  }
- _queries_storage[model_name][query]
+ _queries_store[model_name][query]

+ // TODO: why both run and register query here ? the run_query should come straight from register ?
  _socket.emit("run_query", {model_name, query, query_key, options})
  _socket.emit("register_query", {model_name, query, query_key, options})

+ // run the query from the cache a first time
+ store.run_query({model_name, query, query_key, options}, callback)
+
  return () => {
- _socket.emit("remove_query", {model_name, query})
+ _socket.emit("remove_query", {model_name, query, query_key, options})
  // remove callback
  const cb_index = _callbacks[cb_key].indexOf(callback)
  if (cb_index > -1) {
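With the store wired in, a registered query now typically answers its callback twice on a cold start: once from the local PouchDB cache with {source: "cache"}, then from the socket with {source: "network", is_local, txn_id}; network payloads are also written back into the cache via store.update_docs. A rough sketch of calling register_query directly, where the import path, the "items" model and the selector are placeholders:

// sketch only; the import path and the "items" model/selector are placeholders
import {register_query} from "@rpcbase/client"   // exact export path assumed

const unsubscribe = register_query("items", {done: false}, {key: "todo-list"}, (err, docs, context) => {
  if (err) {
    console.error("query failed", err)
    return
  }
  // context.source is "cache" for the local PouchDB read and "network" for socket payloads;
  // network payloads also carry is_local / txn_id for transactions originated by this client
  console.log(context.source, docs.length)
})

// later: drop the callback and ask the server to remove the query
// (register_query returns undefined when the socket is not connected yet)
if (unsubscribe) unsubscribe()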
@@ -0,0 +1,24 @@
+ /* @flow */
+ import debug from "debug"
+ import PouchDB from "pouchdb-core"
+
+ if (debug.enabled("rb:store")) {
+
+ const log = debug("rb:store:pouch")
+
+ log("store debug is enabled")
+
+ const shouldLogAllPouch = debug.enabled("pouch")
+
+ PouchDB.on("debug", (args) => {
+ if (shouldLogAllPouch) {
+ log(...args)
+ }
+ // log find only when not loggin all pouch
+ else {
+ if (args[0] === "find") {
+ log(...args)
+ }
+ }
+ })
+ }
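This first new file (imported by the store module as "./debug") only attaches a PouchDB "debug" listener when the rb:store namespace is enabled in the debug package. In a browser that is toggled through debug's localStorage switch; a small sketch, assuming the app runs in a browser context:

// enable store logging from the devtools console (the debug package reads localStorage.debug)
localStorage.debug = "rb:store,rb:store:*"
// append ",pouch" to also forward every PouchDB debug event instead of only "find"
// reload the page afterwards so the debug package picks up the new setting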
@@ -0,0 +1,35 @@
+ /* @flow */
+ import debug from "debug"
+ import PouchDB from "pouchdb-core"
+ import IndexedDBAdapter from "pouchdb-adapter-indexeddb"
+ import FindPlugin from "pouchdb-find"
+
+ import {DATABASE_NAME} from "env"
+
+ const log = debug("rb:store")
+
+ let prefix = "rb/"
+ if (DATABASE_NAME) prefix += `${DATABASE_NAME}/`
+
+ PouchDB.prefix = prefix
+
+ PouchDB.plugin(IndexedDBAdapter)
+ PouchDB.plugin(FindPlugin)
+
+
+ const _cols_store = Object.create(null)
+
+
+ const get_collection = (col_name) => {
+ if (_cols_store[col_name]) {
+ return _cols_store[col_name]
+ } else {
+ const col = new PouchDB(col_name, { adapter: "indexeddb" });
+ _cols_store[col_name] = col
+
+ return col
+ }
+
+ }
+
+ export default get_collection
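This new file (imported as "./get_collection") configures PouchDB with the indexeddb adapter and pouchdb-find, sets an "rb/"-based name prefix, and memoizes one database per model name. A tiny usage sketch; the "items" name is a placeholder:

// sketch only; "items" is a placeholder model name
import get_collection from "./get_collection"   // path within the package assumed

const a = get_collection("items") // first call creates the PouchDB instance (indexeddb adapter)
const b = get_collection("items") // later calls return the same instance from _cols_store
console.log(a === b)              // true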
@@ -0,0 +1,156 @@
+ /* @flow */
+ import debug from "debug"
+
+ import "./debug"
+
+ import get_collection from "./get_collection"
+ import update_docs from "./update_docs"
+
+ const log = debug("rb:store")
+
+ // import updateDocument from "./updateDocument"
+ // import {DATABASE_NAME} from "env"
+ // let prefix = "rb/"
+ // if (DATABASE_NAME) prefix += `${DATABASE_NAME}/`
+ //
+ // PouchDB.prefix = prefix
+ //
+ // PouchDB.plugin(IndexedDBAdapter)
+ // PouchDB.plugin(FindPlugin)
+ //
+ //
+ // let db = new PouchDB(`db/items`, { adapter: "indexeddb" });
+ //
+ // // Create a new document
+ // let doc = {
+ // _id: "001",
+ // message: "Hello, World!"
+ // }
+ // //
+ //
+ // const run = async() => {
+ // // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
+ // const res = await db.createIndex({
+ // index: {fields: ["name"]}
+ // })
+ //
+ //
+ // // Listen for changes on the database
+ // const fn = db.changes({
+ // since: "now",
+ // live: true,
+ // include_docs: true
+ // }).on("change", function(change) {
+ // // handle change
+ // console.log("GOT CHANGE", change)
+ // }).on("error", function (err) {
+ // // handle errors
+ // console.log(err)
+ // })
+ //
+ // fn.cancel()
+ //
+ // console.log("GOT FN", fn)
+ //
+ // console.log("got res", res)
+ //
+ // const doc = await db.find({
+ // selector: {name: "Mario"},
+ // sort: ["name"]
+ // })
+ //
+ // console.log("GT doc", doc)
+ //
+ // const {docs} = await db.find({selector: {}})
+ //
+ // console.log("got all docs", docs)
+ //
+ // db.put({
+ // _id: "001-" + Date.now(),
+ // message: "Hello, World!"
+ // })
+ // }
+ //
+ //
+ // run()
+
+ // // Insert the document into the database
+ // db.put(doc, function(err, response) {
+ // if (err) {
+ // return console.log(err);
+ // } else {
+ // console.log("Document created successfully");
+ // }
+ // })
+ //
+ //
+ // // Insert the document into the database
+ // db.put({
+ // _id: "hello world",
+ // fieldVal: 12,
+ // }, {force: true}, async function(err, response) {
+ // if (err) {
+ // console.log("errrro", err);
+ // console.log("Stt", JSON.stringify(err))
+ // return
+ // } else {
+ // console.log("Document created successfully", response);
+ // }
+ //
+ // console.log("will try to find:")
+ // const doc = await db.find({
+ // selector: {
+ // fieldVal: 10
+ // }
+ // })
+ // console.log("LEDOC", doc)
+ //
+ // });
+ //
+ // setInterval(() => {
+ // // Fetch the document
+ // db.get("001", function(err, doc) {
+ // if (err) {
+ // return console.log(err);
+ // } else {
+ // console.log(doc);
+ // }
+ // });
+ // }, 2000)
+ //
+
+ // TODO: should the store be in a worker or the main thread ?
+
+ const run_query = async({model_name, query, query_key, options}, callback) => {
+ // console.log("ALAAARM")
+ // console.log("run_query", {model_name, query, query_key, options})
+ // console.time("store run_query")
+ const col = get_collection(model_name)
+
+ // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
+ const {docs} = await col.find({
+ // TODO: we should check if the selectors are compatible here
+ selector: query,
+ // DO NOT INCLUDE FIELDS HERE AS IT USES PICK AND WE WOULD GO through the list twice
+ // https://github.com/pouchdb/pouchdb/blob/master/packages/node_modules/pouchdb-find/src/adapters/local/find/index.js
+ // fields: [""]
+ })
+
+ const mapped_docs = docs.map(({_rev, ...doc}) => {
+ // TODO: handle projections here
+ const remapped_doc = Object.entries(doc).reduce((new_doc, [key, value]) => {
+ let new_key = key.startsWith('$_') ? key.replace(/^\$_/, "") : key
+ new_doc[new_key] = value
+ return new_doc
+ }, {})
+
+ return remapped_doc
+ })
+
+ // console.timeEnd("store run_query")
+
+ callback(null, mapped_docs, {source: "cache"})
+ }
+
+
+ export default {run_query, update_docs}
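This larger new file appears to be the store module itself (the one package/rts/index.js imports as "./store"): run_query hands the Mongo-style selector straight to pouchdb-find on the per-model database, strips the "$_" field prefix from the results, and answers the callback with {source: "cache"}. A direct call might look like the sketch below; the "items" model, the selector and the import path are placeholders:

// sketch only; model name, selector and import path are placeholders
import store from "./store"

store.run_query(
  {model_name: "items", query: {done: false}, query_key: "todo", options: {}},
  (err, docs, context) => {
    // err is always null on this code path; context.source === "cache"
    console.log("cache answered with", docs.length, "docs via", context.source)
  }
)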
@@ -0,0 +1,44 @@
+ /* @flow */
+ import get_collection from "./get_collection"
+
+
+ const update_docs = async(model_name, data) => {
+ const collection = get_collection(model_name)
+
+ const all_ids = data.map((doc) => doc._id)
+
+ // console.log("will update cache", model_name, data, all_ids)
+
+ // console.time("find")
+
+ // TODO:
+ // there is also a bulk get which could have different performance than find, try both
+ // https://pouchdb.com/api.html#bulk_get
+
+ // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
+ const {docs: current_docs} = await collection.find({
+ selector: {_id: {$in: all_ids}},
+ fields: ["_id", "_rev"],
+ })
+
+ // console.log("current_docs", current_docs)
+
+ const revs_map = {}
+ current_docs.forEach((doc) => revs_map[doc._id] = doc._rev)
+
+ const write_docs = data.map((mongo_doc) => {
+ const op = Object.entries(mongo_doc).reduce((new_doc, [key, value]) => {
+ let new_key = key !== "_id" && key.startsWith('_') ? `$_${key}` : key
+ new_doc[new_key] = value
+ return new_doc
+ }, {})
+
+ op._rev = revs_map[mongo_doc._id]
+ return op
+ })
+
+ await collection.bulkDocs(write_docs)
+ // console.timeEnd("find")
+ }
+
+ export default update_docs
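update_docs and run_query share a small field-name convention: because PouchDB reserves underscore-prefixed fields, every incoming field other than _id that starts with "_" is stored under a "$_" prefix, and run_query strips that prefix again when reading from the cache (the current _rev is looked up first so bulkDocs updates existing documents instead of conflicting). A standalone sketch of the two transforms; the example document and its _updated_at field are made up:

// sketch of the remapping used above; the sample document is made up
const to_pouch = (mongo_doc) =>
  Object.entries(mongo_doc).reduce((new_doc, [key, value]) => {
    new_doc[key !== "_id" && key.startsWith("_") ? `$_${key}` : key] = value
    return new_doc
  }, {})

const from_pouch = ({_rev, ...pouch_doc}) =>
  Object.entries(pouch_doc).reduce((new_doc, [key, value]) => {
    new_doc[key.startsWith("$_") ? key.replace(/^\$_/, "") : key] = value
    return new_doc
  }, {})

const mongo_doc = {_id: "001", name: "Mario", _updated_at: 1700000000}
const stored = to_pouch(mongo_doc)                      // {_id: "001", name: "Mario", $_updated_at: 1700000000}
const restored = from_pouch({...stored, _rev: "1-abc"}) // round-trips back to the original shape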