@rpcbase/client 0.94.0 → 0.96.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/jest.config.js ADDED
@@ -0,0 +1,17 @@
+ /* @flow */
+
+ module.exports = {
+   testEnvironment: "jsdom",
+   transform: {
+     "^.+\\.js$": ["babel-jest", {
+       presets: ["@babel/preset-react"],
+       plugins: [
+         "@babel/plugin-transform-runtime",
+         "@babel/plugin-transform-modules-commonjs"
+       ],
+     }],
+   },
+   moduleNameMapper: {
+     "^react-native$": "react-native-web",
+   },
+ }
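Note: the new Jest config runs the suite under jsdom and aliases react-native to react-native-web, which is what lets code that branches on Platform.OS === "web" be exercised in plain Node. A minimal sketch of a test that relies on that mapping (this file is illustrative, not part of the package):

  /* any *.js test file picked up by the babel-jest transform above */
  import {Platform} from "react-native" // resolved to react-native-web via moduleNameMapper

  test("Platform.OS reports web under the jsdom environment", () => {
    expect(Platform.OS).toBe("web")
  })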
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@rpcbase/client",
-   "version": "0.94.0",
+   "version": "0.96.0",
    "scripts": {
      "build-firebase": "webpack -c firebase/webpack.config.js",
      "build": "yarn build-firebase",
@@ -20,8 +20,21 @@
      "socket.io-client": "4.7.2"
    },
    "devDependencies": {
+     "@babel/core": "7.23.0",
+     "@babel/plugin-transform-modules-commonjs": "7.23.0",
+     "@babel/plugin-transform-runtime": "7.22.15",
+     "@babel/preset-react": "7.22.15",
+     "@testing-library/react": "14.0.0",
+     "@testing-library/react-hooks": "8.0.1",
+     "babel-jest": "29.7.0",
+     "bluebird": "3.7.2",
      "firebase": "10.2.0",
      "jest": "29.6.3",
+     "jest-environment-jsdom": "29.7.0",
+     "react": "18.2.0",
+     "react-dom": "18.2.0",
+     "react-native-web": "0.19.9",
+     "react-test-renderer": "18.2.0",
      "webpack": "5.88.2",
      "webpack-cli": "5.1.4"
    }
@@ -19,31 +19,67 @@ const getUseQuery = (register_query) => (
  ) => {
    const id = useId()

-   // TODO: retrieve this from AuthContext in client
+   // TODO: retrieve this from future AuthContext in client
    const uid = useMemo(() => {
+     // TODO: why is there a options.userId here ??
      const _uid = Platform.OS === "web" ? get_uid() : options.userId
      assert(_uid, "missing uid")
-
      return _uid
    }, [])

+   const hasInitiallySetFromStorage = useRef(false)
    const hasFirstReply = useRef(false)
    const hasNetworkReply = useRef(false)
    const lastDocRef = useRef(null)
    // const [page, setPage] = useState(0)

-   const [source, setSource] = useState()
-   const [data, setData] = useState()
-   const [error, setError] = useState()
-   const [loading, setLoading] = useState(true)
-
    const {
      key = "",
      projection = {},
      sort = {},
+     useStorage = false,
    } = options

-   // const storageKey = `${uid}.${key}.${model_name}.${JSON.stringify(query)}.${JSON.stringify(projection)}.${JSON.stringify(sort)}`
+   const storageKey = useMemo(() => {
+     return `${uid}${key ? `.${key}` : ""}.${model_name}.${JSON.stringify(query)}.${JSON.stringify(projection)}.${JSON.stringify(sort)}`
+   }, [uid, key, model_name, query, projection, sort])
+
+   const [source, setSource] = useState()
+
+   const [data, setData] = useState(() => {
+     if (!useStorage) {
+       return
+     }
+
+     if (Platform.OS === "web") {
+       const item = localStorage.getItem(storageKey)
+       if (!item) return
+
+       hasInitiallySetFromStorage.current = true
+
+       let parsedItem
+       try {
+         parsedItem = JSON.parse(item)
+       } catch (err) {
+         //
+       }
+       if (parsedItem) {
+         return parsedItem
+       }
+     } else {
+       // TODO: fast storage not implemented on mobile
+       console.log("fast storage mobile nOT IMPLEMENTED")
+     }
+   })
+
+   const [error, setError] = useState()
+
+   const [loading, setLoading] = useState(() => {
+     if (hasInitiallySetFromStorage.current) {
+       return false
+     }
+     return true
+   })

    useEffect(() => {
      if (options.debug) {
@@ -54,6 +90,15 @@ const getUseQuery = (register_query) => (
      const applyNewData = (newData, context) => {
        setData(newData)

+       // we only save network queries
+       if (useStorage && context.source === "network") {
+         if (Platform.OS === "web") {
+           localStorage.setItem(storageKey, JSON.stringify(newData))
+         } else {
+           // TODO: rn fast storage async
+         }
+       }
+
        if (newData?.length > 0) {
          lastDocRef.current = newData[newData.length - 1]
        }
@@ -73,7 +118,9 @@ const getUseQuery = (register_query) => (
      }

      const start = Date.now()
+
      log("will register query", model_name, query)
+
      const unsubscribe = register_query(model_name, query, {...options, key: queryKey}, (err, queryResult, context) => {
        log("callback answer with context", context, queryResult?.length)

@@ -125,7 +172,8 @@ const getUseQuery = (register_query) => (
          return
        }

-       if (context.is_local && options.skip_local && hasFirstReply.current) {
+       // TODO: this should be handled by the consumer with the context (cache or network)
+       if (context.is_local && options.skipLocal && hasFirstReply.current) {
          log("skipping local update", key)
          return
        }
@@ -140,6 +188,9 @@ const getUseQuery = (register_query) => (
        log && log("useQuery cleanup unsubscribe()")
        typeof unsubscribe === "function" && unsubscribe()
      }
+
+
+     // TODO: this isnt right we need to update on options change too
    }, [JSON.stringify(query), key])


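Note: the visible changes in the query hook are the new useStorage option (on web, the initial data state is hydrated from localStorage under the derived storageKey, and network results are written back to it) and the rename of skip_local to skipLocal. A hypothetical call site, assuming the hook keeps the useQuery(model_name, query, options) shape suggested by the surrounding code and returns the data/loading/error/source state it manages:

  const {data, loading, error, source} = useQuery("messages", {room_id: "r1"}, {
    key: "room-messages",   // folded into the storage key and query key
    useStorage: true,       // web only; native currently logs a "not implemented" message
    skipLocal: true,        // renamed from skip_local in this release
    userId: currentUserId,  // hypothetical variable; only read when Platform.OS !== "web", see the TODO above
  })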
package/rts/index.js CHANGED
@@ -1,223 +1,2 @@
  /* @flow */
- import assert from "assert"
- import {io} from "socket.io-client"
- import _get from "lodash/get"
- import _set from "lodash/set"
- import debug from "debug"
-
- import BASE_URL from "../base_url"
- import {get_tenant_id} from "../auth"
-
- import store from "./store"
- import getUseDocument from "./getUseDocument"
- import getUseQuery from "./getUseQuery"
-
- const log = debug("rb:socket")
-
- const TENANT_ID_HEADER = "rb-tenant-id"
-
- // how many local transaction ids are we keeping, it doesn't really need to be that big
- const MAX_TXN_BUF = 2048
-
- let _socket
- const _callbacks = {}
- const _queries_store = {}
- const _local_txn = []
-
- // TODO: when server disconnects / crashes and loses all server side stored queries
- // the clients must reconnect and re-register those queries, or the page will need to be hard refreshed
-
-
- // add_local_txn
- // when a request is made to the server, we generate (or send if provided) the txn_id
- // if the array becomes longer than the default buffer length we shift the array
- export const add_local_txn = (txn_id) => {
-   assert(txn_id, "add_local_txn trying to add an invalid txn_id")
-   _local_txn.push(txn_id)
-   if (_local_txn.length > MAX_TXN_BUF) {
-     _local_txn.shift()
-   }
- }
-
-
- // TODO: add compression / decompression
- const dispatch_query_payload = (payload) => {
-   log("dispatch_query_payload", payload)
-
-   const {model_name, query_key} = payload
-   // const cb = _get(_callbacks, `${model_name}.${query_key}`)
-   const callbacks = _callbacks[`${model_name}.${query_key}`]
-
-   if (!callbacks || !Array.isArray(callbacks)) {
-     log("dispatch_query_payload", "unable to find callback for query payload", payload)
-     console.error("dispatch_query_payload", "unable to find callback for query payload", payload)
-     return
-   }
-
-   if (payload.error) {
-     console.warn("rts-client, payload error:", payload.error)
-     console.warn("rts-client", "in:", model_name, "query_key:", query_key)
-     callbacks.forEach((cb) => cb(payload.error, undefined, {source: "network"}))
-     return
-   }
-
-   let data
-   try {
-     // TODO: zstd / brotli decompression here
-     data = JSON.parse(payload.data_buf)
-   } catch (err) {
-     console.log("Error", err)
-     log("dispatch_query_payload", "unable to parse or send data from payload:", payload.data_buf)
-   }
-
-   if (!data) {
-     // skipping if data parsing failed
-     return
-   }
-
-   const context = {
-     source: "network",
-     is_local: _local_txn.includes(payload.txn_id),
-     txn_id: payload.txn_id,
-   }
-
-   callbacks.forEach((cb) => cb(null, data, context))
-
-   store.update_docs(model_name, data)
- }
-
-
- export const connect = () => new Promise((resolve) => {
-   const tenant_id = get_tenant_id()
-
-   if (tenant_id) {
-     log("rts client will connect")
-   } else {
-     log("no tenant_id, rts connect will skip")
-     return
-   }
-
-
-   _socket = io(BASE_URL, {
-     forceNew: true,
-     transports: ["websocket", "polling"],
-     withCredentials: true,
-     query: {
-       [TENANT_ID_HEADER]: tenant_id,
-     },
-     // https://socket.io/docs/v4/client-options/#reconnection
-     reconnection: true,
-     reconnectionAttempts: 128,
-     reconnectionDelay: 400, // ms
-     reconnectionDelayMax: 10 * 1000, // 10s
-   })
-
-   _socket.on("connect", () => {
-     log("socket connected")
-     resolve()
-   })
-
-   _socket.io.on("reconnect", (e) => {
-     log("socked reconnected", e)
-   })
-
-   _socket.on("error", (err) => {
-     log("socket error", err)
-   })
-
-   _socket.on("query_payload", (payload) => {
-     // console.log("socket:query_payload", payload)
-     dispatch_query_payload(payload)
-   })
-
-   _socket.on("delete_doc", (payload) => {
-     log("document deleted", payload)
-   })
-
-   _socket.on("disconnect", (arg) => {
-     log("socket disconnected", arg)
-   })
- })
-
-
- export const disconnect = () => {
-   if (_socket) {
-     try {
-       _socket.disconnect()
-     } catch (e) {
-       // TODO: should we be swallowing this error
-     }
-     _socket = null
-   }
- }
-
-
- export const reconnect = () => {
-   log("socket will force reconnect")
-
-   // destroy current socket if exists
-   disconnect()
-
-   connect()
- }
-
- // register a query
- export const register_query = (model_name, query, _options, _callback) => {
-   // left shift args if _options is undefined
-   let options
-   let callback
-   if (_callback) {
-     assert(typeof _callback === "function")
-     options = _options
-     callback = _callback
-   } else {
-     options = {}
-     callback = _options
-   }
-
-   log("register_query", {model_name, query, options, callback})
-
-   if (!_socket) {
-     log("register_query: trying to use null socket", {model_name, query})
-     return
-   }
-
-   const key = options.key || ""
-   const query_key = `${key}${JSON.stringify(query)}${options.projection ? JSON.stringify(options.projection) : ""}`
-   log("registering query with key", key, model_name, query, query_key)
-
-   // save callback to update hooks
-   const cb_key = `${model_name}.${query_key}`
-   if (!_callbacks[cb_key] || !Array.isArray(_callbacks[cb_key])) {
-     _callbacks[cb_key] = []
-   }
-   _callbacks[cb_key].push(callback)
-
-   // save query for reconnections and retries
-   if (!_queries_store[model_name]) {
-     _queries_store[model_name] = {}
-   }
-   _queries_store[model_name][query]
-
-   // TODO: why both run and register query here ? the run_query should come straight from register ?
-   _socket.emit("run_query", {model_name, query, query_key, options})
-   _socket.emit("register_query", {model_name, query, query_key, options})
-
-   // run the query from the cache a first time
-   store.run_query({model_name, query, query_key, options}, callback)
-
-   return () => {
-     _socket.emit("remove_query", {model_name, query, query_key, options})
-     // remove callback
-     const cb_index = _callbacks[cb_key].indexOf(callback)
-     if (cb_index > -1) {
-       _callbacks[cb_key].splice(cb_index, 1)
-     } else {
-       console.warn("warning, trying to remove a callback that doesn't exist")
-     }
-   }
- }
-
- export const useQuery = getUseQuery(register_query)
-
- export const useDocument = getUseDocument(register_query)
+ export * from "./rts"
package/rts/rts.js ADDED
@@ -0,0 +1,223 @@
+ /* @flow */
+ import assert from "assert"
+ import {io} from "socket.io-client"
+ import _get from "lodash/get"
+ import _set from "lodash/set"
+ import debug from "debug"
+
+ import BASE_URL from "../base_url"
+ import {get_tenant_id} from "../auth"
+
+ import store from "./store"
+ import getUseDocument from "./getUseDocument"
+ import getUseQuery from "./getUseQuery"
+
+ const log = debug("rb:socket")
+
+ const TENANT_ID_HEADER = "rb-tenant-id"
+
+ // how many local transaction ids are we keeping, it doesn't really need to be that big
+ const MAX_TXN_BUF = 2048
+
+ let _socket
+ const _callbacks = {}
+ const _queries_store = {}
+ const _local_txn = []
+
+ // TODO: when server disconnects / crashes and loses all server side stored queries
+ // the clients must reconnect and re-register those queries, or the page will need to be hard refreshed
+
+ // add_local_txn
+ // when a request is made to the server, we generate (or send if provided) the txn_id
+ // if the array becomes longer than the default buffer length we shift the array
+ export const add_local_txn = (txn_id) => {
+   assert(txn_id, "add_local_txn trying to add an invalid txn_id")
+   _local_txn.push(txn_id)
+   if (_local_txn.length > MAX_TXN_BUF) {
+     _local_txn.shift()
+   }
+ }
+
+
+ // TODO: add compression / decompression
+ const dispatch_query_payload = (payload) => {
+   log("dispatch_query_payload", payload)
+
+   const {model_name, query_key} = payload
+   // const cb = _get(_callbacks, `${model_name}.${query_key}`)
+   const callbacks = _callbacks[`${model_name}.${query_key}`]
+
+   if (!callbacks || !Array.isArray(callbacks)) {
+     log("dispatch_query_payload", "unable to find callback for query payload", payload)
+     console.error("dispatch_query_payload", "unable to find callback for query payload", payload)
+     return
+   }
+
+   if (payload.error) {
+     console.warn("rts-client, payload error:", payload.error)
+     console.warn("rts-client", "in:", model_name, "query_key:", query_key)
+     callbacks.forEach((cb) => cb(payload.error, undefined, {source: "network"}))
+     return
+   }
+
+   let data
+   try {
+     // TODO: zstd decompression here
+     data = JSON.parse(payload.data_buf)
+   } catch (err) {
+     console.log("Error", err)
+     log("dispatch_query_payload", "unable to parse or send data from payload:", payload.data_buf)
+   }
+
+   if (!data) {
+     // skipping if data parsing failed
+     return
+   }
+
+   const context = {
+     source: "network",
+     is_local: _local_txn.includes(payload.txn_id),
+     txn_id: payload.txn_id,
+   }
+
+   callbacks.forEach((cb) => cb(null, data, context))
+
+   store.update_docs(model_name, data)
+ }
+
+
+ export const connect = () => new Promise((resolve) => {
+   const tenant_id = get_tenant_id()
+
+   if (tenant_id) {
+     log("rts client will connect")
+   } else {
+     log("no tenant_id, rts connect will skip")
+     return
+   }
+
+
+   _socket = io(BASE_URL, {
+     forceNew: true,
+     transports: ["websocket", "polling"],
+     withCredentials: true,
+     query: {
+       [TENANT_ID_HEADER]: tenant_id,
+     },
+     // https://socket.io/docs/v4/client-options/#reconnection
+     reconnection: true,
+     reconnectionAttempts: 128,
+     reconnectionDelay: 400, // ms
+     reconnectionDelayMax: 10 * 1000, // 10s
+   })
+
+   _socket.on("connect", () => {
+     log("socket connected")
+     resolve()
+   })
+
+   _socket.io.on("reconnect", (e) => {
+     log("socked reconnected", e)
+   })
+
+   _socket.on("error", (err) => {
+     log("socket error", err)
+   })
+
+   _socket.on("query_payload", (payload) => {
+     // console.log("socket:query_payload", payload)
+     dispatch_query_payload(payload)
+   })
+
+   _socket.on("delete_doc", (payload) => {
+     log("document deleted", payload)
+   })
+
+   _socket.on("disconnect", (arg) => {
+     log("socket disconnected", arg)
+   })
+ })
+
+
+ export const disconnect = () => {
+   if (_socket) {
+     try {
+       _socket.disconnect()
+     } catch (e) {
+       // TODO: should we be swallowing this error
+     }
+     _socket = null
+   }
+ }
+
+
+ export const reconnect = () => {
+   log("socket will force reconnect")
+
+   // destroy current socket if exists
+   disconnect()
+
+   connect()
+ }
+
+ // register a query
+ export const register_query = (model_name, query, _options, _callback) => {
+   // left shift args if _options is undefined
+   let options
+   let callback
+   if (_callback) {
+     assert(typeof _callback === "function")
+     options = _options
+     callback = _callback
+   } else {
+     options = {}
+     callback = _options
+   }
+
+   log("register_query", {model_name, query, options, callback})
+
+   if (!_socket) {
+     log("register_query: trying to use null socket", {model_name, query})
+     return
+   }
+
+   const key = options.key || ""
+   const query_key = `${key}${JSON.stringify(query)}${options.projection ? JSON.stringify(options.projection) : ""}`
+   log("registering query with key", key, model_name, query, query_key)
+
+   // save callback to update hooks
+   const cb_key = `${model_name}.${query_key}`
+   if (!_callbacks[cb_key] || !Array.isArray(_callbacks[cb_key])) {
+     _callbacks[cb_key] = []
+   }
+   _callbacks[cb_key].push(callback)
+
+   // save query for reconnections and retries
+   if (!_queries_store[model_name]) {
+     _queries_store[model_name] = {}
+   }
+   _queries_store[model_name][query]
+
+   // TODO: why both run and register query here ? the run_query should come straight from register ?
+   _socket.emit("run_query", {model_name, query, query_key, options})
+   _socket.emit("register_query", {model_name, query, query_key, options})
+
+   // run the query from the cache a first time
+   store.run_query({model_name, query, query_key, options}, callback)
+
+   return () => {
+     _socket.emit("remove_query", {model_name, query, query_key, options})
+     // remove callback
+     const cb_index = _callbacks[cb_key].indexOf(callback)
+     if (cb_index > -1) {
+       _callbacks[cb_key].splice(cb_index, 1)
+     } else {
+       console.warn("warning, trying to remove a callback that doesn't exist")
+     }
+   }
+ }
+
+
+ export const useQuery = getUseQuery(register_query)
+
+ export const useDocument = getUseDocument(register_query)
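Note: rts/index.js is now a bare re-export and the implementation moved essentially unchanged to rts/rts.js (aside from a dropped "brotli" mention in the decompression TODO). For orientation, a hypothetical consumer flow for the exported socket API, with the import path assumed rather than taken from the package docs:

  import {connect, register_query} from "@rpcbase/client/rts" // path assumed

  await connect() // resolves on the socket "connect" event; note it never settles when get_tenant_id() returns nothing

  const unsubscribe = register_query("orders", {status: "open"}, {key: "open"}, (err, docs, context) => {
    if (err) return console.error(err)
    // fired once from the PouchDB cache (context.source === "cache"),
    // then again on every server push (context.source === "network")
    console.log(docs.length, "docs from", context.source)
  })

  // later: emits remove_query and drops the local callback
  unsubscribe()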
@@ -0,0 +1,3 @@
+ /* @flow */
+
+ export const UNDERSCORE_PREFIX = "$_"
@@ -25,7 +25,8 @@ const get_collection = (col_name) => {
    if (_cols_store[col_name]) {
      return _cols_store[col_name]
    } else {
-     const col = new PouchDB(col_name, { adapter: "indexeddb" });
+     // https://pouchdb.com/api.html#create_database
+     const col = new PouchDB(col_name, { adapter: "indexeddb", revs_limit: 1 })
      _cols_store[col_name] = col

      return col
@@ -3,14 +3,18 @@ import debug from "debug"

  import "./debug"

+ import {UNDERSCORE_PREFIX} from "./constants"
+
  import get_collection from "./get_collection"
  import update_docs from "./update_docs"
  import satisfies_projection from "./satisfies_projection"
+ import replace_query_keys from "./replace_query_keys"

  const log = debug("rb:rts:store")


- // // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
+ // TODO: listening for changes
+ // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
  // const res = await db.createIndex({
  //   index: {fields: ["name"]}
  // })
@@ -26,11 +30,7 @@ const log = debug("rb:rts:store")
  // // handle errors
  // console.log(err)
  // })
- //
  // fn.cancel()
- //
- // console.log("GOT FN", fn)
- //
  // console.log("got res", res)
  // _cols_store[col_name].getIndexes().then(function (result) {
  //   console.log("got indexes", result)
@@ -44,13 +44,14 @@ const run_query = async({model_name, query, query_key, options}, callback) => {
    // console.time("store run_query")

    // TODO: we should prefix model_name with tenant_prefix + env_id
-   const col = get_collection(model_name)
+   const collection = get_collection(model_name)
+
+   const replaced_query = replace_query_keys(query, (k) => (k.startsWith("_") && k !== "_id") ? `${UNDERSCORE_PREFIX}${k}` : k)

    // console.time(`query-${model_name}`)
    // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
-   const {docs} = await col.find({
-     // TODO: we should check if the selectors are compatible here
-     selector: query,
+   const {docs} = await collection.find({
+     selector: replaced_query,
      // DO NOT INCLUDE FIELDS HERE AS IT USES PICK AND WE WOULD GO through the list twice
      // https://github.com/pouchdb/pouchdb/blob/master/packages/node_modules/pouchdb-find/src/adapters/local/find/index.js
      // fields: [""]
@@ -70,7 +71,9 @@ const run_query = async({model_name, query, query_key, options}, callback) => {
    })

    if (options.projection) {
-     mapped_docs = mapped_docs.filter((doc) => satisfies_projection(doc, options.projection))
+     mapped_docs = mapped_docs.filter((doc) => {
+       return satisfies_projection(doc, options.projection)
+     })
    }

    callback(null, mapped_docs, {source: "cache"})
@@ -0,0 +1,30 @@
+ /* @flow */
+
+
+ // WARNING: this is a bit of a toy implementation it's not certain it will work with complex queries where a field isn't referenced by being an object's key
+ // TODO: this isn't right it's missing a lot of possible queries
+ // https://chat.openai.com/share/da344eed-e3e1-4c8d-8091-cc26b22a8f2e
+
+ const replace_query_keys = (obj, fn) => {
+   if (typeof obj !== 'object' || obj === null) {
+     return obj
+   }
+
+   if (Array.isArray(obj)) {
+     return obj.map(item => replace_query_keys(item, fn))
+   }
+
+   // create an empty new obj with the same prototype as obj
+   const new_obj = Object.create(Object.getPrototypeOf(obj))
+
+   Object
+     .keys(obj)
+     .forEach(key => {
+       const new_key = fn(key)
+       new_obj[new_key] = replace_query_keys(obj[key], fn)
+     })
+
+   return new_obj
+ }
+
+ export default replace_query_keys
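Note: replace_query_keys recursively rewrites the keys of a Mongo-style selector; run_query pairs it with UNDERSCORE_PREFIX ("$_") so that underscore-prefixed fields, which update_docs renames before writing to PouchDB, can still be matched against the cached copies. A small worked example (the selector itself is made up):

  const prefix_reserved = (k) => (k.startsWith("_") && k !== "_id") ? `$_${k}` : k

  replace_query_keys({_deleted: false, owner: {_role: "admin"}, _id: "abc"}, prefix_reserved)
  // => {$__deleted: false, owner: {$__role: "admin"}, _id: "abc"}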
@@ -1,44 +1,46 @@
  /* @flow */
  import get_collection from "./get_collection"

+ import {UNDERSCORE_PREFIX} from "./constants"
+

  const update_docs = async(model_name, data) => {
    const collection = get_collection(model_name)

    const all_ids = data.map((doc) => doc._id)

-   // console.log("will update cache", model_name, data, all_ids)
-
-   // console.time("find")
-
-   // TODO:
-   // there is also a bulk get which could have different performance than find, try both
-   // https://pouchdb.com/api.html#bulk_get
-
    // https://github.com/pouchdb/pouchdb/tree/master/packages/node_modules/pouchdb-find#dbcreateindexindex--callback
    const {docs: current_docs} = await collection.find({
      selector: {_id: {$in: all_ids}},
      fields: ["_id", "_rev"],
    })

-   // console.log("current_docs", current_docs)
+   const current_docs_by_id = current_docs.reduce((acc, val) => {
+     acc[val._id] = val
+     return acc
+   }, {})
+
+   // WARNING: tmp
+   // we don't need to remove docs that aren't present in data because queries are refined with find

    const revs_map = {}
    current_docs.forEach((doc) => revs_map[doc._id] = doc._rev)

-   const write_docs = data.map((mongo_doc) => {
-     const op = Object.entries(mongo_doc).reduce((new_doc, [key, value]) => {
-       let new_key = key !== "_id" && key.startsWith('_') ? `$_${key}` : key
-       new_doc[new_key] = value
-       return new_doc
-     }, {})
+   const new_docs = data.map((mongo_doc) => {
+     const current_doc = current_docs_by_id[mongo_doc._id] || {}
+
+     const op = Object.entries(mongo_doc)
+       .reduce((new_doc, [key, value]) => {
+         let new_key = key !== "_id" && key.startsWith('_') ? `${UNDERSCORE_PREFIX}${key}` : key
+         new_doc[new_key] = value
+         return new_doc
+       }, current_doc)

      op._rev = revs_map[mongo_doc._id]
      return op
    })

-   await collection.bulkDocs(write_docs)
-   // console.timeEnd("find")
+   await collection.bulkDocs(new_docs)
  }

  export default update_docs
@@ -1,22 +0,0 @@
- /* @flow */
- const getInitials = require("./getInitials")
-
-
- const inputs = [
-   ["Philippe de Reynal", "PR"],
-   ["Sarah Connor", "SC"],
-   ["Sarah O'Connor", "SC"],
-   ["Cédric O", "CO"],
-   ["Chloë Grace Moretz", "CGM"],
-   ["Ella Marija Lani Yelich-O'Connor", "EML"],
-   ["Hannah John-Kamen", "HJK"],
-   ["npm", "NPM"],
-   ["Apple", "APP"],
- ]
-
- inputs.forEach((i) => {
-   test(i[0], () => {
-     const initials = getInitials(i[0])
-     expect(initials).toBe(i[1])
-   })
- })
@@ -1,50 +0,0 @@
- /* @flow */
- const satisfies_projection = require("./satisfies_projection")
-
-
- let doc = {
-   _id: '1',
-   field1: 'value1',
-   field2: {
-     subField1: 'value2',
-     subField2: 'value3'
-   }
- }
-
- let projection = {
-   field1: 1,
-   'field2.subField1': 1,
-   'field2.subField2': 1
- }
-
-
-
- test("simple", () => {
-   expect(satisfies_projection(doc, projection)).toBe(true)
- })
-
- test("missing field", () => {
-   expect(satisfies_projection(doc, {
-     missing_field: 1,
-     ...projection
-   })).toBe(false)
- })
-
- test("matching and missing field", () => {
-   expect(satisfies_projection(doc, {
-     field1: true,
-     missing_field: 1,
-     ...projection
-   })).toBe(false)
- })
-
- test("missing nested field", () => {
-   expect(satisfies_projection(doc, {
-     "missing_field.missing_nested": 1,
-     ...projection
-   })).toBe(false)
- })
-
- test("empty", () => {
-   expect(satisfies_projection(doc, {field1: 1})).toBe(true)
- })