hide-a-bed 4.0.3 → 4.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +304 -73
- package/cjs/impl/bulk.cjs +158 -10
- package/cjs/impl/crud.cjs +19 -12
- package/cjs/impl/errors.cjs +12 -0
- package/cjs/impl/patch.cjs +19 -0
- package/cjs/impl/queryBuilder.cjs +99 -0
- package/cjs/impl/stream.cjs +12 -1
- package/cjs/impl/trackedEmitter.cjs +54 -0
- package/cjs/impl/transactionErrors.cjs +70 -0
- package/cjs/index.cjs +21 -5
- package/cjs/schema/bind.cjs +4 -0
- package/cjs/schema/bulk.cjs +35 -11
- package/cjs/schema/config.cjs +1 -0
- package/cjs/schema/crud.cjs +23 -1
- package/cjs/schema/patch.cjs +17 -2
- package/cjs/schema/query.cjs +2 -1
- package/config.json +5 -0
- package/impl/bulk.d.mts +4 -0
- package/impl/bulk.d.mts.map +1 -1
- package/impl/bulk.mjs +200 -13
- package/impl/crud.d.mts +2 -0
- package/impl/crud.d.mts.map +1 -1
- package/impl/crud.mjs +25 -15
- package/impl/errors.d.mts +8 -0
- package/impl/errors.d.mts.map +1 -1
- package/impl/errors.mjs +12 -0
- package/impl/patch.d.mts +2 -0
- package/impl/patch.d.mts.map +1 -1
- package/impl/patch.mjs +22 -1
- package/impl/query.d.mts +18 -9
- package/impl/query.d.mts.map +1 -1
- package/impl/queryBuilder.d.mts +94 -0
- package/impl/queryBuilder.d.mts.map +1 -0
- package/impl/queryBuilder.mjs +99 -0
- package/impl/stream.d.mts.map +1 -1
- package/impl/stream.mjs +12 -1
- package/impl/trackedEmitter.d.mts +8 -0
- package/impl/trackedEmitter.d.mts.map +1 -0
- package/impl/trackedEmitter.mjs +33 -0
- package/impl/transactionErrors.d.mts +57 -0
- package/impl/transactionErrors.d.mts.map +1 -0
- package/impl/transactionErrors.mjs +47 -0
- package/index.d.mts +18 -3
- package/index.d.mts.map +1 -1
- package/index.mjs +42 -11
- package/package.json +9 -4
- package/schema/bind.d.mts +382 -45
- package/schema/bind.d.mts.map +1 -1
- package/schema/bind.mjs +6 -2
- package/schema/bulk.d.mts +559 -16
- package/schema/bulk.d.mts.map +1 -1
- package/schema/bulk.mjs +40 -10
- package/schema/config.d.mts.map +1 -1
- package/schema/config.mjs +1 -0
- package/schema/crud.d.mts +240 -15
- package/schema/crud.d.mts.map +1 -1
- package/schema/crud.mjs +27 -1
- package/schema/patch.d.mts +138 -2
- package/schema/patch.d.mts.map +1 -1
- package/schema/patch.mjs +22 -2
- package/schema/query.d.mts +62 -30
- package/schema/query.d.mts.map +1 -1
- package/schema/query.mjs +4 -1
- package/schema/stream.d.mts +18 -9
- package/schema/stream.d.mts.map +1 -1
package/README.md
CHANGED
|
@@ -4,47 +4,75 @@ API
|
|
|
4
4
|
### Setup
|
|
5
5
|
|
|
6
6
|
Depending on your environment, use import or require
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
```
|
|
11
|
-
```
|
|
12
|
-
const { get, put, patch, remove, bulkSave, bulkGet, bulkRemove, query } = require('hide-a-bed')
|
|
13
|
-
```
|
|
7
|
+
```import { get, put, query } from 'hide-a-bed'```
|
|
8
|
+
or
|
|
9
|
+
```const { get, put, query } = require('hide-a-bed')```
|
|
14
10
|
|
|
15
11
|
### Config
|
|
16
12
|
|
|
17
13
|
Anywhere you see a config, it is an object with the following setup
|
|
14
|
+
```{ couch: 'https://username:pass@the.couch.url.com:5984' }```
|
|
15
|
+
And it is passed in as the first argument of all the functions
|
|
16
|
+
```const doc = await get(config, 'doc-123')```
|
|
18
17
|
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
Couch get is weird. We have chosen to return ```undefined``` if the doc is not found. All other things throw. If you want
|
|
23
|
-
not_found to also throw an exception, add the following to your config:
|
|
18
|
+
See [Advanced Config Options](#advanced-config-options) for more advanced settings.
|
|
19
|
+
|
|
20
|
+
#### bindConfig
|
|
24
21
|
|
|
22
|
+
A convenience method to bind the config, so you don't need to pass it in.
|
|
25
23
|
```
|
|
26
|
-
{
|
|
24
|
+
import { bindConfig } from 'hide-a-bed'
|
|
25
|
+
const db = bindConfig(process.env)
|
|
26
|
+
const doc = db.get('doc-123')
|
|
27
27
|
```
|
|
28
28
|
|
|
29
|
+
### API Quick Reference
|
|
30
|
+
|
|
31
|
+
| Document Operations | Bulk Operations | View Operations |
|
|
32
|
+
|-------------------|-----------------|-----------------|
|
|
33
|
+
| [`get()`](#get) | [`bulkGet()`](#bulkget) | [`query()`](#query) |
|
|
34
|
+
| [`put()`](#put) | [`bulkSave()`](#bulksave) | [`queryStream()`](#querystream) |
|
|
35
|
+
| [`patch()`](#patch) | [`bulkRemove()`](#bulkremove) | [`createQuery()`](#createquery) |
|
|
36
|
+
| [`patchDangerously()`](#patchdangerously) | [`bulkGetDictionary()`](#bulkgetdictionary) | |
|
|
37
|
+
| [`getAtRev()`](#getatrev) | [`bulkSaveTransaction()`](#bulksavetransaction) | |
|
|
38
|
+
|
|
29
39
|
### Document Operations
|
|
30
40
|
|
|
41
|
+
#### get
|
|
31
42
|
|
|
32
|
-
#### get(config, id)
|
|
33
43
|
Get a single document by ID.
|
|
34
|
-
|
|
44
|
+
|
|
45
|
+
**Parameters:**
|
|
46
|
+
- `config`: Object with couch URL string and optional throwOnGetNotFound flag
|
|
47
|
+
- `id`: Document ID string
|
|
48
|
+
- `config`: Object with
|
|
49
|
+
* `couch` URL string
|
|
50
|
+
* `throwOnGetNotFound` default false. If true, 404 docs throw
|
|
35
51
|
- `id`: Document ID string
|
|
36
52
|
- Returns: Promise resolving to document object or null if not found
|
|
37
53
|
|
|
38
54
|
```javascript
|
|
39
55
|
const config = { couch: 'http://localhost:5984/mydb' }
|
|
40
56
|
const doc = await get(config, 'doc-123')
|
|
41
|
-
|
|
42
|
-
|
|
57
|
+
console.log(doc._id, doc._rev)
|
|
58
|
+
|
|
59
|
+
const notThereIsNull = await get(config, 'does-not-exist')
|
|
60
|
+
console.log(notThereIsNull) // null
|
|
61
|
+
|
|
62
|
+
try {
|
|
63
|
+
const config = { couch: '', throwOnGetNotFound: true }
|
|
64
|
+
await get(config, 'does-not-exist')
|
|
65
|
+
} catch (err) {
|
|
66
|
+
if (err.name === 'NotFoundError') console.log('Document not found')
|
|
43
67
|
}
|
|
68
|
+
|
|
44
69
|
```
|
|
45
70
|
|
|
46
|
-
#### put
|
|
71
|
+
#### put
|
|
72
|
+
|
|
47
73
|
Save a document.
|
|
74
|
+
|
|
75
|
+
**Parameters:**
|
|
48
76
|
- `config`: Object with `couch` URL string
|
|
49
77
|
- `doc`: Document object with `_id` property
|
|
50
78
|
- Returns: Promise resolving to response with `ok`, `id`, `rev` properties
|
|
@@ -58,16 +86,21 @@ const doc = {
|
|
|
58
86
|
}
|
|
59
87
|
const result = await put(config, doc)
|
|
60
88
|
// result: { ok: true, id: 'doc-123', rev: '1-abc123' }
|
|
89
|
+
|
|
90
|
+
// imaginary rev returns a conflict
|
|
91
|
+
const doc = { _id: 'notThereDoc', _rev: '32-does-not-compute'}
|
|
92
|
+
const result2 = await db.put(doc)
|
|
93
|
+
console.log(result2) // { ok: false, error: 'conflict', statusCode: 409 }
|
|
61
94
|
```
|
|
62
95
|
|
|
63
|
-
#### patch
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
96
|
+
#### patch
|
|
97
|
+
|
|
98
|
+
Update specific properties of a document. You must know the current _rev and pass it in with the properties.
|
|
99
|
+
|
|
100
|
+
**Parameters:**
|
|
101
|
+
- `config`: Object with couch URL string
|
|
69
102
|
- `id`: Document ID string
|
|
70
|
-
- `properties`: Object with properties to update
|
|
103
|
+
- `properties`: Object with properties to update, one _must_ be the current _rev
|
|
71
104
|
- Returns: Promise resolving to response with `ok`, `id`, `rev` properties
|
|
72
105
|
|
|
73
106
|
```javascript
|
|
@@ -77,29 +110,66 @@ const config = {
|
|
|
77
110
|
delay: 500
|
|
78
111
|
}
|
|
79
112
|
const properties = {
|
|
113
|
+
_rev: '3-fdskjhfsdkjhfsd',
|
|
80
114
|
name: 'Alice Smith',
|
|
81
115
|
updated: true
|
|
82
116
|
}
|
|
83
117
|
const result = await patch(config, 'doc-123', properties)
|
|
84
118
|
// result: { ok: true, id: 'doc-123', rev: '2-xyz789' }
|
|
85
119
|
```
|
|
120
|
+
#### patchDangerously
|
|
86
121
|
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
- `
|
|
91
|
-
-
|
|
122
|
+
Update specific properties of a document; no _rev is needed.
|
|
123
|
+
|
|
124
|
+
**Parameters:**
|
|
125
|
+
- `config`: Object with couch URL string
|
|
126
|
+
- `id`: Document ID string
|
|
127
|
+
- `properties`: Object with properties to update
|
|
128
|
+
|
|
129
|
+
*warning* - this can clobber data. It will retry even if a conflict happens. There are some use cases for this, but you have been warned, hence the name.
|
|
130
|
+
|
|
131
|
+
- `id`: Document ID string
|
|
132
|
+
- `properties`: Object with properties to update
|
|
133
|
+
- Returns: Promise resolving to response with `ok`, `id`, `rev` properties
|
|
134
|
+
|
|
135
|
+
```javascript
|
|
136
|
+
const config = {
|
|
137
|
+
couch: 'http://localhost:5984/mydb',
|
|
138
|
+
retries: 3,
|
|
139
|
+
delay: 500
|
|
140
|
+
}
|
|
141
|
+
const properties = {
|
|
142
|
+
name: 'Alice Smith',
|
|
143
|
+
updated: true
|
|
144
|
+
}
|
|
145
|
+
const result = await patchDangerously(config, 'doc-123', properties)
|
|
146
|
+
// result: { ok: true, id: 'doc-123', rev: '2-xyz789' }
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
#### getAtRev
|
|
150
|
+
|
|
151
|
+
Return a document at the rev specified.
|
|
152
|
+
|
|
153
|
+
**Parameters:**
|
|
154
|
+
- `config`: Object with couch URL string
|
|
155
|
+
- `id`: Document ID string
|
|
156
|
+
- `rev`: Revision string to retrieve
|
|
157
|
+
|
|
158
|
+
*CouchDB* is not a version control db. This is a special function for unique situations. The _rev might not be around as couch cleans up old revs.
|
|
92
159
|
|
|
93
160
|
```javascript
|
|
94
161
|
const config = { couch: 'http://localhost:5984/mydb' }
|
|
95
|
-
const
|
|
96
|
-
|
|
162
|
+
const doc = await getAtRev(config, 'doc-123', '2-fsdjfsdakljfsajlksd')
|
|
163
|
+
console.log(doc._id, doc._rev)
|
|
97
164
|
```
|
|
98
165
|
|
|
99
166
|
### Bulk Operations
|
|
100
167
|
|
|
101
|
-
#### bulkSave
|
|
168
|
+
#### bulkSave
|
|
169
|
+
|
|
102
170
|
Save multiple documents in one request.
|
|
171
|
+
|
|
172
|
+
**Parameters:**
|
|
103
173
|
- `config`: Object with `couch` URL string
|
|
104
174
|
- `docs`: Array of document objects, each with `_id`
|
|
105
175
|
- Returns: Promise resolving to array of results with `ok`, `id`, `rev` for each doc
|
|
@@ -117,24 +187,33 @@ const results = await bulkSave(config, docs)
|
|
|
117
187
|
// ]
|
|
118
188
|
```
|
|
119
189
|
|
|
120
|
-
#### bulkGet
|
|
190
|
+
#### bulkGet
|
|
191
|
+
|
|
121
192
|
Get multiple documents by ID.
|
|
193
|
+
|
|
194
|
+
**Parameters:**
|
|
122
195
|
- `config`: Object with `couch` URL string
|
|
123
196
|
- `ids`: Array of document ID strings
|
|
124
197
|
- Returns: Promise resolving to array of documents
|
|
125
198
|
|
|
199
|
+
Not found documents will still have a row in the results, but the doc will be null, and the error property will be set
|
|
200
|
+
|
|
126
201
|
```javascript
|
|
127
202
|
const config = { couch: 'http://localhost:5984/mydb' }
|
|
128
|
-
const ids = ['doc1', 'doc2']
|
|
203
|
+
const ids = ['doc1', 'doc2', 'doesNotExist']
|
|
129
204
|
const docs = await bulkGet(config, ids)
|
|
130
205
|
// docs: [
|
|
131
206
|
// { _id: 'doc1', _rev: '1-abc123', type: 'user', name: 'Alice' },
|
|
132
|
-
// { _id: 'doc2', _rev: '1-def456', type: 'user', name: 'Bob' }
|
|
207
|
+
// { _id: 'doc2', _rev: '1-def456', type: 'user', name: 'Bob' },
|
|
208
|
+
// { key: 'notThereDoc', error: 'not_found' }
|
|
133
209
|
// ]
|
|
134
210
|
```
|
|
135
211
|
|
|
136
|
-
#### bulkRemove
|
|
212
|
+
#### bulkRemove
|
|
213
|
+
|
|
137
214
|
Delete multiple documents in one request.
|
|
215
|
+
|
|
216
|
+
**Parameters:**
|
|
138
217
|
- `config`: Object with `couch` URL string
|
|
139
218
|
- `ids`: Array of document ID strings to delete
|
|
140
219
|
- Returns: Promise resolving to array of results with `ok`, `id`, `rev` for each deletion
|
|
@@ -149,10 +228,99 @@ const results = await bulkRemove(config, ids)
|
|
|
149
228
|
// ]
|
|
150
229
|
```
|
|
151
230
|
|
|
231
|
+
#### bulkGetDictionary
|
|
232
|
+
|
|
233
|
+
Adds some convenience to bulkGet. Found and notFound documents are separated. Both properties are records of id to result. This makes it easy to deal with the results.
|
|
234
|
+
|
|
235
|
+
**Parameters:**
|
|
236
|
+
- `config`: Object with `couch` URL string
|
|
237
|
+
- `ids`: Array of document ID strings to fetch
|
|
238
|
+
- Returns: Promise resolving to an object with found and notFound properties.
|
|
239
|
+
|
|
240
|
+
*found* looks like
|
|
241
|
+
```
|
|
242
|
+
{
|
|
243
|
+
id1: { _id: 'id1', _rev: '1-221', data: {} },
|
|
244
|
+
id2: { _id: 'id2', _rev: '4-421', data: {} },
|
|
245
|
+
}
|
|
246
|
+
```
|
|
247
|
+
|
|
248
|
+
*notFound* looks like
|
|
249
|
+
```
|
|
250
|
+
{
|
|
251
|
+
id3: { key: 'id1', error: 'not_found' }
|
|
252
|
+
}
|
|
253
|
+
```
|
|
254
|
+
|
|
255
|
+
```javascript
|
|
256
|
+
const config = { couch: 'http://localhost:5984/mydb' }
|
|
257
|
+
const ids = ['doc1', 'doc2']
|
|
258
|
+
const results = await bulkGetDictionary(config, ids)
|
|
259
|
+
// results: {
|
|
260
|
+
// found: {
|
|
261
|
+
// id1: { _id: 'id1', _rev: '1-221', data: {} },
|
|
262
|
+
// id2: { _id: 'id2', _rev: '4-421', data: {} },
|
|
263
|
+
// },
|
|
264
|
+
// notFound: {
|
|
265
|
+
// id3: { key: 'id1', error: 'not_found' }
|
|
266
|
+
// }
|
|
267
|
+
// }
|
|
268
|
+
```
|
|
269
|
+
|
|
270
|
+
#### bulkSaveTransaction
|
|
271
|
+
|
|
272
|
+
Perform a bulk save operation with all-or-nothing semantics.
|
|
273
|
+
|
|
274
|
+
**Parameters:**
|
|
275
|
+
- `config`: Object with `couch` URL string
|
|
276
|
+
- `transactionId`: Unique identifier for the transaction
|
|
277
|
+
- `docs`: Array of document objects to save
|
|
278
|
+
- Returns: Promise resolving to array of results with `ok`, `id`, `rev` for each doc
|
|
279
|
+
|
|
280
|
+
This operation ensures that either all documents are saved successfully, or none are, maintaining data consistency. If any document fails to save, the operation will attempt to roll back all changes.
|
|
281
|
+
|
|
282
|
+
Note: The transactionId has to be unique for the lifetime of the app. It is used to prevent two processes from executing the same transaction. It is up to you to craft a transactionId that uniquely represents this transaction, and that also is the same if another process tries to generate it.
|
|
283
|
+
|
|
284
|
+
Exceptions to handle:
|
|
285
|
+
- `TransactionSetupError`: Thrown if the transaction document cannot be created. Usually because it already exists
|
|
286
|
+
- `TransactionVersionConflictError`: Thrown if there are version conflicts with existing documents.
|
|
287
|
+
- `TransactionBulkOperationError`: Thrown if the bulk save operation fails for some documents.
|
|
288
|
+
- `TransactionRollbackError`: Thrown if the rollback operation fails after a transaction failure.
|
|
289
|
+
|
|
290
|
+
```javascript
|
|
291
|
+
const config = { couch: 'http://localhost:5984/mydb' }
|
|
292
|
+
const transactionId = 'txn-123'
|
|
293
|
+
const docs = [
|
|
294
|
+
{ _id: 'doc1', type: 'user', name: 'Alice', _rev: '1-abc123' },
|
|
295
|
+
{ _id: 'doc2', type: 'user', name: 'Bob', _rev: '1-def456' }
|
|
296
|
+
]
|
|
297
|
+
|
|
298
|
+
try {
|
|
299
|
+
const results = await bulkSaveTransaction(config, transactionId, docs)
|
|
300
|
+
console.log('Transaction successful:', results)
|
|
301
|
+
} catch (error) {
|
|
302
|
+
if (error instanceof TransactionSetupError) {
|
|
303
|
+
// the transaction could not start - usually an existing transaction with the same id
|
|
304
|
+
console.error('Transaction setup failed:', error)
|
|
305
|
+
} else if (error instanceof TransactionVersionConflictError) {
|
|
306
|
+
// one or more of the versions of the docs provided don't match what is currently in the db
|
|
307
|
+
console.error('Version conflict error:', error)
|
|
308
|
+
} else if (error instanceof TransactionRollbackError) {
|
|
309
|
+
// the transaction was rolled back - so the 'or none' condition occurred
|
|
310
|
+
console.error('Rollback error:', error)
|
|
311
|
+
} else {
|
|
312
|
+
console.error('Unexpected error:', error)
|
|
313
|
+
}
|
|
314
|
+
}
|
|
315
|
+
```
|
|
316
|
+
|
|
152
317
|
### View Queries
|
|
153
318
|
|
|
154
|
-
#### query
|
|
319
|
+
#### query
|
|
320
|
+
|
|
155
321
|
Query a view with options.
|
|
322
|
+
|
|
323
|
+
**Parameters:**
|
|
156
324
|
- `config`: Object with `couch` URL string
|
|
157
325
|
- `view`: View path string (e.g. '_design/doc/_view/name')
|
|
158
326
|
- `options`: Optional object with query parameters:
|
|
@@ -191,6 +359,104 @@ const result = await query(config, view, options)
|
|
|
191
359
|
// }
|
|
192
360
|
```
|
|
193
361
|
|
|
362
|
+
#### createQuery()
|
|
363
|
+
Create a query builder to help construct view queries with a fluent interface.
|
|
364
|
+
- Returns: QueryBuilder instance with methods:
|
|
365
|
+
- `key(value)`: Set exact key match
|
|
366
|
+
- `startKey(value)`: Set range start key
|
|
367
|
+
- `endKey(value)`: Set range end key
|
|
368
|
+
- `descending(bool)`: Set descending sort order
|
|
369
|
+
- `skip(number)`: Set number of results to skip
|
|
370
|
+
- `limit(number)`: Set max number of results
|
|
371
|
+
- `includeDocs(bool)`: Include full documents
|
|
372
|
+
- `reduce(bool)`: Enable/disable reduce
|
|
373
|
+
- `group(bool)`: Enable/disable grouping
|
|
374
|
+
- `groupLevel(number)`: Set group level
|
|
375
|
+
- `build()`: Return the constructed query options object
|
|
376
|
+
|
|
377
|
+
```javascript
|
|
378
|
+
const options = createQuery()
|
|
379
|
+
.startKey('A')
|
|
380
|
+
.endKey('B')
|
|
381
|
+
.includeDocs(true)
|
|
382
|
+
.limit(10)
|
|
383
|
+
.build()
|
|
384
|
+
|
|
385
|
+
const result = await query(config, view, options)
|
|
386
|
+
```
|
|
387
|
+
|
|
388
|
+
#### queryStream
|
|
389
|
+
|
|
390
|
+
Use Cases *Streaming Data*
|
|
391
|
+
|
|
392
|
+
**Parameters:**
|
|
393
|
+
- `config`: Object with couch URL string
|
|
394
|
+
- `view`: View path string
|
|
395
|
+
- `options`: Query options object
|
|
396
|
+
- `onRow`: Function called for each row in the results
|
|
397
|
+
|
|
398
|
+
Want to stream data from couch? You can with queryStream. It looks identical to query, except you add an extra 'onRow' function
|
|
399
|
+
|
|
400
|
+
Here is a small hapi example of streaming data from couch to the client as ndjson.
|
|
401
|
+
We do a small transform by only streaming the doc. You can do a lot of things in the onRow function.
|
|
402
|
+
|
|
403
|
+
```
|
|
404
|
+
import Hapi from '@hapi/hapi';
|
|
405
|
+
import { Readable } from 'stream';
|
|
406
|
+
import { queryStream } from bindConfig(process.env)
|
|
407
|
+
const view = '_design/users/_view/by_name'
|
|
408
|
+
|
|
409
|
+
const init = async () => {
|
|
410
|
+
const server = Hapi.server({ port: 3000 })
|
|
411
|
+
server.route({
|
|
412
|
+
method: 'GET',
|
|
413
|
+
path: '/stream',
|
|
414
|
+
handler: async (req, h) => {
|
|
415
|
+
const stream = new Readable({ read() {} });
|
|
416
|
+
const onRow = ({id, key, value, doc}) => stream.push(JSON.stringify(doc) + '\n')
|
|
417
|
+
const options = { startkey: req.query.startLetter, endkey: req.query.startLetter + '|', include_docs: true}
|
|
418
|
+
await queryStream(view, options, onRow)
|
|
419
|
+
stream.push(null) // end stream
|
|
420
|
+
return h.response(stream).type('application/x-ndjson');
|
|
421
|
+
}
|
|
422
|
+
})
|
|
423
|
+
|
|
424
|
+
await server.start();
|
|
425
|
+
console.log(`Server running on ${server.info.uri}`);
|
|
426
|
+
}
|
|
427
|
+
init()
|
|
428
|
+
```
|
|
429
|
+
Advanced Config Options
|
|
430
|
+
=======================
|
|
431
|
+
|
|
432
|
+
The config object supports the following properties:
|
|
433
|
+
|
|
434
|
+
| Property | Type | Default | Description |
|
|
435
|
+
|----------|------|---------|-------------|
|
|
436
|
+
| couch | string | required | The URL of the CouchDB database |
|
|
437
|
+
| throwOnGetNotFound | boolean | false | If true, throws an error when get() returns 404. If false, returns undefined |
|
|
438
|
+
| bindWithRetry | boolean | true | When using bindConfig(), adds retry logic to bound methods |
|
|
439
|
+
| maxRetries | number | 3 | Maximum number of retry attempts for retryable operations |
|
|
440
|
+
| initialDelay | number | 1000 | Initial delay in milliseconds before first retry |
|
|
441
|
+
| backoffFactor | number | 2 | Multiplier for exponential backoff between retries |
|
|
442
|
+
| useConsoleLogger | boolean | false | If true, enables console logging when no logger is provided |
|
|
443
|
+
| logger | object/function | undefined | Custom logging interface (winston-style object or function) |
|
|
444
|
+
|
|
445
|
+
Example configuration with all options:
|
|
446
|
+
```javascript
|
|
447
|
+
const config = {
|
|
448
|
+
couch: 'http://localhost:5984/mydb',
|
|
449
|
+
throwOnGetNotFound: true,
|
|
450
|
+
bindWithRetry: true,
|
|
451
|
+
maxRetries: 5,
|
|
452
|
+
initialDelay: 2000,
|
|
453
|
+
backoffFactor: 1.5,
|
|
454
|
+
useConsoleLogger: true,
|
|
455
|
+
logger: (level, ...args) => console.log(level, ...args)
|
|
456
|
+
}
|
|
457
|
+
```
|
|
458
|
+
|
|
459
|
+
|
|
194
460
|
Logging Support
|
|
195
461
|
==============
|
|
196
462
|
|
|
@@ -235,38 +501,3 @@ Each operation logs appropriate information at these levels:
|
|
|
235
501
|
- debug: Detailed operation information
|
|
236
502
|
|
|
237
503
|
|
|
238
|
-
Streaming Support
|
|
239
|
-
=================
|
|
240
|
-
|
|
241
|
-
Want to stream data from couch? You can with queryStream. It looks identical to query, except you add an extra 'onRow' function
|
|
242
|
-
|
|
243
|
-
Here is a small hapi example of streaming data from couch to the client as ndjson.
|
|
244
|
-
We do a small transform by only streaming the doc. you can do a lot of things in the onrow function.
|
|
245
|
-
|
|
246
|
-
```
|
|
247
|
-
import Hapi from '@hapi/hapi';
|
|
248
|
-
import { Readable } from 'stream';
|
|
249
|
-
import { queryStream } from bindConfig(process.env)
|
|
250
|
-
const view = '_design/users/_view/by_name'
|
|
251
|
-
|
|
252
|
-
const init = async () => {
|
|
253
|
-
const server = Hapi.server({ port: 3000 })
|
|
254
|
-
server.route({
|
|
255
|
-
method: 'GET',
|
|
256
|
-
path: '/stream',
|
|
257
|
-
handler: async (request, h) => {
|
|
258
|
-
const stream = new Readable({ read() {} });
|
|
259
|
-
const onRow = ({id, key, value, doc}) => stream.push(JSON.stringify(doc) + '\n')
|
|
260
|
-
const options = { startkey: req.query.startLetter, endkey: req.query.startLetter + '|', include_docs: true}
|
|
261
|
-
await queryStream(view, options, onRow)
|
|
262
|
-
stream.push(null) // end stream
|
|
263
|
-
return h.response(stream).type('application/x-ndjson');
|
|
264
|
-
}
|
|
265
|
-
})
|
|
266
|
-
|
|
267
|
-
await server.start();
|
|
268
|
-
console.log(`Server running on ${server.info.uri}`);
|
|
269
|
-
}
|
|
270
|
-
init()
|
|
271
|
-
```
|
|
272
|
-
|
package/cjs/impl/bulk.cjs
CHANGED
|
@@ -29,14 +29,21 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
29
29
|
var bulk_exports = {};
|
|
30
30
|
__export(bulk_exports, {
|
|
31
31
|
bulkGet: () => bulkGet,
|
|
32
|
+
bulkGetDictionary: () => bulkGetDictionary,
|
|
32
33
|
bulkRemove: () => bulkRemove,
|
|
33
|
-
bulkSave: () => bulkSave
|
|
34
|
+
bulkSave: () => bulkSave,
|
|
35
|
+
bulkSaveTransaction: () => bulkSaveTransaction
|
|
34
36
|
});
|
|
35
37
|
module.exports = __toCommonJS(bulk_exports);
|
|
36
38
|
var import_needle = __toESM(require("needle"), 1);
|
|
37
39
|
var import_bulk = require("../schema/bulk.cjs");
|
|
40
|
+
var import_retry = require("./retry.cjs");
|
|
41
|
+
var import_crud = require("./crud.cjs");
|
|
38
42
|
var import_errors = require("./errors.cjs");
|
|
43
|
+
var import_transactionErrors = require("./transactionErrors.cjs");
|
|
39
44
|
var import_logger = require("./logger.cjs");
|
|
45
|
+
var import_crud2 = require("../schema/crud.cjs");
|
|
46
|
+
var import_trackedEmitter = require("./trackedEmitter.cjs");
|
|
40
47
|
const opts = {
|
|
41
48
|
json: true,
|
|
42
49
|
headers: {
|
|
@@ -83,10 +90,10 @@ const bulkGet = import_bulk.BulkGet.implement(async (config, ids) => {
|
|
|
83
90
|
const keys = ids;
|
|
84
91
|
logger.info(`Starting bulk get for ${keys.length} documents`);
|
|
85
92
|
const url = `${config.couch}/_all_docs?include_docs=true`;
|
|
86
|
-
const
|
|
93
|
+
const payload = { keys };
|
|
87
94
|
let resp;
|
|
88
95
|
try {
|
|
89
|
-
resp = await (0, import_needle.default)("post", url,
|
|
96
|
+
resp = await (0, import_needle.default)("post", url, payload, opts);
|
|
90
97
|
} catch (err) {
|
|
91
98
|
logger.error("Network error during bulk get:", err);
|
|
92
99
|
import_errors.RetryableError.handleNetworkError(err);
|
|
@@ -103,17 +110,158 @@ const bulkGet = import_bulk.BulkGet.implement(async (config, ids) => {
|
|
|
103
110
|
logger.error(`Unexpected status code: ${resp.statusCode}`);
|
|
104
111
|
throw new Error("could not fetch");
|
|
105
112
|
}
|
|
106
|
-
const
|
|
107
|
-
|
|
108
|
-
logger.info(`Successfully retrieved ${docs.length} documents`);
|
|
109
|
-
return docs;
|
|
113
|
+
const body = resp.body;
|
|
114
|
+
return body;
|
|
110
115
|
});
|
|
111
116
|
const bulkRemove = import_bulk.BulkRemove.implement(async (config, ids) => {
|
|
112
117
|
const logger = (0, import_logger.createLogger)(config);
|
|
113
118
|
logger.info(`Starting bulk remove for ${ids.length} documents`);
|
|
114
|
-
const
|
|
119
|
+
const resp = await bulkGet(config, ids);
|
|
120
|
+
const toRemove = [];
|
|
121
|
+
resp.rows.forEach((row) => {
|
|
122
|
+
if (!row.doc) return;
|
|
123
|
+
try {
|
|
124
|
+
const d = import_crud2.CouchDoc.parse(row.doc);
|
|
125
|
+
d._deleted = true;
|
|
126
|
+
toRemove.push(d);
|
|
127
|
+
} catch (e) {
|
|
128
|
+
logger.warn(`Invalid document structure in bulk remove: ${row.id}`, e);
|
|
129
|
+
}
|
|
130
|
+
});
|
|
131
|
+
return bulkSave(config, toRemove);
|
|
132
|
+
});
|
|
133
|
+
const bulkGetDictionary = import_bulk.BulkGetDictionary.implement(async (config, ids) => {
|
|
134
|
+
const resp = await bulkGet(config, ids);
|
|
135
|
+
const results = { found: {}, notFound: {} };
|
|
136
|
+
resp.rows.forEach(
|
|
137
|
+
/** @param { import('../schema/query.mjs').ViewRowSchema } row */
|
|
138
|
+
(row) => {
|
|
139
|
+
if (!row.key) return;
|
|
140
|
+
if (row.error) {
|
|
141
|
+
results.notFound[row.key] = row;
|
|
142
|
+
return;
|
|
143
|
+
}
|
|
144
|
+
try {
|
|
145
|
+
const doc = import_crud2.CouchDoc.parse(row.doc);
|
|
146
|
+
results.found[doc._id] = doc;
|
|
147
|
+
} catch (e) {
|
|
148
|
+
results.notFound[row.key] = row;
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
);
|
|
152
|
+
return results;
|
|
153
|
+
});
|
|
154
|
+
const bulkSaveTransaction = import_bulk.BulkSaveTransaction.implement(async (config, transactionId, docs) => {
|
|
155
|
+
const emitter = (0, import_trackedEmitter.setupEmitter)(config);
|
|
156
|
+
const logger = (0, import_logger.createLogger)(config);
|
|
157
|
+
const retryOptions = {
|
|
158
|
+
maxRetries: config.maxRetries ?? 10,
|
|
159
|
+
initialDelay: config.initialDelay ?? 1e3,
|
|
160
|
+
backoffFactor: config.backoffFactor ?? 2
|
|
161
|
+
};
|
|
162
|
+
const _put = config.bindWithRetry ? (0, import_retry.withRetry)(import_crud.put.bind(null, config), retryOptions) : import_crud.put.bind(null, config);
|
|
163
|
+
logger.info(`Starting bulk save transaction ${transactionId} for ${docs.length} documents`);
|
|
164
|
+
const txnDoc = {
|
|
165
|
+
_id: `txn:${transactionId}`,
|
|
166
|
+
_rev: null,
|
|
167
|
+
type: "transaction",
|
|
168
|
+
status: "pending",
|
|
169
|
+
changes: docs,
|
|
170
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
171
|
+
};
|
|
172
|
+
let txnresp = await _put(txnDoc);
|
|
173
|
+
logger.debug("Transaction document created:", txnDoc, txnresp);
|
|
174
|
+
await emitter.emit("transaction-created", { txnresp, txnDoc });
|
|
175
|
+
if (txnresp.error) {
|
|
176
|
+
throw new import_transactionErrors.TransactionSetupError("Failed to create transaction document", {
|
|
177
|
+
error: txnresp.error,
|
|
178
|
+
response: txnresp.body
|
|
179
|
+
});
|
|
180
|
+
}
|
|
181
|
+
const existingDocs = await bulkGetDictionary(config, docs.map((d) => d._id));
|
|
182
|
+
logger.debug("Fetched current revisions of documents:", existingDocs);
|
|
183
|
+
await emitter.emit("transaction-revs-fetched", existingDocs);
|
|
184
|
+
const revErrors = [];
|
|
185
|
+
docs.forEach((d) => {
|
|
186
|
+
if (existingDocs.found[d._id] && existingDocs.found[d._id]._rev !== d._rev) revErrors.push(d._id);
|
|
187
|
+
if (existingDocs.notFound[d._id] && d._rev) revErrors.push(d._id);
|
|
188
|
+
});
|
|
189
|
+
if (revErrors.length > 0) {
|
|
190
|
+
throw new import_transactionErrors.TransactionVersionConflictError(revErrors);
|
|
191
|
+
}
|
|
192
|
+
logger.debug("Checked document revisions:", existingDocs);
|
|
193
|
+
await emitter.emit("transaction-revs-checked", existingDocs);
|
|
194
|
+
const providedDocsById = {};
|
|
115
195
|
docs.forEach((d) => {
|
|
116
|
-
d.
|
|
196
|
+
if (!d._id) return;
|
|
197
|
+
providedDocsById[d._id] = d;
|
|
117
198
|
});
|
|
118
|
-
|
|
199
|
+
const newDocsToRollback = [];
|
|
200
|
+
const potentialExistingDocsToRollack = [];
|
|
201
|
+
const failedDocs = [];
|
|
202
|
+
try {
|
|
203
|
+
logger.info("Transaction started:", txnDoc);
|
|
204
|
+
await emitter.emit("transaction-started", txnDoc);
|
|
205
|
+
const results = await bulkSave(config, docs);
|
|
206
|
+
logger.info("Transaction updates applied:", results);
|
|
207
|
+
await emitter.emit("transaction-updates-applied", results);
|
|
208
|
+
results.forEach((r) => {
|
|
209
|
+
if (!r.id) return;
|
|
210
|
+
if (!r.error) {
|
|
211
|
+
if (existingDocs.notFound[r.id]) newDocsToRollback.push(r);
|
|
212
|
+
if (existingDocs.found[r.id]) potentialExistingDocsToRollack.push(r);
|
|
213
|
+
} else {
|
|
214
|
+
failedDocs.push(r);
|
|
215
|
+
}
|
|
216
|
+
});
|
|
217
|
+
if (failedDocs.length > 0) {
|
|
218
|
+
throw new import_transactionErrors.TransactionBulkOperationError(failedDocs);
|
|
219
|
+
}
|
|
220
|
+
txnDoc.status = "completed";
|
|
221
|
+
txnDoc._rev = txnresp.rev;
|
|
222
|
+
txnresp = await _put(txnDoc);
|
|
223
|
+
logger.info("Transaction completed:", txnDoc);
|
|
224
|
+
await emitter.emit("transaction-completed", { txnresp, txnDoc });
|
|
225
|
+
if (txnresp.statusCode !== 201) {
|
|
226
|
+
logger.error("Failed to update transaction status to completed");
|
|
227
|
+
}
|
|
228
|
+
return results;
|
|
229
|
+
} catch (error) {
|
|
230
|
+
logger.error("Transaction failed, attempting rollback:", error);
|
|
231
|
+
const toRollback = [];
|
|
232
|
+
potentialExistingDocsToRollack.forEach((row) => {
|
|
233
|
+
if (!row.id || !row.rev) return;
|
|
234
|
+
const doc = existingDocs.found[row.id];
|
|
235
|
+
doc._rev = row.rev;
|
|
236
|
+
toRollback.push(doc);
|
|
237
|
+
});
|
|
238
|
+
newDocsToRollback.forEach((d) => {
|
|
239
|
+
if (!d.id || !d.rev) return;
|
|
240
|
+
const before = structuredClone(providedDocsById[d.id]);
|
|
241
|
+
before._rev = d.rev;
|
|
242
|
+
before._deleted = true;
|
|
243
|
+
toRollback.push(before);
|
|
244
|
+
});
|
|
245
|
+
const bulkRollbackResult = await bulkSave(config, toRollback);
|
|
246
|
+
let status = "rolled_back";
|
|
247
|
+
bulkRollbackResult.forEach((r) => {
|
|
248
|
+
if (r.error) status = "rollback_failed";
|
|
249
|
+
});
|
|
250
|
+
logger.warn("Transaction rolled back:", { bulkRollbackResult, status });
|
|
251
|
+
await emitter.emit("transaction-rolled-back", { bulkRollbackResult, status });
|
|
252
|
+
txnDoc.status = status;
|
|
253
|
+
txnDoc._rev = txnresp.rev;
|
|
254
|
+
txnresp = await _put(txnDoc);
|
|
255
|
+
logger.warn("Transaction rollback status updated:", txnDoc);
|
|
256
|
+
await emitter.emit("transaction-rolled-back-status", { txnresp, txnDoc });
|
|
257
|
+
if (txnresp.statusCode !== 201) {
|
|
258
|
+
logger.error("Failed to update transaction status to rolled_back");
|
|
259
|
+
}
|
|
260
|
+
throw new import_transactionErrors.TransactionRollbackError(
|
|
261
|
+
"Transaction failed and rollback was unsuccessful",
|
|
262
|
+
/** @type {Error} */
|
|
263
|
+
error,
|
|
264
|
+
bulkRollbackResult
|
|
265
|
+
);
|
|
266
|
+
}
|
|
119
267
|
});
|