hide-a-bed 5.2.8 → 6.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prettierrc +7 -0
- package/README.md +270 -218
- package/dist/cjs/index.cjs +1952 -0
- package/dist/esm/index.mjs +1898 -0
- package/eslint.config.js +15 -0
- package/impl/bindConfig.mts +140 -0
- package/impl/bulkGet.mts +256 -0
- package/impl/bulkRemove.mts +98 -0
- package/impl/bulkSave.mts +286 -0
- package/impl/get.mts +137 -0
- package/impl/getDBInfo.mts +67 -0
- package/impl/patch.mts +134 -0
- package/impl/put.mts +56 -0
- package/impl/query.mts +224 -0
- package/impl/remove.mts +65 -0
- package/impl/retry.mts +66 -0
- package/impl/stream.mts +143 -0
- package/impl/sugar/lock.mts +103 -0
- package/impl/sugar/{watch.mjs → watch.mts} +56 -22
- package/impl/utils/errors.mts +130 -0
- package/impl/utils/logger.mts +62 -0
- package/impl/utils/mergeNeedleOpts.mts +16 -0
- package/impl/utils/parseRows.mts +117 -0
- package/impl/utils/queryBuilder.mts +173 -0
- package/impl/utils/queryString.mts +44 -0
- package/impl/{trackedEmitter.mjs → utils/trackedEmitter.mts} +9 -7
- package/impl/utils/transactionErrors.mts +71 -0
- package/index.mts +82 -0
- package/migration_guides/v6.md +70 -0
- package/package.json +49 -32
- package/schema/config.mts +81 -0
- package/schema/couch/couch.input.schema.ts +43 -0
- package/schema/couch/couch.output.schema.ts +169 -0
- package/schema/sugar/lock.mts +18 -0
- package/schema/sugar/watch.mts +14 -0
- package/schema/util.mts +8 -0
- package/tsconfig.json +10 -4
- package/tsdown.config.ts +16 -0
- package/typedoc.json +4 -0
- package/types/output/eslint.config.d.ts +3 -0
- package/types/output/eslint.config.d.ts.map +1 -0
- package/types/output/impl/bindConfig.d.mts +174 -0
- package/types/output/impl/bindConfig.d.mts.map +1 -0
- package/types/output/impl/bulkGet.d.mts +75 -0
- package/types/output/impl/bulkGet.d.mts.map +1 -0
- package/types/output/impl/bulkGet.test.d.mts +2 -0
- package/types/output/impl/bulkGet.test.d.mts.map +1 -0
- package/types/output/impl/bulkRemove.d.mts +63 -0
- package/types/output/impl/bulkRemove.d.mts.map +1 -0
- package/types/output/impl/bulkRemove.test.d.mts +2 -0
- package/types/output/impl/bulkRemove.test.d.mts.map +1 -0
- package/types/output/impl/bulkSave.d.mts +64 -0
- package/types/output/impl/bulkSave.d.mts.map +1 -0
- package/types/output/impl/bulkSave.test.d.mts +2 -0
- package/types/output/impl/bulkSave.test.d.mts.map +1 -0
- package/types/output/impl/get.d.mts +20 -0
- package/types/output/impl/get.d.mts.map +1 -0
- package/types/output/impl/get.test.d.mts +2 -0
- package/types/output/impl/get.test.d.mts.map +1 -0
- package/types/output/impl/getDBInfo.d.mts +52 -0
- package/types/output/impl/getDBInfo.d.mts.map +1 -0
- package/types/output/impl/getDBInfo.test.d.mts +2 -0
- package/types/output/impl/getDBInfo.test.d.mts.map +1 -0
- package/types/output/impl/patch.d.mts +45 -0
- package/types/output/impl/patch.d.mts.map +1 -0
- package/types/output/impl/patch.test.d.mts +2 -0
- package/types/output/impl/patch.test.d.mts.map +1 -0
- package/types/output/impl/put.d.mts +5 -0
- package/types/output/impl/put.d.mts.map +1 -0
- package/types/output/impl/put.test.d.mts +2 -0
- package/types/output/impl/put.test.d.mts.map +1 -0
- package/types/output/impl/query.d.mts +47 -0
- package/types/output/impl/query.d.mts.map +1 -0
- package/types/output/impl/query.test.d.mts +2 -0
- package/types/output/impl/query.test.d.mts.map +1 -0
- package/types/output/impl/remove.d.mts +9 -0
- package/types/output/impl/remove.d.mts.map +1 -0
- package/types/output/impl/remove.test.d.mts +2 -0
- package/types/output/impl/remove.test.d.mts.map +1 -0
- package/types/output/impl/retry.d.mts +32 -0
- package/types/output/impl/retry.d.mts.map +1 -0
- package/types/output/impl/retry.test.d.mts +2 -0
- package/types/output/impl/retry.test.d.mts.map +1 -0
- package/types/output/impl/stream.d.mts +13 -0
- package/types/output/impl/stream.d.mts.map +1 -0
- package/types/output/impl/stream.test.d.mts +2 -0
- package/types/output/impl/stream.test.d.mts.map +1 -0
- package/types/output/impl/sugar/lock.d.mts +24 -0
- package/types/output/impl/sugar/lock.d.mts.map +1 -0
- package/types/output/impl/sugar/lock.test.d.mts +2 -0
- package/types/output/impl/sugar/lock.test.d.mts.map +1 -0
- package/types/output/impl/sugar/watch.d.mts +21 -0
- package/types/output/impl/sugar/watch.d.mts.map +1 -0
- package/types/output/impl/sugar/watch.test.d.mts +2 -0
- package/types/output/impl/sugar/watch.test.d.mts.map +1 -0
- package/types/output/impl/utils/errors.d.mts +78 -0
- package/types/output/impl/utils/errors.d.mts.map +1 -0
- package/types/output/impl/utils/errors.test.d.mts +2 -0
- package/types/output/impl/utils/errors.test.d.mts.map +1 -0
- package/types/output/impl/utils/logger.d.mts +11 -0
- package/types/output/impl/utils/logger.d.mts.map +1 -0
- package/types/output/impl/utils/logger.test.d.mts +2 -0
- package/types/output/impl/utils/logger.test.d.mts.map +1 -0
- package/types/output/impl/utils/mergeNeedleOpts.d.mts +53 -0
- package/types/output/impl/utils/mergeNeedleOpts.d.mts.map +1 -0
- package/types/output/impl/utils/parseRows.d.mts +15 -0
- package/types/output/impl/utils/parseRows.d.mts.map +1 -0
- package/types/output/impl/utils/parseRows.test.d.mts +2 -0
- package/types/output/impl/utils/parseRows.test.d.mts.map +1 -0
- package/types/output/impl/utils/queryBuilder.d.mts +68 -0
- package/types/output/impl/utils/queryBuilder.d.mts.map +1 -0
- package/types/output/impl/utils/queryBuilder.test.d.mts +2 -0
- package/types/output/impl/utils/queryBuilder.test.d.mts.map +1 -0
- package/types/output/impl/utils/queryString.d.mts +9 -0
- package/types/output/impl/utils/queryString.d.mts.map +1 -0
- package/types/output/impl/utils/queryString.test.d.mts +2 -0
- package/types/output/impl/utils/queryString.test.d.mts.map +1 -0
- package/types/output/impl/utils/trackedEmitter.d.mts +7 -0
- package/types/output/impl/utils/trackedEmitter.d.mts.map +1 -0
- package/{impl → types/output/impl/utils}/transactionErrors.d.mts +16 -31
- package/types/output/impl/utils/transactionErrors.d.mts.map +1 -0
- package/types/output/index.d.mts +32 -0
- package/types/output/index.d.mts.map +1 -0
- package/types/output/index.test.d.mts +2 -0
- package/types/output/index.test.d.mts.map +1 -0
- package/types/output/schema/config.d.mts +90 -0
- package/types/output/schema/config.d.mts.map +1 -0
- package/types/output/schema/couch/couch.input.schema.d.ts +29 -0
- package/types/output/schema/couch/couch.input.schema.d.ts.map +1 -0
- package/types/output/schema/couch/couch.output.schema.d.ts +113 -0
- package/types/output/schema/couch/couch.output.schema.d.ts.map +1 -0
- package/types/output/schema/sugar/lock.d.mts +19 -0
- package/types/output/schema/sugar/lock.d.mts.map +1 -0
- package/types/output/schema/sugar/watch.d.mts +11 -0
- package/types/output/schema/sugar/watch.d.mts.map +1 -0
- package/types/output/schema/util.d.mts +85 -0
- package/types/output/schema/util.d.mts.map +1 -0
- package/types/output/tsdown.config.d.ts +3 -0
- package/types/output/tsdown.config.d.ts.map +1 -0
- package/types/output/types/standard-schema.d.ts +60 -0
- package/types/output/types/standard-schema.d.ts.map +1 -0
- package/types/standard-schema.ts +76 -0
- package/types/utils.d.ts +1 -0
- package/cjs/impl/bulk.cjs +0 -275
- package/cjs/impl/changes.cjs +0 -67
- package/cjs/impl/crud.cjs +0 -127
- package/cjs/impl/errors.cjs +0 -75
- package/cjs/impl/logger.cjs +0 -70
- package/cjs/impl/patch.cjs +0 -95
- package/cjs/impl/query.cjs +0 -116
- package/cjs/impl/queryBuilder.cjs +0 -163
- package/cjs/impl/retry.cjs +0 -55
- package/cjs/impl/stream.cjs +0 -121
- package/cjs/impl/sugar/lock.cjs +0 -81
- package/cjs/impl/sugar/watch.cjs +0 -159
- package/cjs/impl/trackedEmitter.cjs +0 -54
- package/cjs/impl/transactionErrors.cjs +0 -70
- package/cjs/impl/util.cjs +0 -64
- package/cjs/index.cjs +0 -132
- package/cjs/integration/changes.cjs +0 -76
- package/cjs/integration/disconnect-watch.cjs +0 -52
- package/cjs/integration/watch.cjs +0 -59
- package/cjs/schema/bind.cjs +0 -59
- package/cjs/schema/bulk.cjs +0 -92
- package/cjs/schema/changes.cjs +0 -68
- package/cjs/schema/config.cjs +0 -48
- package/cjs/schema/crud.cjs +0 -77
- package/cjs/schema/patch.cjs +0 -53
- package/cjs/schema/query.cjs +0 -62
- package/cjs/schema/stream.cjs +0 -42
- package/cjs/schema/sugar/lock.cjs +0 -59
- package/cjs/schema/sugar/watch.cjs +0 -42
- package/cjs/schema/util.cjs +0 -39
- package/config.json +0 -5
- package/docs/compiler.png +0 -0
- package/dualmode.config.json +0 -11
- package/impl/bulk.d.mts +0 -11
- package/impl/bulk.d.mts.map +0 -1
- package/impl/bulk.mjs +0 -291
- package/impl/changes.d.mts +0 -12
- package/impl/changes.d.mts.map +0 -1
- package/impl/changes.mjs +0 -53
- package/impl/crud.d.mts +0 -7
- package/impl/crud.d.mts.map +0 -1
- package/impl/crud.mjs +0 -108
- package/impl/errors.d.mts +0 -43
- package/impl/errors.d.mts.map +0 -1
- package/impl/errors.mjs +0 -65
- package/impl/logger.d.mts +0 -32
- package/impl/logger.d.mts.map +0 -1
- package/impl/logger.mjs +0 -59
- package/impl/patch.d.mts +0 -6
- package/impl/patch.d.mts.map +0 -1
- package/impl/patch.mjs +0 -88
- package/impl/query.d.mts +0 -195
- package/impl/query.d.mts.map +0 -1
- package/impl/query.mjs +0 -122
- package/impl/queryBuilder.d.mts +0 -154
- package/impl/queryBuilder.d.mts.map +0 -1
- package/impl/queryBuilder.mjs +0 -175
- package/impl/retry.d.mts +0 -2
- package/impl/retry.d.mts.map +0 -1
- package/impl/retry.mjs +0 -39
- package/impl/stream.d.mts +0 -3
- package/impl/stream.d.mts.map +0 -1
- package/impl/stream.mjs +0 -98
- package/impl/sugar/lock.d.mts +0 -5
- package/impl/sugar/lock.d.mts.map +0 -1
- package/impl/sugar/lock.mjs +0 -70
- package/impl/sugar/watch.d.mts +0 -34
- package/impl/sugar/watch.d.mts.map +0 -1
- package/impl/trackedEmitter.d.mts +0 -8
- package/impl/trackedEmitter.d.mts.map +0 -1
- package/impl/transactionErrors.d.mts.map +0 -1
- package/impl/transactionErrors.mjs +0 -47
- package/impl/util.d.mts +0 -3
- package/impl/util.d.mts.map +0 -1
- package/impl/util.mjs +0 -35
- package/index.d.mts +0 -80
- package/index.d.mts.map +0 -1
- package/index.mjs +0 -141
- package/integration/changes.mjs +0 -60
- package/integration/disconnect-watch.mjs +0 -36
- package/integration/watch.mjs +0 -40
- package/schema/bind.d.mts +0 -5461
- package/schema/bind.d.mts.map +0 -1
- package/schema/bind.mjs +0 -43
- package/schema/bulk.d.mts +0 -923
- package/schema/bulk.d.mts.map +0 -1
- package/schema/bulk.mjs +0 -83
- package/schema/changes.d.mts +0 -191
- package/schema/changes.d.mts.map +0 -1
- package/schema/changes.mjs +0 -59
- package/schema/config.d.mts +0 -79
- package/schema/config.d.mts.map +0 -1
- package/schema/config.mjs +0 -26
- package/schema/crud.d.mts +0 -491
- package/schema/crud.d.mts.map +0 -1
- package/schema/crud.mjs +0 -64
- package/schema/patch.d.mts +0 -255
- package/schema/patch.d.mts.map +0 -1
- package/schema/patch.mjs +0 -42
- package/schema/query.d.mts +0 -406
- package/schema/query.d.mts.map +0 -1
- package/schema/query.mjs +0 -45
- package/schema/stream.d.mts +0 -211
- package/schema/stream.d.mts.map +0 -1
- package/schema/stream.mjs +0 -23
- package/schema/sugar/lock.d.mts +0 -238
- package/schema/sugar/lock.d.mts.map +0 -1
- package/schema/sugar/lock.mjs +0 -50
- package/schema/sugar/watch.d.mts +0 -127
- package/schema/sugar/watch.d.mts.map +0 -1
- package/schema/sugar/watch.mjs +0 -29
- package/schema/util.d.mts +0 -160
- package/schema/util.d.mts.map +0 -1
- package/schema/util.mjs +0 -35
- package/types/changes-stream.d.ts +0 -11
|
@@ -0,0 +1,1952 @@
|
|
|
1
|
+
//#region rolldown:runtime
// Bundler-generated CommonJS/ESM interop helpers (rolldown runtime).
// Cached references to the Object intrinsics used below.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copies the own properties of `from` onto `to` as live getters, skipping keys
// already present on `to` and the single `except` key. Enumerability of each
// copied property mirrors the source descriptor.
var __copyProps = (to, from, except, desc) => {
	if (from && typeof from === "object" || typeof from === "function") {
		for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
			key = keys[i];
			if (!__hasOwnProp.call(to, key) && key !== except) {
				__defProp(to, key, {
					// .bind(null, key) freezes `key` so each getter reads its own property
					get: ((k) => from[k]).bind(null, key),
					enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
				});
			}
		}
	}
	return to;
};
// Wraps a required CommonJS module so it can be consumed like an ES module:
// builds a target object with the module's prototype, re-exposes every export
// via __copyProps, and — unless the module already flags itself as ESM (or
// isNodeMode is set) — attaches the module itself as the `default` export.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
	value: mod,
	enumerable: true
}) : target, mod));

//#endregion
|
|
28
|
+
let zod = require("zod");
|
|
29
|
+
zod = __toESM(zod);
|
|
30
|
+
let node_timers_promises = require("node:timers/promises");
|
|
31
|
+
let needle = require("needle");
|
|
32
|
+
needle = __toESM(needle);
|
|
33
|
+
let stream_chain = require("stream-chain");
|
|
34
|
+
stream_chain = __toESM(stream_chain);
|
|
35
|
+
let stream_json_Parser_js = require("stream-json/Parser.js");
|
|
36
|
+
stream_json_Parser_js = __toESM(stream_json_Parser_js);
|
|
37
|
+
let stream_json_filters_Pick_js = require("stream-json/filters/Pick.js");
|
|
38
|
+
stream_json_filters_Pick_js = __toESM(stream_json_filters_Pick_js);
|
|
39
|
+
let stream_json_streamers_StreamArray_js = require("stream-json/streamers/StreamArray.js");
|
|
40
|
+
stream_json_streamers_StreamArray_js = __toESM(stream_json_streamers_StreamArray_js);
|
|
41
|
+
let events = require("events");
|
|
42
|
+
|
|
43
|
+
//#region impl/utils/queryBuilder.mts
|
|
44
|
+
/**
|
|
45
|
+
* A builder class for constructing CouchDB view query options.
|
|
46
|
+
* Provides a fluent API for setting various query parameters.
|
|
47
|
+
* @example
|
|
48
|
+
* const queryOptions = new QueryBuilder()
|
|
49
|
+
* .limit(10)
|
|
50
|
+
* .include_docs()
|
|
51
|
+
* .startKey('someKey')
|
|
52
|
+
* .build();
|
|
53
|
+
* @see SimpleViewOptions for the full list of options.
|
|
54
|
+
*
|
|
55
|
+
* @remarks
|
|
56
|
+
* Each method corresponds to a CouchDB view option and returns the builder instance for chaining.
|
|
57
|
+
*
|
|
58
|
+
* @returns The constructed SimpleViewOptions object.
|
|
59
|
+
*/
|
|
60
|
+
/**
 * A builder class for constructing CouchDB view query options.
 * Provides a fluent API for setting various query parameters.
 * @example
 * const queryOptions = new QueryBuilder()
 *   .limit(10)
 *   .include_docs()
 *   .startKey('someKey')
 *   .build();
 * @see SimpleViewOptions for the full list of options.
 *
 * @remarks
 * Each method corresponds to a CouchDB view option and returns the builder
 * instance for chaining. All setters funnel through a single private helper
 * so every option is recorded the same way.
 *
 * @returns The constructed SimpleViewOptions object.
 */
var QueryBuilder = class {
	#options = {};
	/** Records one option under `name` and returns the builder for chaining. */
	#set(name, value) {
		this.#options[name] = value;
		return this;
	}
	descending(descending = true) {
		return this.#set("descending", descending);
	}
	endkey_docid(endkeyDocId) {
		return this.#set("endkey_docid", endkeyDocId);
	}
	/**
	 * Alias for endkey_docid
	 */
	end_key_doc_id(endkeyDocId) {
		return this.#set("endkey_docid", endkeyDocId);
	}
	endkey(endkey) {
		return this.#set("endkey", endkey);
	}
	/**
	 * Alias for endkey
	 */
	endKey(endkey) {
		return this.#set("endkey", endkey);
	}
	/**
	 * Alias for endkey
	 */
	end_key(endkey) {
		return this.#set("endkey", endkey);
	}
	group(group = true) {
		return this.#set("group", group);
	}
	group_level(level) {
		return this.#set("group_level", level);
	}
	include_docs(includeDocs = true) {
		return this.#set("include_docs", includeDocs);
	}
	inclusive_end(inclusiveEnd = true) {
		return this.#set("inclusive_end", inclusiveEnd);
	}
	key(key) {
		return this.#set("key", key);
	}
	keys(keys) {
		return this.#set("keys", keys);
	}
	limit(limit) {
		return this.#set("limit", limit);
	}
	reduce(reduce = true) {
		return this.#set("reduce", reduce);
	}
	skip(skip) {
		return this.#set("skip", skip);
	}
	sorted(sorted = true) {
		return this.#set("sorted", sorted);
	}
	stable(stable = true) {
		return this.#set("stable", stable);
	}
	startkey(startkey) {
		return this.#set("startkey", startkey);
	}
	/**
	 * Alias for startkey
	 */
	startKey(startkey) {
		return this.#set("startkey", startkey);
	}
	/**
	 * Alias for startkey
	 */
	start_key(startkey) {
		return this.#set("startkey", startkey);
	}
	startkey_docid(startkeyDocId) {
		return this.#set("startkey_docid", startkeyDocId);
	}
	/**
	 * Alias for startkey_docid
	 */
	start_key_doc_id(startkeyDocId) {
		return this.#set("startkey_docid", startkeyDocId);
	}
	update(update) {
		return this.#set("update", update);
	}
	update_seq(updateSeq = true) {
		return this.#set("update_seq", updateSeq);
	}
	/**
	 * Builds and returns the ViewOptions object.
	 * Returns a shallow snapshot, so later mutations of the builder do not
	 * affect previously built objects.
	 */
	build() {
		return { ...this.#options };
	}
};
const createQuery = () => new QueryBuilder();
|
|
184
|
+
|
|
185
|
+
//#endregion
|
|
186
|
+
//#region schema/config.mts
|
|
187
|
+
// Variadic argument list used by every logger function signature below.
const anyArgs = zod.z.array(zod.z.any());
// A logger is either a winston-like object with optional per-level methods,
// or a single function invoked for every level (level is passed by callers).
const LoggerSchema = zod.z.object({
	error: zod.z.function({
		input: anyArgs,
		output: zod.z.void()
	}).optional(),
	warn: zod.z.function({
		input: anyArgs,
		output: zod.z.void()
	}).optional(),
	info: zod.z.function({
		input: anyArgs,
		output: zod.z.void()
	}).optional(),
	debug: zod.z.function({
		input: anyArgs,
		output: zod.z.void()
	}).optional()
}).or(zod.z.function({
	input: anyArgs,
	output: zod.z.void()
}));
// Minimal needle options this library always supplies per request.
const NeedleBaseOptions = zod.z.object({
	json: zod.z.boolean(),
	headers: zod.z.record(zod.z.string(), zod.z.string()),
	parse_response: zod.z.boolean().optional()
});
// Full set of user-overridable needle HTTP client options; all optional.
// Mirrors the options documented by the needle package (timeouts, redirects,
// auth, proxy, TLS, parsing, etc.).
const NeedleOptions = zod.z.object({
	json: zod.z.boolean().optional(),
	compressed: zod.z.boolean().optional(),
	follow_max: zod.z.number().optional(),
	follow_set_cookie: zod.z.boolean().optional(),
	follow_set_referer: zod.z.boolean().optional(),
	follow: zod.z.number().optional(),
	timeout: zod.z.number().optional(),
	read_timeout: zod.z.number().optional(),
	parse_response: zod.z.boolean().optional(),
	decode: zod.z.boolean().optional(),
	parse_cookies: zod.z.boolean().optional(),
	cookies: zod.z.record(zod.z.string(), zod.z.string()).optional(),
	headers: zod.z.record(zod.z.string(), zod.z.string()).optional(),
	auth: zod.z.enum([
		"auto",
		"digest",
		"basic"
	]).optional(),
	username: zod.z.string().optional(),
	password: zod.z.string().optional(),
	proxy: zod.z.string().optional(),
	agent: zod.z.any().optional(),
	rejectUnauthorized: zod.z.boolean().optional(),
	output: zod.z.string().optional(),
	parse: zod.z.boolean().optional(),
	multipart: zod.z.boolean().optional(),
	open_timeout: zod.z.number().optional(),
	response_timeout: zod.z.number().optional(),
	keepAlive: zod.z.boolean().optional()
});
// Top-level configuration object for binding the library to a CouchDB URL.
// strictObject: unknown keys are rejected. The "~"-prefixed keys are internal
// caches/plumbing, not part of the public surface.
const CouchConfig = zod.z.strictObject({
	backoffFactor: zod.z.number().optional().default(2).describe("multiplier for exponential backoff"),
	bindWithRetry: zod.z.boolean().optional().default(true).describe("should we bind with retry"),
	couch: zod.z.string().describe("the url of the couch db"),
	initialDelay: zod.z.number().optional().default(1e3).describe("initial retry delay in milliseconds"),
	logger: LoggerSchema.optional().describe("logging interface supporting winston-like or simple function interface"),
	maxRetries: zod.z.number().optional().default(3).describe("maximum number of retry attempts"),
	needleOpts: NeedleOptions.optional(),
	throwOnGetNotFound: zod.z.boolean().optional().default(false).describe("if a get is 404 should we throw or return undefined"),
	useConsoleLogger: zod.z.boolean().optional().default(false).describe("turn on console as a fallback logger"),
	"~emitter": zod.z.any().optional().describe("emitter for events"),
	"~normalizedLogger": zod.z.any().optional()
}).describe("The std config object");
|
|
258
|
+
|
|
259
|
+
//#endregion
|
|
260
|
+
//#region impl/utils/errors.mts
|
|
261
|
+
// HTTP status codes treated as transient and therefore worth retrying.
const RETRYABLE_STATUS_CODES = new Set([
	408,
	429,
	500,
	502,
	503,
	504
]);
// Low-level socket/DNS error codes mapped onto an equivalent HTTP status.
const NETWORK_ERROR_STATUS_MAP = {
	ECONNREFUSED: 503,
	ECONNRESET: 503,
	ETIMEDOUT: 503,
	ENETUNREACH: 503,
	ENOTFOUND: 503,
	EPIPE: 503,
	EHOSTUNREACH: 503,
	ESOCKETTIMEDOUT: 503
};
// True when `value` looks like a Node network error whose `code` we map above.
const isNetworkError = (value) => {
	if (value === null || typeof value !== "object") return false;
	const { code } = value;
	return typeof code === "string" && code in NETWORK_ERROR_STATUS_MAP;
};
/**
 * Error thrown when a requested CouchDB document cannot be found.
 *
 * @remarks
 * The `docId` property exposes the identifier that triggered the failure,
 * which is helpful for logging and retry strategies.
 *
 * @public
 */
var NotFoundError = class extends Error {
	/**
	 * Identifier of the missing document.
	 */
	docId;
	/**
	 * Creates a new {@link NotFoundError} instance.
	 *
	 * @param docId - The identifier of the document that was not found.
	 * @param message - Optional custom error message.
	 */
	constructor(docId, message = "Document not found") {
		super(message);
		this.name = "NotFoundError";
		this.docId = docId;
	}
};
/**
 * Error signalling that an operation can be retried due to transient conditions.
 *
 * @remarks
 * Use `RetryableError.isRetryableStatusCode` and `RetryableError.handleNetworkError`
 * to detect when a failure should trigger retry logic.
 *
 * @public
 */
var RetryableError = class RetryableError extends Error {
	/**
	 * HTTP status code associated with the retryable failure, when available.
	 */
	statusCode;
	/**
	 * Creates a new {@link RetryableError} instance.
	 *
	 * @param message - Detailed description of the failure.
	 * @param statusCode - Optional HTTP status code corresponding to the failure.
	 */
	constructor(message, statusCode) {
		super(message);
		this.name = "RetryableError";
		this.statusCode = statusCode;
	}
	/**
	 * Determines whether the provided status code should be treated as retryable.
	 *
	 * @param statusCode - HTTP status code returned by CouchDB.
	 *
	 * @returns `true` if the status code is considered retryable; otherwise `false`.
	 */
	static isRetryableStatusCode(statusCode) {
		return typeof statusCode === "number" && RETRYABLE_STATUS_CODES.has(statusCode);
	}
	/**
	 * Converts low-level network errors into {@link RetryableError} instances when possible.
	 *
	 * @param err - The error thrown by the underlying HTTP client.
	 *
	 * @throws {@link RetryableError} When the error maps to a retryable network condition.
	 * @throws {*} Re-throws the original error when it cannot be mapped.
	 */
	static handleNetworkError(err) {
		if (isNetworkError(err)) {
			const statusCode = NETWORK_ERROR_STATUS_MAP[err.code];
			if (statusCode) throw new RetryableError(`Network error: ${err.code}`, statusCode);
		}
		throw err;
	}
};
// True when `err` carries a CouchDB conflict status (HTTP 409).
function isConflictError(err) {
	return typeof err === "object" && err !== null && err.statusCode === 409;
}
|
|
366
|
+
|
|
367
|
+
//#endregion
|
|
368
|
+
//#region impl/retry.mts
|
|
369
|
+
/**
|
|
370
|
+
* Wrap an async-capable function with retry semantics that respect {@link RetryableError}.
|
|
371
|
+
* @typeParam Fn - The function signature to decorate with retry handling.
|
|
372
|
+
* @param fn The function to invoke with retry support.
|
|
373
|
+
* @param options Retry tuning parameters.
|
|
374
|
+
* @returns A function mirroring `fn` that automatically retries on {@link RetryableError}.
|
|
375
|
+
*/
|
|
376
|
+
/**
 * Wrap an async-capable function with retry semantics that respect {@link RetryableError}.
 * Non-retryable errors propagate immediately; retryable ones are retried up to
 * `maxRetries` times with exponential backoff capped at `maxDelay`.
 * @typeParam Fn - The function signature to decorate with retry handling.
 * @param fn The function to invoke with retry support.
 * @param options Retry tuning parameters.
 * @returns A function mirroring `fn` that automatically retries on {@link RetryableError}.
 */
function withRetry(fn, options = {}) {
	const { maxRetries = 3, initialDelay = 1e3, backoffFactor = 2, maxDelay = 3e4 } = options;
	return async (...args) => {
		let wait = initialDelay;
		let attempt = 0;
		while (attempt <= maxRetries) {
			try {
				return await fn(...args);
			} catch (error) {
				// Only RetryableError is eligible; the final attempt rethrows it.
				if (!(error instanceof RetryableError)) throw error;
				if (attempt === maxRetries) throw error;
				await (0, node_timers_promises.setTimeout)(Math.min(wait, maxDelay));
				wait *= backoffFactor;
				attempt += 1;
			}
		}
		// Unreachable in practice; guards against a silent undefined return.
		throw new RetryableError("withRetry exhausted retry attempts without resolving the operation");
	};
}
|
|
391
|
+
|
|
392
|
+
//#endregion
|
|
393
|
+
//#region impl/utils/logger.mts
|
|
394
|
+
// Shared do-nothing function used for absent log levels.
const noop = () => {};
// Logger that forwards every level to the global console.
const createConsoleLogger = () => ({
	error: (...args) => console.error(...args),
	warn: (...args) => console.warn(...args),
	info: (...args) => console.info(...args),
	debug: (...args) => console.debug(...args)
});
// Logger that silently discards every message.
const createNoopLogger = () => ({
	error: noop,
	warn: noop,
	info: noop,
	debug: noop
});
/**
 * Normalizes `config.logger` into an object with error/warn/info/debug
 * methods, caching the result on `config["~normalizedLogger"]` so repeat
 * calls return the same instance.
 *
 * Resolution order: cached value; no logger (console or noop depending on
 * `config.useConsoleLogger`); a single function (invoked as `fn(level,
 * ...args)`); a winston-like object (missing levels fall back to noop).
 */
function createLogger(config) {
	const cached = config["~normalizedLogger"];
	if (cached) return cached;
	let normalized;
	if (!config.logger) {
		normalized = config.useConsoleLogger ? createConsoleLogger() : createNoopLogger();
	} else if (typeof config.logger === "function") {
		const emit = config.logger;
		normalized = {
			error: (...args) => emit("error", ...args),
			warn: (...args) => emit("warn", ...args),
			info: (...args) => emit("info", ...args),
			debug: (...args) => emit("debug", ...args)
		};
	} else {
		const source = config.logger;
		normalized = {
			error: source.error ?? noop,
			warn: source.warn ?? noop,
			info: source.info ?? noop,
			debug: source.debug ?? noop
		};
	}
	config["~normalizedLogger"] = normalized;
	return normalized;
}
|
|
435
|
+
|
|
436
|
+
//#endregion
|
|
437
|
+
//#region schema/util.mts
|
|
438
|
+
// Zod function schema: (CouchConfig, NeedleBaseOptions) => NeedleOptions.
// Implemented by mergeNeedleOpts; validates arguments and return value at
// the schema boundary.
const MergeNeedleOpts = zod.z.function({
	input: [CouchConfig, NeedleBaseOptions],
	output: NeedleOptions
});
|
|
442
|
+
|
|
443
|
+
//#endregion
|
|
444
|
+
//#region impl/utils/mergeNeedleOpts.mts
|
|
445
|
+
/**
 * Merges user-supplied needle options from the config into the base request
 * options. Keys in `config.needleOpts` win over the base options, except
 * `headers`, which are merged key-by-key (config headers win on conflicts).
 * Returns the base options untouched when no overrides are configured.
 */
const mergeNeedleOpts = MergeNeedleOpts.implement((config, opts) => {
	const overrides = config.needleOpts;
	if (!overrides) return opts;
	const headers = {
		...opts.headers,
		...overrides.headers ?? {}
	};
	return {
		...opts,
		...overrides,
		headers
	};
});
|
|
456
|
+
|
|
457
|
+
//#endregion
|
|
458
|
+
//#region schema/couch/couch.output.schema.ts
|
|
459
|
+
/**
|
|
460
|
+
* Default schema for a returned CouchDB document if no validation schema is provided.
|
|
461
|
+
*/
|
|
462
|
+
const CouchDoc = zod.z.looseObject({
|
|
463
|
+
_id: zod.z.string().describe("the couch doc id"),
|
|
464
|
+
_rev: zod.z.string().optional().nullish().describe("the doc revision"),
|
|
465
|
+
_deleted: zod.z.boolean().optional().describe("is the doc deleted")
|
|
466
|
+
});
|
|
467
|
+
/**
|
|
468
|
+
* Default schema for a CouchDB view row if no validation schema is provided.
|
|
469
|
+
*/
|
|
470
|
+
const ViewRow = zod.z.object({
|
|
471
|
+
id: zod.z.string().optional(),
|
|
472
|
+
key: zod.z.any().nullish(),
|
|
473
|
+
value: zod.z.any().nullish(),
|
|
474
|
+
doc: CouchDoc.nullish(),
|
|
475
|
+
error: zod.z.string().optional().describe("usually not_found, if something is wrong with this doc")
|
|
476
|
+
});
|
|
477
|
+
/**
|
|
478
|
+
* Response type for a CouchDB view query if no validation schemas are provided.
|
|
479
|
+
*/
|
|
480
|
+
const ViewQueryResponse = zod.z.object({
|
|
481
|
+
total_rows: zod.z.number().nonnegative().optional().describe("total rows in the view"),
|
|
482
|
+
offset: zod.z.number().nonnegative().optional().describe("the offset of the first row in this result set"),
|
|
483
|
+
error: zod.z.string().optional().describe("if something is wrong"),
|
|
484
|
+
rows: zod.z.array(ViewRow).optional().describe("the rows returned by the view"),
|
|
485
|
+
update_seq: zod.z.number().optional().describe("the update sequence of the database at the time of the query")
|
|
486
|
+
});
|
|
487
|
+
/**
|
|
488
|
+
* CouchDB _bulk_docs response schema
|
|
489
|
+
*/
|
|
490
|
+
const BulkSaveResponse = zod.z.array(zod.z.object({
|
|
491
|
+
ok: zod.z.boolean().nullish(),
|
|
492
|
+
id: zod.z.string().nullish(),
|
|
493
|
+
rev: zod.z.string().nullish(),
|
|
494
|
+
error: zod.z.string().nullish().describe("if an error occurred, one word reason, eg conflict"),
|
|
495
|
+
reason: zod.z.string().nullish().describe("a full error message")
|
|
496
|
+
}));
|
|
497
|
+
// Normalized result of a single-document write. statusCode is not part of the
// CouchDB payload; put() attaches it from the HTTP response before parsing.
const CouchPutResponse = zod.z.object({
  ok: zod.z.boolean().optional().describe("did the request succeed"),
  error: zod.z.string().optional().describe("the error message, if did not succeed"),
  statusCode: zod.z.number(),
  id: zod.z.string().optional().describe("the couch doc id"),
  rev: zod.z.string().optional().describe("the new rev of the doc")
});
|
|
504
|
+
// Schema for the CouchDB database-information response (GET /{db}).
// Loose object: server/version-specific extra fields are passed through.
const CouchDBInfo = zod.z.looseObject({
  cluster: zod.z.object({
    n: zod.z.number().describe("Replicas. The number of copies of every document.").optional(),
    q: zod.z.number().describe("Shards. The number of range partitions.").optional(),
    r: zod.z.number().describe("Read quorum. The number of consistent copies of a document that need to be read before a successful reply.").optional(),
    w: zod.z.number().describe("Write quorum. The number of copies of a document that need to be written before a successful reply.").optional()
  }).optional(),
  compact_running: zod.z.boolean().describe("Set to true if the database compaction routine is operating on this database.").optional(),
  // Only db_name is required; everything else is tolerated as absent.
  db_name: zod.z.string().describe("The name of the database."),
  disk_format_version: zod.z.number().describe("The version of the physical format used for the data when it is stored on disk.").optional(),
  doc_count: zod.z.number().describe("A count of the documents in the specified database.").optional(),
  doc_del_count: zod.z.number().describe("Number of deleted documents").optional(),
  instance_start_time: zod.z.string().optional(),
  purge_seq: zod.z.string().describe("An opaque string that describes the purge state of the database. Do not rely on this string for counting the number of purge operations.").optional(),
  sizes: zod.z.object({
    active: zod.z.number().describe("The size of live data inside the database, in bytes.").optional(),
    external: zod.z.number().describe("The uncompressed size of database contents in bytes.").optional(),
    file: zod.z.number().describe("The size of the database file on disk in bytes. Views indexes are not included in the calculation.").optional()
  }).optional(),
  update_seq: zod.z.string().or(zod.z.number()).describe("An opaque string that describes the state of the database. Do not rely on this string for counting the number of updates.").optional(),
  props: zod.z.object({ partitioned: zod.z.boolean().describe("If present and true, this indicates that the database is partitioned.").optional() }).optional()
});
|
|
526
|
+
|
|
527
|
+
//#endregion
|
|
528
|
+
//#region impl/utils/parseRows.mts
|
|
529
|
+
/**
 * Validate/transform an array of CouchDB view rows.
 *
 * Each row's doc/key/value may be validated with StandardSchemaV1 schemas
 * supplied in `options`. When `options.onInvalidDoc === "skip"`, rows that
 * fail validation are dropped from the result; otherwise validation issues
 * are thrown.
 *
 * @param rows The raw rows array from CouchDB
 * @param options { docSchema?, keySchema?, valueSchema?, onInvalidDoc? }
 * @returns The validated rows (invalid ones omitted when skipping)
 */
async function parseRows(rows, options) {
  if (!Array.isArray(rows)) throw new Error("invalid rows format");
  // Run a StandardSchemaV1 validation; throw its issues on failure.
  const runSchema = async (schema, input) => {
    const outcome = await schema["~standard"].validate(input);
    if (outcome.issues) throw outcome.issues;
    return outcome.value;
  };
  const parseOne = async (row) => {
    /**
     * Rows without a doc (not-found entries, or queries made without
     * include_docs) are checked against the permissive ViewRow shape only.
     */
    if (row.doc == null) {
      const bare = zod.z.looseObject(ViewRow.shape).parse(row);
      if (options.keySchema) bare.key = await runSchema(options.keySchema, row.key);
      if (options.valueSchema) bare.value = await runSchema(options.valueSchema, row.value);
      return bare;
    }
    let doc = row.doc;
    if (options.docSchema) {
      const outcome = await options.docSchema["~standard"].validate(row.doc);
      if (outcome.issues) {
        if (options.onInvalidDoc === "skip") return "skip";
        throw outcome.issues;
      }
      doc = outcome.value;
    }
    const key = options.keySchema ? await runSchema(options.keySchema, row.key) : row.key;
    const value = options.valueSchema ? await runSchema(options.valueSchema, row.value) : row.value;
    return {
      ...row,
      doc,
      key,
      value
    };
  };
  const parsed = await Promise.all(rows.map(async (row) => {
    try {
      return await parseOne(row);
    } catch (e) {
      // Any validation failure can be downgraded to a skip marker.
      if (options.onInvalidDoc === "skip") return "skip";
      else throw e;
    }
  }));
  return parsed.filter((row) => row !== "skip");
}
|
|
583
|
+
|
|
584
|
+
//#endregion
|
|
585
|
+
//#region impl/bulkGet.mts
|
|
586
|
+
/**
 * Executes the bulk get operation against CouchDB.
 *
 * Implemented as a POST to /_all_docs with a `keys` payload, which acts as a
 * bulk fetch by id.
 *
 * @param _config CouchDB configuration
 * @param ids Array of document IDs to retrieve
 * @param includeDocs Whether to include documents in the response
 *
 * @returns The raw response body from CouchDB
 *
 * @throws {RetryableError} When a retryable HTTP status code is encountered or no response is received.
 * @throws {Error} When CouchDB returns a non-retryable error payload.
 */
async function executeBulkGet(_config, ids, includeDocs) {
  const configParseResult = CouchConfig.safeParse(_config);
  const logger = createLogger(_config);
  logger.info(`Starting bulk get for ${ids.length} documents`);
  if (!configParseResult.success) {
    logger.error("Invalid configuration provided for bulk get", configParseResult.error);
    throw configParseResult.error;
  }
  const config = configParseResult.data;
  const url = `${config.couch}/_all_docs${includeDocs ? "?include_docs=true" : ""}`;
  const payload = { keys: ids };
  const mergedOpts = mergeNeedleOpts(config, {
    json: true,
    headers: { "Content-Type": "application/json" }
  });
  try {
    const resp = await (0, needle.default)("post", url, payload, mergedOpts);
    if (RetryableError.isRetryableStatusCode(resp.statusCode)) {
      logger.warn(`Retryable status code received: ${resp.statusCode}`);
      throw new RetryableError("retryable error during bulk get", resp.statusCode);
    }
    if (resp.statusCode !== 200) {
      logger.error(`Unexpected status code: ${resp.statusCode}`);
      throw new Error("could not fetch");
    }
    return resp.body;
  } catch (err) {
    // NOTE(review): this catch also receives the errors deliberately thrown
    // above, not only network failures. Whether they propagate depends on
    // RetryableError.handleNetworkError rethrowing; if it does not, this
    // function resolves to undefined, which callers (_bulkGetWithOptions)
    // treat as "no response". Confirm against impl/utils/errors.mts.
    logger.error("Network error during bulk get:", err);
    RetryableError.handleNetworkError(err);
  }
}
|
|
629
|
+
/**
 * Bulk get documents by IDs with options.
 *
 * Fetches via executeBulkGet (docs included unless explicitly disabled),
 * then validates the returned rows through parseRows.
 *
 * @template DocSchema - schema (StandardSchemaV1) used to validate each returned document, if provided.
 *
 * @param config - CouchDB configuration data that is validated before use.
 * @param ids - Array of document IDs to retrieve.
 * @param options - Options for bulk get operation, including whether to include documents and validation schema.
 *
 * @returns The bulk get response with rows optionally validated against the supplied document schema.
 *
 * @throws {RetryableError} When a retryable HTTP status code is encountered or no response is received.
 * @throws {Error<StandardSchemaV1.FailureResult["issues"]>} When the configuration or validation schemas fail to parse.
 * @throws {Error} When CouchDB returns a non-retryable error payload.
 */
async function _bulkGetWithOptions(config, ids, options = {}) {
  const includeDocs = options.includeDocs ?? true;
  const body = await executeBulkGet(config, ids, includeDocs);
  if (!body) throw new RetryableError("no response", 503);
  if (body.error) {
    const reason = typeof body.reason === "string" ? body.reason : "could not fetch";
    throw new Error(reason);
  }
  const validation = options.validate ?? {};
  const rows = await parseRows(body.rows, {
    onInvalidDoc: validation.onInvalidDoc,
    docSchema: validation.docSchema || CouchDoc
  });
  return {
    ...body,
    rows
  };
}
|
|
658
|
+
/**
 * Bulk get documents by IDs.
 *
 * @remarks
 * Documents are included by default; pass `includeDocs: false` to exclude
 * them. With docs included, a StandardSchemaV1 may be supplied under
 * `options.validate.docSchema`; `options.validate.onInvalidDoc` then controls
 * what happens on validation failure — `'throw'` (default) raises, `'skip'`
 * silently drops the offending rows.
 *
 * @template DocSchema - schema (StandardSchemaV1) used to validate each returned document, if provided.
 *
 * @param config - CouchDB configuration data that is validated before use.
 * @param ids - Array of document IDs to retrieve.
 * @param options - Options for bulk get operation, including whether to include documents and validation schema.
 *
 * @returns The bulk get response with rows optionally validated against the supplied document schema.
 *
 * @throws {RetryableError} When a retryable HTTP status code is encountered or no response is received.
 * @throws {Error<StandardSchemaV1.FailureResult["issues"]>} When the configuration or validation schemas fail to parse.
 * @throws {Error} When CouchDB returns a non-retryable error payload.
 */
async function bulkGet(config, ids, options = {}) {
  const { includeDocs, validate } = options;
  return _bulkGetWithOptions(config, ids, { includeDocs, validate });
}
|
|
685
|
+
/**
|
|
686
|
+
* Bulk get documents by IDs and return a dictionary of found and not found documents.
|
|
687
|
+
*
|
|
688
|
+
* @template DocSchema - Schema used to validate each returned document, if provided. Note: if a document is found and it fails validation this will throw a Error<StandardSchemaV1.FailureResult["issues"]>.
|
|
689
|
+
*
|
|
690
|
+
* @param config - CouchDB configuration data that is validated before use.
|
|
691
|
+
* @param ids - Array of document IDs to retrieve.
|
|
692
|
+
* @param options - Options for bulk get operation, including validation schema.
|
|
693
|
+
*
|
|
694
|
+
* @returns An object containing found documents and not found rows.
|
|
695
|
+
*
|
|
696
|
+
* @throws {RetryableError} When a retryable HTTP status code is encountered or no response is received.
|
|
697
|
+
* @throws {Error<StandardSchemaV1.FailureResult["issues"]>} When the configuration or validation schemas fail to parse.
|
|
698
|
+
* @throws {Error} When CouchDB returns a non-retryable error payload.
|
|
699
|
+
*/
|
|
700
|
+
async function bulkGetDictionary(config, ids, options) {
|
|
701
|
+
const response = await bulkGet(config, ids, {
|
|
702
|
+
includeDocs: true,
|
|
703
|
+
...options
|
|
704
|
+
});
|
|
705
|
+
const results = {
|
|
706
|
+
found: {},
|
|
707
|
+
notFound: {}
|
|
708
|
+
};
|
|
709
|
+
for (const row of response.rows ?? []) {
|
|
710
|
+
const key = typeof row.key === "string" ? row.key : row.id;
|
|
711
|
+
if (!key) continue;
|
|
712
|
+
if (row.error || !row.doc) {
|
|
713
|
+
results.notFound[key] = row;
|
|
714
|
+
continue;
|
|
715
|
+
}
|
|
716
|
+
const doc = row.doc;
|
|
717
|
+
const docId = typeof doc?._id === "string" ? doc._id : row.id;
|
|
718
|
+
if (!docId) {
|
|
719
|
+
results.notFound[key] = row;
|
|
720
|
+
continue;
|
|
721
|
+
}
|
|
722
|
+
results.found[docId] = doc;
|
|
723
|
+
}
|
|
724
|
+
return results;
|
|
725
|
+
}
|
|
726
|
+
|
|
727
|
+
//#endregion
|
|
728
|
+
//#region impl/get.mts
|
|
729
|
+
// Accepts any value implementing the StandardSchemaV1 interface, identified
// by the presence of a "~standard" property on an object.
const ValidSchema = zod.z.custom((value) => {
  return value !== null && typeof value === "object" && "~standard" in value;
}, { message: "docSchema must be a valid StandardSchemaV1 schema" });
// Options accepted by get/getAtRev: an optional revision pin and an optional
// StandardSchemaV1 used to validate the returned document.
const CouchGetOptions = zod.z.object({
  rev: zod.z.string().optional().describe("the couch doc revision"),
  validate: zod.z.object({ docSchema: ValidSchema.optional() }).optional().describe("optional document validation rules")
});
|
|
736
|
+
/**
 * Fetch a single document by id (optionally pinned to a specific rev) and
 * validate the body with the caller-supplied schema (CouchDoc by default).
 *
 * 404 handling: returns null unless config.throwOnGetNotFound is set, in
 * which case a NotFoundError is thrown.
 */
async function _getWithOptions(config, id, options) {
  const parsedOptions = CouchGetOptions.parse({
    rev: options.rev,
    validate: options.validate
  });
  const logger = createLogger(config);
  const rev = parsedOptions.rev;
  // With no rev, CouchDB serves the latest revision.
  const path = rev ? `${id}?rev=${rev}` : id;
  const url = `${config.couch}/${path}`;
  const requestOptions = mergeNeedleOpts(config, {
    json: true,
    headers: { "Content-Type": "application/json" }
  });
  logger.info(`Getting document with id: ${id}, rev ${rev ?? "latest"}`);
  try {
    const resp = await (0, needle.default)("get", url, null, requestOptions);
    if (!resp) {
      logger.error("No response received from get request");
      throw new RetryableError("no response", 503);
    }
    const body = resp.body ?? null;
    if (resp.statusCode === 404) {
      if (config.throwOnGetNotFound) {
        const reason = typeof body?.reason === "string" ? body.reason : "not_found";
        logger.warn(`Document not found (throwing error): ${id}, rev ${rev ?? "latest"}`);
        throw new NotFoundError(id, reason);
      }
      logger.debug(`Document not found (returning undefined): ${id}, rev ${rev ?? "latest"}`);
      return null;
    }
    if (RetryableError.isRetryableStatusCode(resp.statusCode)) {
      const reason = typeof body?.reason === "string" ? body.reason : "retryable error";
      logger.warn(`Retryable status code received: ${resp.statusCode}`);
      throw new RetryableError(reason, resp.statusCode);
    }
    if (resp.statusCode !== 200) {
      const reason = typeof body?.reason === "string" ? body.reason : "failed";
      logger.error(`Unexpected status code: ${resp.statusCode}`);
      throw new Error(reason);
    }
    // Validate/coerce the body with the supplied schema, defaulting to CouchDoc.
    const typedDoc = await (parsedOptions.validate?.docSchema ?? CouchDoc)["~standard"].validate(body);
    if (typedDoc.issues) throw typedDoc.issues;
    logger.info(`Successfully retrieved document: ${id}, rev ${rev ?? "latest"}`);
    return typedDoc.value;
  } catch (err) {
    // NOTE(review): every throw above lands here too. If
    // RetryableError.handleNetworkError rethrows non-network errors this is
    // harmless; if it does not, NotFoundError and the status-code errors would
    // be swallowed and the function would resolve to undefined — confirm
    // against impl/utils/errors.mts.
    logger.error("Error during get operation:", err);
    RetryableError.handleNetworkError(err);
  }
}
|
|
785
|
+
/**
 * Fetch the latest revision of a document by id.
 * Thin wrapper over _getWithOptions that defaults missing options to {}.
 */
async function get(config, id, options) {
  const opts = options ?? {};
  return _getWithOptions(config, id, opts);
}
|
|
788
|
+
/**
 * Fetch a document at a specific revision.
 * The explicit rev argument overrides any rev present in options.
 */
async function getAtRev(config, id, rev, options) {
  const merged = { ...options, rev };
  return _getWithOptions(config, id, merged);
}
|
|
794
|
+
|
|
795
|
+
//#endregion
|
|
796
|
+
//#region schema/couch/couch.input.schema.ts
|
|
797
|
+
// Input schema for CouchDB view query options (schema/couch/couch.input.schema.ts).
// NOTE(review): this region accesses zod via `zod.default` while others use
// `zod.z` — presumably a CJS-interop artifact of the bundler resolving to the
// same module; confirm if touching the bundling config.
const ViewOptions = zod.default.object({
  descending: zod.default.boolean().optional().describe("sort results descending"),
  endkey_docid: zod.default.string().optional().describe("stop returning records when this document ID is reached"),
  endkey: zod.default.any().optional(),
  group_level: zod.default.number().positive().optional().describe("group the results at this level"),
  group: zod.default.boolean().optional().describe("group the results"),
  include_docs: zod.default.boolean().optional().describe("join the id to the doc and return it"),
  inclusive_end: zod.default.boolean().optional().describe("whether the endkey is included in the result, default true"),
  key: zod.default.any().optional(),
  keys: zod.default.array(zod.default.any()).optional(),
  limit: zod.default.number().nonnegative().optional().describe("limit the results to this many rows"),
  reduce: zod.default.boolean().optional().describe("reduce the results"),
  skip: zod.default.number().nonnegative().optional().describe("skip this many rows"),
  sorted: zod.default.boolean().optional().describe("sort returned rows, default true"),
  stable: zod.default.boolean().optional().describe("ensure the view index is not updated during the query, default false"),
  startkey: zod.default.any().optional(),
  startkey_docid: zod.default.string().optional().describe("start returning records when this document ID is reached"),
  update: zod.default.enum([
    "true",
    "false",
    "lazy"
  ]).optional().describe("whether to update the view index before returning results, default true"),
  update_seq: zod.default.boolean().optional().describe("include the update sequence in the result")
}).describe("base options for a CouchDB view query");
|
|
821
|
+
|
|
822
|
+
//#endregion
|
|
823
|
+
//#region impl/utils/queryString.mts
|
|
824
|
+
/**
 * View-option keys whose values CouchDB parses as JSON; queryString must
 * JSON-quote/serialize these instead of passing raw strings.
 * NOTE(review): "update" takes the bare tokens true|false|lazy — quoting it
 * yields e.g. update="lazy"; confirm CouchDB accepts the quoted form.
 */
const KEYS_TO_QUOTE = [
  "endkey_docid",
  "endkey",
  "key",
  "keys",
  "startkey",
  "startkey_docid",
  "update"
];
|
|
833
|
+
/**
 * Serialize CouchDB view options into a URL-safe query string, JSON-encoding
 * the values CouchDB expects as JSON (keys listed in KEYS_TO_QUOTE).
 * @param options The view options to serialize (validated against ViewOptions)
 * @returns The serialized query string
 * @throws {ZodError} When the options fail ViewOptions validation
 */
function queryString(options = {}) {
  const searchParams = new URLSearchParams();
  const parsedOptions = ViewOptions.parse(options);
  Object.entries(parsedOptions).forEach(([key, rawValue]) => {
    let value = rawValue;
    if (KEYS_TO_QUOTE.includes(key)) {
      // CouchDB parses these values as JSON: strings must be quoted, and
      // arrays/objects serialized. JSON.stringify also escapes embedded
      // quotes/backslashes, which hand-rolled `"${value}"` quoting would not,
      // and fixes plain-object keys (e.g. startkey: {...}) that previously
      // stringified as "[object Object]". Numbers/booleans pass through
      // unchanged, matching the prior behavior.
      if (typeof value === "string" || (typeof value === "object" && value !== null)) value = JSON.stringify(value);
    }
    searchParams.set(key, String(value));
  });
  return searchParams.toString();
}
|
|
858
|
+
|
|
859
|
+
//#endregion
|
|
860
|
+
//#region impl/stream.mts
|
|
861
|
+
/**
 * Execute a CouchDB view query and stream rows as they are received.
 * The response body is parsed incrementally (stream-json Parser → Pick("rows")
 * → StreamArray), so the full result set is never buffered in memory.
 * @param rawConfig CouchDB configuration
 * @param view The CouchDB view to query
 * @param options Query options
 * @param onRow Callback invoked for each row received
 */
async function queryStream(rawConfig, view, options, onRow) {
  return new Promise((resolve, reject) => {
    const config = CouchConfig.parse(rawConfig);
    const logger = createLogger(config);
    logger.info(`Starting view query stream: ${view}`);
    logger.debug("Query options:", options);
    const queryOptions = options ?? {};
    let method = "GET";
    let payload = null;
    let qs = queryString(queryOptions);
    logger.debug("Generated query string:", qs);
    if (typeof queryOptions.keys !== "undefined") {
      // Large key sets will not fit in a URL; fall back to POSTing them.
      const MAX_URL_LENGTH = 2e3;
      const keysAsString = `keys=${encodeURIComponent(JSON.stringify(queryOptions.keys))}`;
      if (keysAsString.length + qs.length + 1 <= MAX_URL_LENGTH) qs += (qs.length > 0 ? "&" : "") + keysAsString;
      else {
        method = "POST";
        payload = { keys: queryOptions.keys };
      }
    }
    const url = `${config.couch}/${view}?${qs}`;
    // parse_response: false so needle hands us the raw stream to parse here.
    const mergedOpts = mergeNeedleOpts(config, {
      json: true,
      headers: { "Content-Type": "application/json" },
      parse_response: false
    });
    const parserPipeline = stream_chain.default.chain([
      new stream_json_Parser_js.default(),
      new stream_json_filters_Pick_js.default({ filter: "rows" }),
      new stream_json_streamers_StreamArray_js.default()
    ]);
    let rowCount = 0;
    // Settle-once guard: several event paths below may try to settle.
    let settled = false;
    const settleReject = (err) => {
      if (settled) return;
      settled = true;
      reject(err);
    };
    const settleResolve = () => {
      if (settled) return;
      settled = true;
      resolve();
    };
    let request = null;
    parserPipeline.on("data", (chunk) => {
      try {
        rowCount++;
        onRow(chunk.value);
      } catch (callbackErr) {
        // A throwing row callback aborts the pipeline and fails the promise.
        const error = callbackErr instanceof Error ? callbackErr : new Error(String(callbackErr));
        parserPipeline.destroy(error);
        settleReject(error);
      }
    });
    parserPipeline.on("error", (err) => {
      logger.error("Stream parsing error:", err);
      parserPipeline.destroy();
      settleReject(new Error(`Stream parsing error: ${err.message}`, { cause: err }));
    });
    parserPipeline.on("end", () => {
      logger.info(`Stream completed, processed ${rowCount} rows`);
      settleResolve();
    });
    request = method === "GET" ? needle.default.get(url, mergedOpts) : needle.default.post(url, payload, mergedOpts);
    request.on("response", (response) => {
      logger.debug(`Received response with status code: ${response.statusCode}`);
      if (RetryableError.isRetryableStatusCode(response.statusCode)) {
        logger.warn(`Retryable status code received: ${response.statusCode}`);
        settleReject(new RetryableError("retryable error during stream query", response.statusCode));
        request.destroy();
      }
    });
    request.on("error", (err) => {
      logger.error("Network error during stream query:", err);
      parserPipeline.destroy(err);
      try {
        // handleNetworkError may translate err into a RetryableError and throw.
        RetryableError.handleNetworkError(err);
      } catch (retryErr) {
        settleReject(retryErr);
        return;
      } finally {
        // Runs even after the catch's settleReject; the settled guard makes
        // the second call a no-op, so the first rejection wins.
        settleReject(err);
      }
    });
    request.pipe(parserPipeline);
  });
}
|
|
955
|
+
|
|
956
|
+
//#endregion
|
|
957
|
+
//#region impl/put.mts
|
|
958
|
+
/**
 * Create or update a single document via HTTP PUT.
 * A 409 conflict is reported in the returned CouchPutResponse
 * (ok: false, error: "conflict") rather than thrown; retryable statuses throw
 * RetryableError.
 */
const put = async (configInput, doc) => {
  const config = CouchConfig.parse(configInput);
  const logger = createLogger(config);
  const url = `${config.couch}/${doc._id}`;
  const body = doc;
  const mergedOpts = mergeNeedleOpts(config, {
    json: true,
    headers: { "Content-Type": "application/json" }
  });
  logger.info(`Putting document with id: ${doc._id}`);
  let resp;
  try {
    resp = await (0, needle.default)("put", url, body, mergedOpts);
  } catch (err) {
    // Presumably translates network failures into RetryableError and rethrows;
    // if it does not, the !resp guard below still raises.
    logger.error("Error during put operation:", err);
    RetryableError.handleNetworkError(err);
  }
  if (!resp) {
    logger.error("No response received from put request");
    throw new RetryableError("no response", 503);
  }
  // Attach the HTTP status so CouchPutResponse can expose it. Note this
  // mutates the parsed response body object in place.
  const result = resp?.body || {};
  result.statusCode = resp.statusCode;
  if (resp.statusCode === 409) {
    logger.warn(`Conflict detected for document: ${doc._id}`);
    result.ok = false;
    result.error = "conflict";
    return CouchPutResponse.parse(result);
  }
  if (RetryableError.isRetryableStatusCode(resp.statusCode)) {
    logger.warn(`Retryable status code received: ${resp.statusCode}`);
    throw new RetryableError(result.reason || "retryable error", resp.statusCode);
  }
  // NOTE(review): other non-200 statuses (e.g. 400/401) fall through to this
  // success log before CouchPutResponse.parse — the log can be misleading.
  logger.info(`Successfully saved document: ${doc._id}`);
  return CouchPutResponse.parse(result);
};
|
|
994
|
+
|
|
995
|
+
//#endregion
|
|
996
|
+
//#region impl/patch.mts
|
|
997
|
+
// Patch payloads must carry the expected _rev; any extra properties to merge
// are passed through untouched (looseObject).
const PatchProperties = zod.z.looseObject({ _rev: zod.z.string("_rev is required for patch operations") }).describe("Patch payload with _rev");
|
|
998
|
+
/**
 * Patch a CouchDB document by merging provided properties.
 * Validates that the _rev matches before applying the patch.
 *
 * @param configInput - CouchDB configuration
 * @param id - Document ID to patch
 * @param _properties - Properties to merge into the document (must include _rev)
 * @returns The result of the put operation, or a synthetic 409 conflict
 *          response when the supplied _rev does not match the current doc
 *
 * @throws Error if the config/properties fail validation or other errors occur
 */
const patch = async (configInput, id, _properties) => {
  const config = CouchConfig.parse(configInput);
  const properties = PatchProperties.parse(_properties);
  // Use the validated config for the logger, consistent with put/patchDangerously
  // (previously this passed the raw configInput).
  const logger = createLogger(config);
  logger.info(`Starting patch operation for document ${id}`);
  logger.debug("Patch properties:", properties);
  const doc = await get(config, id);
  // Optimistic-concurrency check: refuse to patch a stale/unknown revision.
  if (doc?._rev !== properties._rev) return {
    statusCode: 409,
    ok: false,
    error: "conflict"
  };
  const updatedDoc = {
    ...doc,
    ...properties
  };
  logger.debug("Merged document:", updatedDoc);
  const result = await put(config, updatedDoc);
  // Only log success when the put actually succeeded (put reports conflicts
  // in-band rather than throwing).
  if (result.ok) logger.info(`Successfully patched document ${id}, rev: ${result.rev}`);
  else logger.warn(`Patch put did not succeed for ${id}: ${result.error ?? result.statusCode}`);
  return result;
};
|
|
1030
|
+
/**
 * Patch a CouchDB document by merging provided properties.
 * This function will retry on conflicts using an exponential backoff strategy.
 *
 * @remarks patchDangerously can clobber data. It will retry even if a conflict happens. There are some use cases for this, but you have been warned, hence the name.
 *
 * @param configInput - CouchDB configuration
 * @param id - Document ID to patch
 * @param properties - Properties to merge into the document
 * @returns The result of the put operation or an error result if max retries are exceeded
 *
 * @throws Error if max retries are exceeded or other errors occur
 */
const patchDangerously = async (configInput, id, properties) => {
  const config = CouchConfig.parse(configInput);
  const logger = createLogger(config);
  const maxRetries = config.maxRetries || 5;
  let delay = config.initialDelay || 1e3;
  let attempts = 0;
  logger.info(`Starting patch operation for document ${id}`);
  logger.debug("Patch properties:", properties);
  while (attempts <= maxRetries) {
    logger.debug(`Attempt ${attempts + 1} of ${maxRetries + 1}`);
    try {
      const doc = await get(config, id);
      if (!doc) {
        logger.warn(`Document ${id} not found`);
        return {
          ok: false,
          statusCode: 404,
          error: "not_found"
        };
      }
      // Re-read + merge on every attempt: the latest doc always wins the base,
      // so intermediate writes by others can be clobbered — the "dangerous" part.
      const updatedDoc = {
        ...doc,
        ...properties
      };
      logger.debug("Merged document:", updatedDoc);
      const result = await put(config, updatedDoc);
      if (result.ok) {
        logger.info(`Successfully patched document ${id}, rev: ${result.rev}`);
        return result;
      }
      attempts++;
      if (attempts > maxRetries) {
        // NOTE(review): this throw is inside the enclosing try, so it is caught
        // by the catch below — which increments `attempts` again and converts
        // it into the statusCode-500 result object. The documented throw never
        // escapes; confirm whether that is intentional.
        logger.error(`Failed to patch ${id} after ${maxRetries} attempts`);
        throw new Error(`Failed to patch after ${maxRetries} attempts`);
      }
      logger.warn(`Conflict detected for ${id}, retrying (attempt ${attempts})`);
      await (0, node_timers_promises.setTimeout)(delay);
      delay *= config.backoffFactor || 2;
      logger.debug(`Next retry delay: ${delay}ms`);
    } catch (err) {
      if (typeof err === "object" && err !== null && "message" in err && err.message === "not_found") {
        logger.warn(`Document ${id} not found during patch operation`);
        return {
          ok: false,
          statusCode: 404,
          error: "not_found"
        };
      }
      attempts++;
      if (attempts > maxRetries) {
        const error = `Failed to patch after ${maxRetries} attempts: ${err}`;
        logger.error(error);
        return {
          ok: false,
          statusCode: 500,
          error
        };
      }
      logger.warn(`Error during patch attempt ${attempts}: ${err}`);
      await (0, node_timers_promises.setTimeout)(delay);
      // NOTE(review): unlike the conflict path above, this error path never
      // multiplies `delay` by backoffFactor — retries after thrown errors use
      // a constant delay. Confirm whether the asymmetry is intentional.
      logger.debug(`Retrying after ${delay}ms`);
    }
  }
};
|
|
1107
|
+
|
|
1108
|
+
//#endregion
|
|
1109
|
+
//#region impl/query.mts
|
|
1110
|
+
/**
 * Executes a CouchDB view query with optional schema validation and automatic handling
 * of HTTP method selection, query string construction, and retryable errors.
 *
 * @remarks
 * When using the validation feature, each row in the response will be validated against the provided schemas.
 * Types will be inferred from the StandardSchemaV1 supplied in the `options.validate` object.
 *
 * @template DocSchema - StandardSchemaV1 used to validate each returned `doc`, if provided.
 * @template KeySchema - StandardSchemaV1 used to validate each row `key`, if provided.
 * @template ValueSchema - StandardSchemaV1 used to validate each row `value`, if provided.
 *
 * @param _config - CouchDB configuration data that is validated before use.
 * @param view - Fully qualified design document and view identifier (e.g., `_design/foo/_view/bar`).
 * @param options - CouchDB view options, including optional validation schemas.
 *
 * @returns The parsed view response with rows validated against the supplied schemas.
 *
 * @throws {RetryableError} When a retryable HTTP status code is encountered or no response is received.
 * @throws {Error<Array<StandardSchemaV1.Issue>>} When the configuration or validation schemas fail to parse.
 * @throws {Error} When CouchDB returns a non-retryable error payload.
 */
async function query(_config, view, options = {}) {
  const configParseResult = CouchConfig.safeParse(_config);
  // Logger is created from the raw input so that invalid configs can still be reported.
  const logger = createLogger(_config);
  logger.info(`Starting view query: ${view}`);
  // NOTE(review): ViewOptions.parse here is used only to format the debug log,
  // but it throws on invalid options BEFORE the config check below — confirm intended.
  logger.debug("Query options:", ViewOptions.parse(options || {}));
  if (!configParseResult.success) {
    logger.error(`Invalid configuration provided: ${zod.z.prettifyError(configParseResult.error)}`);
    throw configParseResult.error;
  }
  const config = configParseResult.data;
  let qs = queryString(options);
  let method = "get";
  let payload = null;
  const mergedOpts = mergeNeedleOpts(config, {
    json: true,
    headers: { "Content-Type": "application/json" }
  });
  // `keys` can make the URL too long for a GET. Rebuild the query string without
  // keys, then either append them inline (GET) or send them in the body (POST)
  // depending on the resulting length.
  if (typeof options.keys !== "undefined") {
    // NOTE(review): threshold counts only the query string, not the base URL/view path.
    const MAX_URL_LENGTH = 2e3;
    const _options = structuredClone(options);
    delete _options.keys;
    qs = queryString(_options);
    const keysAsString = `keys=${JSON.stringify(options.keys)}`;
    if (keysAsString.length + qs.length + 1 <= MAX_URL_LENGTH) {
      method = "get";
      // Join with '&' only when there are other parameters already present.
      if (qs.length > 0) qs += "&";
      else qs = "";
      qs += keysAsString;
    } else {
      // Too long for a URL: POST the keys in the JSON body instead.
      method = "post";
      payload = { keys: options.keys };
    }
  }
  logger.debug("Generated query string:", qs);
  const url = `${config.couch}/${view}?${qs}`;
  let results;
  try {
    logger.debug(`Sending ${method} request to: ${url}`);
    results = method === "get" ? await (0, needle.default)("get", url, mergedOpts) : await (0, needle.default)("post", url, payload, mergedOpts);
  } catch (err) {
    // Converts transient network failures into RetryableError (re-throws otherwise).
    logger.error("Network error during query:", err);
    RetryableError.handleNetworkError(err);
  }
  if (!results) {
    logger.error("No response received from query request");
    throw new RetryableError("no response", 503);
  }
  const body = results.body;
  if (RetryableError.isRetryableStatusCode(results.statusCode)) {
    logger.warn(`Retryable status code received: ${results.statusCode}`);
    throw new RetryableError(body.error || "retryable error during query", results.statusCode);
  }
  // CouchDB signals application-level failures via an `error` field in the payload.
  if (body.error) {
    logger.error(`Query error: ${JSON.stringify(body)}`);
    throw new Error(`CouchDB query error: ${body.error} - ${body.reason || ""}`);
  }
  // Optional per-row validation of doc/key/value against the supplied schemas.
  if (options.validate && body.rows) body.rows = await parseRows(body.rows, options.validate);
  logger.info(`Successfully executed view query: ${view}`);
  logger.debug("Query response:", body);
  return body;
}
|
|
1193
|
+
|
|
1194
|
+
//#endregion
|
|
1195
|
+
//#region impl/utils/trackedEmitter.mts
|
|
1196
|
+
/**
 * Resolve the event emitter attached to a config object (under the `~emitter` key).
 * When no emitter is attached, returns a no-op async emitter so callers can always
 * `await emitter.emit(...)` without guarding.
 */
const setupEmitter = (config) => {
  const attached = config["~emitter"];
  const noopEmitter = { emit: async () => {} };
  return attached ? attached : noopEmitter;
};
|
|
1200
|
+
|
|
1201
|
+
//#endregion
|
|
1202
|
+
//#region impl/utils/transactionErrors.mts
|
|
1203
|
+
/**
 * Raised when the transaction bookkeeping document cannot be created,
 * most commonly because a document with the same transaction id already exists.
 */
var TransactionSetupError = class extends Error {
  /** Diagnostic payload describing the failure (e.g. the failing put response). */
  name = "TransactionSetupError";
  details;
  /**
   * @param {string} message - Human-readable description of the setup failure.
   * @param {object} [details] - Extra context attached to the error (defaults to `{}`).
   */
  constructor(message, details = {}) {
    super(message);
    this.details = details;
  }
};
|
|
1211
|
+
/**
 * Raised when one or more documents in a transaction carry a `_rev` that does not
 * match the current revision in the database (or a rev for a non-existent doc).
 */
var TransactionVersionConflictError = class extends Error {
  /** Ids of the documents whose revisions conflicted. */
  name = "TransactionVersionConflictError";
  conflictingIds;
  /**
   * @param {string[]} conflictingIds - Document ids with mismatched revisions.
   */
  constructor(conflictingIds) {
    super(`Revision mismatch for documents: ${conflictingIds.join(", ")}`);
    this.conflictingIds = conflictingIds;
  }
};
|
|
1219
|
+
/**
 * Raised when the bulk save inside a transaction reports per-document failures.
 */
var TransactionBulkOperationError = class extends Error {
  /** The CouchDB bulk-docs result rows that carried an `error` field. */
  name = "TransactionBulkOperationError";
  failedDocs;
  /**
   * @param {Array<{id: string}>} failedDocs - Failed result rows from `_bulk_docs`.
   */
  constructor(failedDocs) {
    super(`Failed to save documents: ${failedDocs.map((d) => d.id).join(", ")}`);
    this.failedDocs = failedDocs;
  }
};
|
|
1227
|
+
/**
 * Raised after a failed transaction has been rolled back; carries both the
 * error that aborted the transaction and the outcome of the rollback writes.
 */
var TransactionRollbackError = class extends Error {
  /** The error that caused the transaction to fail. */
  name = "TransactionRollbackError";
  originalError;
  /** Result rows from the rollback bulk save. */
  rollbackResults;
  /**
   * @param {string} message - Human-readable description.
   * @param {Error} originalError - The error that triggered the rollback.
   * @param {Array<object>} rollbackResults - Bulk-docs results of the rollback.
   */
  constructor(message, originalError, rollbackResults) {
    super(message);
    this.originalError = originalError;
    this.rollbackResults = rollbackResults;
  }
};
|
|
1237
|
+
|
|
1238
|
+
//#endregion
|
|
1239
|
+
//#region impl/bulkSave.mts
|
|
1240
|
+
/**
 * Bulk saves documents to CouchDB using the _bulk_docs endpoint.
 *
 * @see
 * https://docs.couchdb.org/en/stable/api/database/bulk-api.html#db-bulk-docs
 *
 * @param {CouchConfigInput} config - The CouchDB configuration.
 * @param {CouchDocInput[]} docs - An array of documents to save.
 * @returns {Promise<BulkSaveResponse>} - The response from CouchDB after the bulk save operation.
 *
 * @throws {RetryableError} When a retryable HTTP status code is encountered or no response is received.
 * @throws {Error} When CouchDB returns a non-retryable error payload.
 */
const bulkSave = async (config, docs) => {
  const logger = createLogger(config);
  // Reject missing/empty input up front rather than posting an empty batch.
  if (docs == null || !docs.length) {
    logger.error("bulkSave called with no docs");
    throw new Error("no docs provided");
  }
  logger.info(`Starting bulk save of ${docs.length} documents`);
  const requestOpts = mergeNeedleOpts(config, {
    json: true,
    headers: { "Content-Type": "application/json" }
  });
  let response;
  try {
    response = await (0, needle.default)("post", `${config.couch}/_bulk_docs`, { docs }, requestOpts);
  } catch (err) {
    // Converts transient network failures into RetryableError (re-throws otherwise).
    logger.error("Network error during bulk save:", err);
    RetryableError.handleNetworkError(err);
  }
  if (!response) {
    logger.error("No response received from bulk save request");
    throw new RetryableError("no response", 503);
  }
  if (RetryableError.isRetryableStatusCode(response.statusCode)) {
    logger.warn(`Retryable status code received: ${response.statusCode}`);
    throw new RetryableError("retryable error during bulk save", response.statusCode);
  }
  // _bulk_docs answers 201 on success (individual rows may still carry errors).
  if (response.statusCode !== 201) {
    logger.error(`Unexpected status code: ${response.statusCode}`);
    throw new Error("could not save");
  }
  return BulkSaveResponse.parse(response.body || []);
};
|
|
1288
|
+
/**
 * Performs a bulk save of documents within a transaction context.
 *
 * @remarks
 * This operation ensures that either all documents are saved successfully, or none are, maintaining data consistency.
 * If any document fails to save, the operation will attempt to roll back all changes.
 *
 * The transactionId has to be unique for the lifetime of the app. It is used to prevent two processes from executing the same transaction. It is up to you to craft a transactionId that uniquely represents this transaction, and that also is the same if another process tries to generate it.
 *
 * Exceptions to handle:
 *
 * `TransactionSetupError` Thrown if the transaction document cannot be created. Usually because it already exists
 * `TransactionVersionConflictError` Thrown if there are version conflicts with existing documents.
 * `TransactionBulkOperationError` Thrown if the bulk save operation fails for some documents.
 * `TransactionRollbackError` Thrown if the rollback operation fails after a transaction failure.
 *
 * @example
 * ```ts
 * const docsToSave = [
 *   { _id: 'doc1', foo: 'bar' },
 *   { _id: 'doc2', foo: 'baz' }
 * ];
 *
 * try {
 *   const results = await bulkSaveTransaction(config, 'unique-transaction-id', docsToSave);
 *   console.log('Bulk save successful:', results);
 * } catch (error) {
 *   console.error('Bulk save transaction failed:', error);
 * }
 * ```
 *
 * @param {CouchConfigInput} config - The CouchDB configuration.
 * @param {string} transactionId - A unique identifier for the transaction.
 * @param {CouchDocInput[]} docs - An array of documents to save.
 * @returns {Promise<BulkSaveResponse>} - The transaction save results.
 * @throws {TransactionSetupError} When the transaction document cannot be created.
 * @throws {TransactionVersionConflictError} When there are version conflicts with existing documents.
 * @throws {TransactionBulkOperationError} When the bulk save operation fails for some documents.
 * @throws {TransactionRollbackError} When the rollback operation fails after a transaction failure.
 */
const bulkSaveTransaction = async (config, transactionId, docs) => {
  const emitter = setupEmitter(config);
  const logger = createLogger(config);
  const retryOptions = {
    maxRetries: config.maxRetries ?? 10,
    initialDelay: config.initialDelay ?? 1e3,
    backoffFactor: config.backoffFactor ?? 2
  };
  // Writes of the transaction doc optionally retry, controlled by config.bindWithRetry.
  const _put = config.bindWithRetry ? withRetry(put.bind(null, config), retryOptions) : put.bind(null, config);
  logger.info(`Starting bulk save transaction ${transactionId} for ${docs.length} documents`);
  // Bookkeeping doc; its id doubles as the cross-process uniqueness lock (txn:<id>).
  const transactionDoc = {
    _id: `txn:${transactionId}`,
    _rev: null,
    type: "transaction",
    status: "pending",
    changes: docs,
    timestamp: (/* @__PURE__ */ new Date()).toISOString()
  };
  let transactionResponse = await _put(transactionDoc);
  logger.debug("Transaction document created:", transactionDoc, transactionResponse);
  await emitter.emit("transaction-created", {
    transactionResponse,
    txnDoc: transactionDoc
  });
  // If the txn doc could not be created (commonly: it already exists), abort before touching data.
  if (transactionResponse.error) throw new TransactionSetupError("Failed to create transaction document", {
    error: transactionResponse.error,
    response: transactionResponse
  });
  // Snapshot current revisions so we can detect conflicts and roll back later.
  const existingDocs = await bulkGetDictionary(config, docs.map((d) => d._id));
  logger.debug("Fetched current revisions of documents:", existingDocs);
  await emitter.emit("transaction-revs-fetched", existingDocs);
  /** @type {string[]} */
  const revErrors = [];
  // A doc conflicts when its _rev differs from the stored one, or when it
  // supplies a _rev for a document that does not exist yet.
  docs.forEach((d) => {
    if (!d._id) return;
    if (existingDocs.found[d._id] && existingDocs.found[d._id]._rev !== d._rev) revErrors.push(d._id);
    if (existingDocs.notFound[d._id] && d._rev) revErrors.push(d._id);
  });
  if (revErrors.length > 0) throw new TransactionVersionConflictError(revErrors);
  logger.debug("Checked document revisions:", existingDocs);
  await emitter.emit("transaction-revs-checked", existingDocs);
  // Index of caller-supplied docs by id — used to reconstruct rollback tombstones.
  const providedDocsById = {};
  docs.forEach((d) => {
    if (!d._id) return;
    providedDocsById[d._id] = d;
  });
  const newDocsToRollback = [];
  const potentialExistingDocsToRollback = [];
  const failedDocs = [];
  try {
    logger.info("Transaction started:", transactionDoc);
    await emitter.emit("transaction-started", transactionDoc);
    const results = await bulkSave(config, docs);
    logger.info("Transaction updates applied:", results);
    await emitter.emit("transaction-updates-applied", results);
    // Partition results: successful writes are remembered for rollback
    // (creates vs. updates handled differently), failures collected.
    results.forEach((r) => {
      if (!r.id) return;
      if (!r.error) {
        if (existingDocs.notFound[r.id]) newDocsToRollback.push(r);
        if (existingDocs.found[r.id]) potentialExistingDocsToRollback.push(r);
      } else failedDocs.push(r);
    });
    if (failedDocs.length > 0) throw new TransactionBulkOperationError(failedDocs);
    // All writes succeeded — mark the transaction doc completed.
    transactionDoc.status = "completed";
    transactionDoc._rev = transactionResponse.rev;
    transactionResponse = await _put(transactionDoc);
    logger.info("Transaction completed:", transactionDoc);
    await emitter.emit("transaction-completed", {
      transactionResponse,
      transactionDoc
    });
    if (transactionResponse.statusCode !== 201) logger.error("Failed to update transaction status to completed");
    return results;
  } catch (error) {
    logger.error("Transaction failed, attempting rollback:", error);
    const toRollback = [];
    // Previously-existing docs: restore the pre-transaction content at the new rev.
    potentialExistingDocsToRollback.forEach((row) => {
      if (!row.id || !row.rev) return;
      const doc = existingDocs.found[row.id];
      doc._rev = row.rev;
      toRollback.push(doc);
    });
    // Newly-created docs: delete them again via a _deleted tombstone.
    newDocsToRollback.forEach((d) => {
      if (!d.id || !d.rev) return;
      const before = JSON.parse(JSON.stringify(providedDocsById[d.id]));
      before._rev = d.rev;
      before._deleted = true;
      toRollback.push(before);
    });
    const bulkRollbackResult = await bulkSave(config, toRollback);
    let status = "rolled_back";
    bulkRollbackResult.forEach((r) => {
      if (r.error) status = "rollback_failed";
    });
    logger.warn("Transaction rolled back:", {
      bulkRollbackResult,
      status
    });
    await emitter.emit("transaction-rolled-back", {
      bulkRollbackResult,
      status
    });
    transactionDoc.status = status;
    transactionDoc._rev = transactionResponse.rev || null;
    transactionResponse = await _put(transactionDoc);
    logger.warn("Transaction rollback status updated:", transactionDoc);
    await emitter.emit("transaction-rolled-back-status", {
      transactionResponse,
      transactionDoc
    });
    if (transactionResponse.statusCode !== 201) logger.error("Failed to update transaction status to rolled_back");
    // NOTE(review): this error (and its "rollback was unsuccessful" message) is thrown
    // even when status is "rolled_back" (i.e. the rollback itself succeeded) — confirm intended.
    throw new TransactionRollbackError("Transaction failed and rollback was unsuccessful", error, bulkRollbackResult);
  }
};
|
|
1442
|
+
|
|
1443
|
+
//#endregion
|
|
1444
|
+
//#region impl/remove.mts
|
|
1445
|
+
/**
 * Delete a single document by id and revision via HTTP DELETE.
 *
 * @param configInput - The CouchDB configuration input.
 * @param id - Id of the document to delete.
 * @param rev - Current revision of the document.
 * @returns The parsed CouchPutResponse. On 404 the response carries
 *   `ok: false` and `error: "not_found"` instead of throwing.
 * @throws {RetryableError} On retryable status codes or when no response is received.
 * @throws {Error} On any other non-200 status.
 */
const remove = async (configInput, id, rev) => {
  const config = CouchConfig.parse(configInput);
  const logger = createLogger(config);
  const deleteOpts = mergeNeedleOpts(config, {
    json: true,
    headers: { "Content-Type": "application/json" }
  });
  logger.info(`Deleting document with id: ${id}`);
  let response;
  try {
    response = await (0, needle.default)("delete", `${config.couch}/${id}?rev=${rev}`, null, deleteOpts);
  } catch (err) {
    // Converts transient network failures into RetryableError (re-throws otherwise).
    logger.error("Error during delete operation:", err);
    RetryableError.handleNetworkError(err);
  }
  if (!response) {
    logger.error("No response received from delete request");
    throw new RetryableError("no response", 503);
  }
  // needle may hand back a raw string when the server omits a JSON content type;
  // normalize to an object either way.
  let result;
  if (typeof response.body === "string") {
    try {
      result = JSON.parse(response.body);
    } catch {
      result = {};
    }
  } else {
    result = response.body || {};
  }
  result.statusCode = response.statusCode;
  // Missing documents are reported in-band rather than thrown.
  if (response.statusCode === 404) {
    logger.warn(`Document not found for deletion: ${id}`);
    result.ok = false;
    result.error = "not_found";
    return CouchPutResponse.parse(result);
  }
  if (RetryableError.isRetryableStatusCode(response.statusCode)) {
    logger.warn(`Retryable status code received: ${response.statusCode}`);
    throw new RetryableError(result.reason || "retryable error", response.statusCode);
  }
  if (response.statusCode !== 200) {
    logger.error(`Unexpected status code: ${response.statusCode}`);
    throw new Error(result.reason || "failed");
  }
  logger.info(`Successfully deleted document: ${id}`);
  return CouchPutResponse.parse(result);
};
|
|
1490
|
+
|
|
1491
|
+
//#endregion
|
|
1492
|
+
//#region impl/bulkRemove.mts
|
|
1493
|
+
/**
 * Removes multiple documents from a CouchDB database using the _bulk_docs endpoint.
 * It first retrieves the documents by their IDs, marks them as deleted, and then
 * sends them back to the database for deletion.
 *
 * See https://docs.couchdb.org/en/stable/api/database/bulk-api.html#post--db-_bulk_docs
 *
 * @param configInput - The CouchDB configuration input.
 * @param ids - An array of document IDs to be removed.
 * @returns A promise that resolves to an array of results from the bulk delete operation.
 *
 * @example
 * ```ts
 * const config: CouchConfigInput = {
 *   couch: 'http://localhost:5984/mydb',
 *   useConsoleLogger: true
 * };
 * const results = await bulkRemove(config, ['doc1', 'doc2', 'doc3']);
 * ```
 *
 * @throws Will throw an error if the provided configuration is invalid or if the bulk delete operation fails.
 */
const bulkRemove = async (configInput, ids) => {
  const config = CouchConfig.parse(configInput);
  const logger = createLogger(config);
  logger.info(`Starting bulk remove for ${ids.length} documents`);
  const resp = await bulkGet(config, ids);
  const deletions = [];
  for (const row of resp.rows ?? []) {
    // Rows without a doc (missing/deleted ids) are silently skipped.
    if (!row.doc) continue;
    try {
      const doc = CouchDoc.parse(row.doc);
      doc._deleted = true;
      deletions.push(doc);
    } catch (e) {
      // A malformed doc is logged and skipped rather than aborting the batch.
      logger.warn(`Invalid document structure in bulk remove: ${row.id}`, e);
    }
  }
  if (!deletions.length) return [];
  return await bulkSave(config, deletions);
};
|
|
1536
|
+
/**
 * Removes multiple documents from a CouchDB database by their IDs using individual delete operations.
 * It first retrieves the documents to get their revision IDs, then deletes each document one by one.
 *
 * See https://docs.couchdb.org/en/stable/api/document/common.html#delete--db-docid
 *
 * @param configInput - The CouchDB configuration input.
 * @param ids - An array of document IDs to be removed.
 * @returns A promise that resolves to an array of results from the individual delete operations.
 *   Documents that fail to delete are logged and skipped, so the result may be shorter than `ids`.
 *
 * @example
 * ```ts
 * const config: CouchConfigInput = {
 *   couch: 'http://localhost:5984/mydb',
 *   useConsoleLogger: true
 * };
 * const results = await bulkRemoveMap(config, ['doc1', 'doc2', 'doc3']);
 * ```
 *
 * @throws Will throw an error if the provided configuration is invalid.
 */
const bulkRemoveMap = async (configInput, ids) => {
  const config = CouchConfig.parse(configInput);
  const logger = createLogger(config);
  logger.info(`Starting bulk remove map for ${ids.length} documents`);
  // Fetch only the revision metadata; docs themselves are not needed for DELETE.
  const { rows } = await bulkGet(config, ids, { includeDocs: false });
  const results = [];
  for (const row of rows || []) {
    try {
      if (!row.value?.rev) throw new Error(`no rev found for doc ${row.id}`);
      // BUGFIX: interpolating the row object directly produced "[object Object]";
      // serialize it so the error message is actionable.
      if (!row.id) throw new Error(`no id found for doc ${JSON.stringify(row)}`);
      const result = await remove(config, row.id, row.value.rev);
      results.push(result);
    } catch (e) {
      // Best-effort semantics: a single failed delete does not abort the batch.
      logger.warn(`Error removing a doc in bulk remove map: ${row.id}`, e);
    }
  }
  return results;
};
|
|
1575
|
+
|
|
1576
|
+
//#endregion
|
|
1577
|
+
//#region impl/getDBInfo.mts
|
|
1578
|
+
/**
 * Fetches and returns CouchDB database information.
 *
 * @see {@link https://docs.couchdb.org/en/stable/api/database/common.html#get--db | CouchDB API Documentation}
 *
 * @param configInput - The CouchDB configuration input.
 * @returns A promise that resolves to the CouchDB database information.
 * @throws {RetryableError} `RetryableError` If a retryable error occurs during the request.
 * @throws {Error} `Error` For other non-retryable errors.
 *
 * @example
 * ```ts
 * const config = { couch: 'http://localhost:5984/my-database' };
 * getDBInfo(config)
 *   .then(info => console.log('Database Info:', info))
 *   .catch(err => console.error('Error fetching database info:', err));
 * ```
 */
const getDBInfo = async (configInput) => {
  const config = CouchConfig.parse(configInput);
  const logger = createLogger(config);
  const requestOpts = mergeNeedleOpts(config, {
    json: true,
    headers: { "Content-Type": "application/json" }
  });
  let resp;
  try {
    // GET on the database root returns the db info document.
    resp = await (0, needle.default)("get", `${config.couch}`, requestOpts);
  } catch (err) {
    // Converts transient network failures into RetryableError (re-throws otherwise).
    logger.error("Error during get operation:", err);
    RetryableError.handleNetworkError(err);
  }
  if (!resp) {
    logger.error("No response received from get request");
    throw new RetryableError("no response", 503);
  }
  const result = resp.body;
  if (RetryableError.isRetryableStatusCode(resp.statusCode)) {
    logger.warn(`Retryable status code received: ${resp.statusCode}`);
    throw new RetryableError(result.reason ?? "retryable error", resp.statusCode);
  }
  return CouchDBInfo.parse(result);
};
|
|
1628
|
+
|
|
1629
|
+
//#endregion
|
|
1630
|
+
//#region schema/sugar/lock.mts
|
|
1631
|
+
// Schema for a lock marker document. Extends the base CouchDoc schema, so a lock
// is stored as a regular document (id convention: `lock-<docId>`, see createLock).
const LockDoc = CouchDoc.extend({
  type: zod.z.literal("lock"),
  locks: zod.z.string().describe("the document ID being locked"),
  lockedAt: zod.z.string().describe("ISO timestamp when lock was created"),
  lockedBy: zod.z.string().describe("username of who created the lock")
});
// Options accepted by createLock/removeLock. `enableLocking` defaults to true
// via prefault; `username` is required and attributes ownership of the lock.
const LockOptions = zod.z.object({
  enableLocking: zod.z.boolean().prefault(true).describe("whether locking is enabled"),
  username: zod.z.string().describe("username to attribute locks to")
});
|
|
1641
|
+
|
|
1642
|
+
//#endregion
|
|
1643
|
+
//#region impl/sugar/lock.mts
|
|
1644
|
+
/**
 * Create a lock document for the specified document ID.
 * Returns true if the lock was created, false if a conflict occurred or the
 * put failed; returns true immediately when locking is disabled.
 *
 * @param configInput CouchDB configuration
 * @param docId The document ID to lock
 * @param lockOptions Locking options
 *
 * @return True if the lock was created, false otherwise
 */
async function createLock(configInput, docId, lockOptions) {
  const config = CouchConfig.parse(configInput);
  const options = LockOptions.parse(lockOptions);
  const logger = createLogger(config);
  if (!options.enableLocking) {
    logger.debug("Locking disabled, returning true without creating lock");
    return true;
  }
  // Lock doc id is derived from the target doc id, so CouchDB's uniqueness
  // constraint on _id acts as the mutual-exclusion mechanism.
  const lockDoc = {
    _id: `lock-${docId}`,
    type: "lock",
    locks: docId,
    lockedAt: new Date().toISOString(),
    lockedBy: options.username
  };
  try {
    const putResult = await put(config, lockDoc);
    logger.info(`Lock created for ${docId} by ${options.username}`);
    return putResult.ok === true;
  } catch (error) {
    // A conflict means someone else holds the lock; anything else is logged.
    if (isConflictError(error)) {
      logger.warn(`Lock conflict for ${docId} - already locked`);
    } else {
      logger.error(`Error creating lock for ${docId}:`, error);
    }
    return false;
  }
}
|
|
1679
|
+
/**
 * Remove the lock document for the specified document ID if owned by the caller.
 * No-ops (with a log entry) when locking is disabled, no docId is given,
 * no lock exists, or the lock belongs to a different username.
 *
 * @param configInput CouchDB configuration
 * @param docId The document ID to unlock
 * @param lockOptions Locking options
 *
 * @return Promise that resolves when the unlock operation is complete
 */
async function removeLock(configInput, docId, lockOptions) {
  const config = CouchConfig.parse(configInput);
  const options = LockOptions.parse(lockOptions);
  const logger = createLogger(config);
  if (!options.enableLocking) {
    logger.debug("Locking disabled, skipping unlock");
    return;
  }
  if (!docId) {
    logger.warn("No docId provided for unlock");
    return;
  }
  const lockDoc = await get(config, `lock-${docId}`);
  if (!lockDoc) {
    logger.debug(`No lock found for ${docId}`);
    return;
  }
  // Only the lock's owner may release it.
  if (lockDoc.lockedBy !== options.username) {
    logger.warn(`Cannot remove lock for ${docId} - owned by ${lockDoc.lockedBy}`);
    return;
  }
  try {
    // Delete by writing a _deleted tombstone over the existing lock doc.
    const tombstone = { ...lockDoc, _deleted: true };
    await put(config, tombstone);
    logger.info(`Lock removed for ${docId}`);
  } catch (error) {
    // Best-effort: failure to release is logged, not thrown.
    logger.error(`Error removing lock for ${docId}:`, error);
  }
}
|
|
1719
|
+
|
|
1720
|
+
//#endregion
|
|
1721
|
+
//#region schema/sugar/watch.mts
|
|
1722
|
+
// Options for watchDocs. `.partial()` makes every field optional; watchDocs
// applies its own fallbacks (maxRetries 10, initialDelay 1000ms, maxDelay 30000ms).
const WatchOptions = zod.z.object({
  include_docs: zod.z.boolean().default(false),
  maxRetries: zod.z.number().describe("maximum number of retries before giving up"),
  initialDelay: zod.z.number().describe("initial delay between retries in milliseconds"),
  maxDelay: zod.z.number().describe("maximum delay between retries in milliseconds")
}).partial();
|
|
1728
|
+
|
|
1729
|
+
//#endregion
|
|
1730
|
+
//#region impl/sugar/watch.mts
|
|
1731
|
+
/**
 * Watch for changes to specified document IDs in CouchDB.
 * Calls the onChange callback for each change detected.
 * Returns an emitter with methods to listen for events and stop watching.
 *
 * @param configInput CouchDB configuration
 * @param docIds Document ID or array of document IDs to watch
 * @param onChange Callback function called on each change
 * @param optionsInput Watch options
 *
 * @return WatchEmitter with methods to manage the watch
 */
function watchDocs(configInput, docIds, onChange, optionsInput = {}) {
  const config = CouchConfig.parse(configInput);
  const options = WatchOptions.parse(optionsInput);
  const logger = createLogger(config);
  const emitter = new events.EventEmitter();
  // Mutable connection state shared between connect/handleReconnect/stop.
  let lastSeq = null;
  let stopping = false;
  let retryCount = 0;
  let currentRequest = null;
  const maxRetries = options.maxRetries || 10;
  const initialDelay = options.initialDelay || 1e3;
  const maxDelay = options.maxDelay || 3e4;
  const _docIds = Array.isArray(docIds) ? docIds : [docIds];
  if (_docIds.length === 0) throw new Error("docIds must be a non-empty array");
  if (_docIds.length > 100) throw new Error("docIds must be an array of 100 or fewer elements");
  // Open a continuous _changes feed filtered to the watched doc ids.
  const connect = async () => {
    if (stopping) return;
    const feed = "continuous";
    const includeDocs = options.include_docs ?? false;
    const ids = _docIds.join("\",\"");
    // NOTE(review): on the very first connect lastSeq is null, producing a literal
    // `since=null` in the URL — confirm CouchDB accepts this as "from now/0".
    const url = `${config.couch}/_changes?feed=${feed}&since=${lastSeq}&include_docs=${includeDocs}&filter=_doc_ids&doc_ids=["${ids}"]`;
    const mergedOpts = mergeNeedleOpts(config, {
      json: false,
      headers: { "Content-Type": "application/json" },
      parse_response: false
    });
    // Accumulates partial lines between chunks; the feed is newline-delimited JSON.
    let buffer = "";
    currentRequest = needle.default.get(url, mergedOpts);
    currentRequest.on("data", (chunk) => {
      buffer += chunk.toString();
      const lines = buffer.split("\n");
      // The final element may be an incomplete line; keep it for the next chunk.
      buffer = lines.pop() || "";
      for (const line of lines) if (line.trim()) try {
        const change = JSON.parse(line);
        // NOTE(review): `return null` exits the whole chunk handler, skipping any
        // remaining lines in this chunk (e.g. after a heartbeat/last_seq line) —
        // a `continue` was likely intended; confirm.
        if (!change.id) return null;
        logger.debug(`Change detected, watching [${_docIds}]`, change);
        lastSeq = change.seq || change.last_seq;
        emitter.emit("change", change);
      } catch (err) {
        logger.error("Error parsing change:", err, "Line:", line);
      }
    });
    currentRequest.on("response", (response) => {
      logger.debug(`Received response with status code, watching [${_docIds}]: ${response.statusCode}`);
      if (RetryableError.isRetryableStatusCode(response.statusCode)) {
        logger.warn(`Retryable status code received: ${response.statusCode}`);
        currentRequest?.destroy();
        handleReconnect();
      } else retryCount = 0;
    });
    currentRequest.on("error", async (err) => {
      if (stopping) {
        logger.info("stopping in progress, ignore stream error");
        return;
      }
      logger.error(`Network error during stream, watching [${_docIds}]:`, err.toString());
      // handleNetworkError always throws; the catch classifies the result
      // into retryable (reconnect) vs fatal (surface to listeners).
      try {
        RetryableError.handleNetworkError(err);
      } catch (filteredError) {
        if (filteredError instanceof RetryableError) {
          logger.info(`Retryable error, watching [${_docIds}]:`, filteredError.toString());
          handleReconnect();
        } else {
          logger.error(`Non-retryable error, watching [${_docIds}]`, filteredError?.toString());
          emitter.emit("error", filteredError);
        }
      }
    });
    currentRequest.on("end", () => {
      // Flush whatever partial line remained in the buffer when the stream closed.
      if (buffer.trim()) try {
        const change = JSON.parse(buffer);
        logger.debug("Final change detected:", change);
        emitter.emit("change", change);
      } catch (err) {
        logger.error("Error parsing final change:", err);
      }
      logger.info("Stream completed. Last seen seq: ", lastSeq);
      emitter.emit("end", { lastSeq });
      // A continuous feed ending unexpectedly is treated as a dropped connection.
      if (!stopping) handleReconnect();
    });
  };
  // Exponential backoff reconnect (initialDelay * 2^retryCount, capped at maxDelay).
  const handleReconnect = async () => {
    if (stopping || retryCount >= maxRetries) {
      if (retryCount >= maxRetries) {
        logger.error(`Max retries (${maxRetries}) reached, giving up`);
        emitter.emit("error", /* @__PURE__ */ new Error("Max retries reached"));
      }
      return;
    }
    const delay = Math.min(initialDelay * Math.pow(2, retryCount), maxDelay);
    retryCount++;
    logger.info(`Attempting to reconnect in ${delay}ms (attempt ${retryCount} of ${maxRetries})`);
    await (0, node_timers_promises.setTimeout)(delay);
    try {
      connect();
    } catch (err) {
      logger.error("Error during reconnection:", err);
      handleReconnect();
    }
  };
  connect();
  emitter.on("change", onChange);
  // Public handle: subscribe/unsubscribe to events and stop the watch.
  return {
    on: (event, listener) => emitter.on(event, listener),
    removeListener: (event, listener) => emitter.removeListener(event, listener),
    stop: () => {
      stopping = true;
      if (currentRequest) currentRequest.destroy();
      emitter.emit("end", { lastSeq });
      emitter.removeAllListeners();
    }
  };
}
|
|
1856
|
+
|
|
1857
|
+
//#endregion
|
|
1858
|
+
//#region impl/bindConfig.mts
|
|
1859
|
+
/**
 * Build a validated binding that exposes CouchDB helpers plus an options() helper for overrides.
 * @param config The CouchDB configuration
 * @returns A bound instance with CouchDB operations and an options() method for overrides
 */
const bindConfig = (config) => {
	// Validate up front so every bound helper sees a parsed, normalized config.
	const bound = doBind(CouchConfig.parse(config));
	// Rebind against a shallow merge of the ORIGINAL (unparsed) config plus the
	// overrides; the recursive bindConfig call re-validates the merged result.
	const withOverrides = (overrides) => bindConfig({
		...config,
		...overrides
	});
	return {
		...bound,
		options: withOverrides
	};
};
|
|
1877
|
+
/**
 * @internal
 *
 * Helper to bind a function to a config, optionally wrapping it with retry logic.
 * Casts to the appropriate bound function type.
 * @param func The function to bind
 * @param config The CouchDB configuration
 * @returns The bound function, possibly wrapped with retry logic
 */
function getBoundWithRetry(func, config) {
	const boundFn = func.bind(null, config);
	// No retry requested: hand back the plain bound function.
	if (!config.bindWithRetry) return boundFn;
	// Retry settings fall back to the library defaults when not configured.
	const retryOptions = {
		maxRetries: config.maxRetries ?? 10,
		initialDelay: config.initialDelay ?? 1e3,
		backoffFactor: config.backoffFactor ?? 2
	};
	return withRetry(boundFn, retryOptions);
}
|
|
1895
|
+
/**
 * @internal
 *
 * Bind core CouchDB operations to a specific configuration, optionally applying retry wrappers.
 *
 * Every retry-capable operation is routed through getBoundWithRetry rather than
 * repeating the `config.bindWithRetry ? withRetry(...) : ...` ternary (and a
 * duplicated retry-options object) inline for each entry; getBoundWithRetry
 * applies the identical defaults (maxRetries 10, initialDelay 1e3,
 * backoffFactor 2), so behavior is unchanged.
 * @param config The CouchDB configuration
 * @returns An object with CouchDB operations bound to the provided configuration
 */
function doBind(config) {
	return {
		bulkGet: getBoundWithRetry(bulkGet, config),
		bulkGetDictionary: getBoundWithRetry(bulkGetDictionary, config),
		get: getBoundWithRetry(get, config),
		getAtRev: getBoundWithRetry(getAtRev, config),
		query: getBoundWithRetry(query, config),
		bulkRemove: getBoundWithRetry(bulkRemove, config),
		bulkRemoveMap: getBoundWithRetry(bulkRemoveMap, config),
		bulkSave: getBoundWithRetry(bulkSave, config),
		// NOTE(review): the following were bound WITHOUT the retry wrapper in the
		// original — presumably they manage retries/long-lived state themselves;
		// preserved as-is.
		bulkSaveTransaction: bulkSaveTransaction.bind(null, config),
		getDBInfo: getBoundWithRetry(getDBInfo, config),
		patch: getBoundWithRetry(patch, config),
		patchDangerously: patchDangerously.bind(null, config),
		put: getBoundWithRetry(put, config),
		queryStream: getBoundWithRetry(queryStream, config),
		remove: getBoundWithRetry(remove, config),
		createLock: createLock.bind(null, config),
		removeLock: removeLock.bind(null, config),
		watchDocs: watchDocs.bind(null, config)
	};
}
|
|
1929
|
+
|
|
1930
|
+
//#endregion
|
|
1931
|
+
// Public API of the CJS bundle. Each export is assigned individually
// (exports.X = X) — NOTE(review): presumably so static CJS analyzers
// (e.g. cjs-module-lexer for Node's ESM named-import interop) can detect
// the named exports; do not collapse into Object.assign without verifying.
exports.QueryBuilder = QueryBuilder;
exports.bindConfig = bindConfig;
exports.bulkGet = bulkGet;
exports.bulkGetDictionary = bulkGetDictionary;
exports.bulkRemove = bulkRemove;
exports.bulkRemoveMap = bulkRemoveMap;
exports.bulkSave = bulkSave;
exports.bulkSaveTransaction = bulkSaveTransaction;
exports.createLock = createLock;
exports.createQuery = createQuery;
exports.get = get;
exports.getAtRev = getAtRev;
exports.getDBInfo = getDBInfo;
exports.patch = patch;
exports.patchDangerously = patchDangerously;
exports.put = put;
exports.query = query;
exports.queryStream = queryStream;
exports.remove = remove;
exports.removeLock = removeLock;
exports.watchDocs = watchDocs;
exports.withRetry = withRetry;
|