bsv-wallet-postgres 0.5.0 → 0.100.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +17 -115
- data/LICENSE +23 -80
- data/db/migrations/001_create_schema.rb +261 -0
- data/db/migrations/002_action_id_cascade.rb +66 -0
- data/db/migrations/003_schema_constraints.rb +297 -0
- data/db/migrations/004_drop_tx_reqs.rb +32 -0
- data/lib/bsv/wallet/postgres/action.rb +80 -0
- data/lib/bsv/wallet/postgres/action_label.rb +14 -0
- data/lib/bsv/wallet/postgres/arc_adapter.rb +32 -0
- data/lib/bsv/wallet/postgres/basket.rb +13 -0
- data/lib/bsv/wallet/postgres/block.rb +13 -0
- data/lib/bsv/wallet/postgres/broadcast.rb +87 -0
- data/lib/bsv/wallet/postgres/broadcast_callback.rb +54 -0
- data/lib/bsv/wallet/postgres/broadcast_queue.rb +98 -0
- data/lib/bsv/wallet/postgres/certificate.rb +13 -0
- data/lib/bsv/wallet/postgres/certificate_field.rb +13 -0
- data/lib/bsv/wallet/postgres/display_txid.rb +25 -0
- data/lib/bsv/wallet/postgres/input.rb +14 -0
- data/lib/bsv/wallet/postgres/label.rb +15 -0
- data/lib/bsv/wallet/postgres/output.rb +64 -0
- data/lib/bsv/wallet/postgres/output_basket.rb +15 -0
- data/lib/bsv/wallet/postgres/output_detail.rb +12 -0
- data/lib/bsv/wallet/postgres/output_tag.rb +14 -0
- data/lib/bsv/wallet/postgres/proof_store.rb +109 -0
- data/lib/bsv/wallet/postgres/setting.rb +32 -0
- data/lib/bsv/wallet/postgres/spendable.rb +12 -0
- data/lib/bsv/wallet/postgres/store.rb +580 -0
- data/lib/bsv/wallet/postgres/tag.rb +15 -0
- data/lib/bsv/wallet/postgres/tx_proof.rb +16 -0
- data/lib/bsv/wallet/postgres/utxo_pool.rb +58 -0
- data/lib/bsv/wallet/postgres/version.rb +9 -0
- data/lib/bsv/wallet/postgres.rb +77 -0
- data/lib/bsv-wallet-postgres.rb +1 -1
- metadata +49 -35
- data/lib/bsv/wallet_postgres/migrations/001_create_wallet_tables.rb +0 -58
- data/lib/bsv/wallet_postgres/migrations/002_add_output_state.rb +0 -33
- data/lib/bsv/wallet_postgres/migrations/003_add_wallet_settings.rb +0 -20
- data/lib/bsv/wallet_postgres/migrations/004_add_pending_metadata.rb +0 -69
- data/lib/bsv/wallet_postgres/migrations/005_add_txid_unique_index.rb +0 -27
- data/lib/bsv/wallet_postgres/migrations/006_create_broadcast_jobs.rb +0 -68
- data/lib/bsv/wallet_postgres/postgres_store.rb +0 -482
- data/lib/bsv/wallet_postgres/solid_queue_adapter.rb +0 -328
- data/lib/bsv/wallet_postgres/version.rb +0 -7
- data/lib/bsv/wallet_postgres.rb +0 -13
|
@@ -1,482 +0,0 @@
|
|
|
1
|
-
# frozen_string_literal: true
|
|
2
|
-
|
|
3
|
-
require 'sequel'
|
|
4
|
-
require 'sequel/extensions/migration'
|
|
5
|
-
require 'json'
|
|
6
|
-
|
|
7
|
-
module BSV
  module Wallet
    # PostgreSQL-backed storage adapter for +BSV::Wallet+.
    #
    # Implements the full {StorageAdapter} interface against a Sequel
    # +Database+ object. Survives process restarts, scales to multiple
    # instances, and is thread-safe via Sequel's connection pool.
    #
    # @example Quickstart
    #   require 'bsv-wallet-postgres'
    #
    #   db = Sequel.connect(ENV['DATABASE_URL'])
    #   BSV::Wallet::PostgresStore.migrate!(db)
    #
    #   store = BSV::Wallet::PostgresStore.new(db)
    #   wallet = BSV::Wallet::WalletClient.new(key, storage: store)
    #
    # @example Bringing your own migration runner
    #   # Copy lib/bsv/wallet_postgres/migrations/001_create_wallet_tables.rb
    #   # into your own db/migrate directory, then run your framework's
    #   # migrator as normal. `migrate!` is a convenience — not a requirement.
    #
    # === Design notes
    #
    # * JSONB is the source of truth. Every row stores the full record
    #   hash in a +data+ jsonb column; dedicated indexed columns
    #   (+basket+, +tags+, +labels+, +certifier+, ...) exist only to make
    #   queries fast. Reads return the jsonb blob so adding fields to
    #   bsv-wallet's record hashes does not require a schema change.
    #
    # * Outputs upsert on +outpoint+ (unique); certificates upsert on the
    #   composite unique +(type, serial_number, certifier)+. Proofs and
    #   transactions upsert on their +txid+ primary key. Actions are
    #   append-only — the interface has no natural key for actions.
    #
    # * Pagination is ordered by insertion (+id ASC+) to match MemoryStore.
    #
    # * This class is thread-safe because Sequel is — the adapter itself
    #   holds no mutable state beyond the injected database handle.
    class PostgresStore
      include StorageAdapter

      MIGRATIONS_DIR = File.expand_path('migrations', __dir__)

      # Run the shipped wallet schema migrations against +db+.
      #
      # Uses Sequel's migrator so every schema change ships as a numbered
      # migration file and the database tracks which ones have been
      # applied. Safe to call repeatedly.
      #
      # Consumers who prefer their own migration framework can copy the
      # migration file(s) out of +lib/bsv/wallet_postgres/migrations/+
      # instead of calling this helper.
      #
      # @param db [Sequel::Database]
      # @return [void]
      def self.migrate!(db)
        Sequel::Migrator.run(db, MIGRATIONS_DIR)
      end

      # Register the global Sequel query-builder helpers used by this class
      # (+Sequel.pg_array_op+ / +Sequel.pg_jsonb_op+). Unlike the per-database
      # +pg_array+ / +pg_json+ extensions loaded in +initialize+, +pg_array_ops+
      # and +pg_json_ops+ are global — they mutate Sequel's top-level namespace
      # the first time this class body is evaluated (typically on autoload).
      # This is an intentional side effect: any consumer that has
      # +require 'bsv-wallet-postgres'+ in their Gemfile has opted in.
      Sequel.extension :pg_array_ops
      Sequel.extension :pg_json_ops

      # @param db [Sequel::Database] a Sequel database handle. The caller
      #   owns connection lifecycle, pool sizing, and migrations.
      def initialize(db)
        @db = db
        @db.extension :pg_array
        @db.extension :pg_json
      end

      # @return [Sequel::Database] the underlying database handle
      attr_reader :db

      # --- Actions ---

      # Append a new action record. Actions are append-only (no upsert).
      #
      # @param action_data [Hash] the full action record hash
      # @return [Hash] +action_data+, unchanged
      def store_action(action_data)
        row = action_row(action_data)
        @db[:wallet_actions].insert(row)
        action_data
      end

      # @param query [Hash] supports +:labels+, +:label_query_mode+,
      #   +:offset+, +:limit+
      # @return [Array<Hash>] matching action records (symbol keys)
      def find_actions(query)
        ds = filter_actions(@db[:wallet_actions], query)
        paginate(ds, query).map { |r| symbolise_keys(r[:data]) }
      end

      # @return [Integer] number of actions matching +query+ (unpaginated)
      def count_actions(query)
        filter_actions(@db[:wallet_actions], query).count
      end

      # Update the +status+ field of the action identified by +txid+.
      #
      # @raise [BSV::Wallet::WalletError] if no action has that txid
      # @return [Hash] the updated action record
      def update_action_status(txid, new_status)
        # Fetch by txid first, then update by primary key so only exactly one
        # row is targeted. The unique index on txid makes this unambiguous, but
        # scoping to the id column makes the intent explicit and is safe even
        # on databases where the migration has not yet been applied.
        row = @db[:wallet_actions].where(txid: txid).first
        raise WalletError, "Action not found: #{txid}" unless row

        @db[:wallet_actions].where(id: row[:id]).update(
          data: Sequel.lit(
            "data || jsonb_build_object('status', ?)",
            new_status
          )
        )
        symbolise_keys(@db[:wallet_actions].where(id: row[:id]).first[:data])
      end

      # @return [Boolean] true if a row was deleted
      def delete_action(txid)
        @db[:wallet_actions].where(txid: txid).delete.positive?
      end

      # --- Outputs ---

      # Insert or update an output, keyed by its unique +outpoint+.
      #
      # @param output_data [Hash] the full output record hash
      # @return [Hash] +output_data+, unchanged
      def store_output(output_data)
        row = output_row(output_data)
        @db[:wallet_outputs]
          .insert_conflict(
            target: :outpoint,
            update: {
              basket: row[:basket],
              tags: row[:tags],
              spendable: row[:spendable],
              state: row[:state],
              satoshis: row[:satoshis],
              data: row[:data]
            }
          )
          .insert(row)
        output_data
      end

      # @param query [Hash] supports +:outpoint+, +:basket+, +:tags+,
      #   +:tag_query_mode+, +:include_spent+, +:offset+, +:limit+
      # @return [Array<Hash>] matching output records (symbol keys)
      def find_outputs(query)
        ds = filter_outputs(@db[:wallet_outputs], query)
        paginate(ds, query).map { |r| symbolise_keys(r[:data]) }
      end

      # @return [Integer] number of outputs matching +query+ (unpaginated)
      def count_outputs(query)
        filter_outputs(@db[:wallet_outputs], query).count
      end

      # @return [Boolean] true if a row was deleted
      def delete_output(outpoint)
        @db[:wallet_outputs].where(outpoint: outpoint).delete.positive?
      end

      # Returns outputs whose effective state is +:spendable+.
      #
      # Legacy rows with +state = NULL+ are treated as spendable when the
      # +spendable+ boolean is true (or absent), matching MemoryStore's
      # effective_state logic.
      #
      # @param basket [String, nil] restrict to this basket when provided
      # @param min_satoshis [Integer, nil] exclude outputs below this value
      # @param sort_order [Symbol] +:asc+ or +:desc+ (default +:desc+, largest first)
      # @return [Array<Hash>]
      def find_spendable_outputs(basket: nil, min_satoshis: nil, sort_order: :desc)
        ds = @db[:wallet_outputs]
             .where(Sequel.lit('(state = ? OR (state IS NULL AND spendable = TRUE))', 'spendable'))
        ds = ds.where(basket: basket) if basket
        if min_satoshis
          ds = ds.where(
            Sequel.lit('COALESCE(satoshis, (data->>?)::bigint, 0) >= ?', 'satoshis', min_satoshis)
          )
        end
        satoshis_expr = Sequel.lit('COALESCE(satoshis, (data->>?)::bigint, 0)', 'satoshis')
        ds = ds.order(sort_order == :asc ? Sequel.asc(satoshis_expr) : Sequel.desc(satoshis_expr))
        ds.all.map { |r| symbolise_keys(r[:data]) }
      end

      # Transitions the state of an existing output.
      #
      # When +new_state+ is +:pending+, sets +pending_since+, +pending_reference+,
      # and +no_send+, and merges those values into the JSONB +data+ blob.
      #
      # When transitioning away from +:pending+, clears the pending metadata
      # columns and removes the corresponding keys from the JSONB blob.
      #
      # @param outpoint [String] the outpoint identifier
      # @param new_state [Symbol] +:spendable+, +:pending+, or +:spent+
      # @param pending_reference [String, nil] caller-supplied label for a pending lock
      # @param no_send [Boolean, nil] true if the lock belongs to a no_send transaction
      # @raise [BSV::Wallet::WalletError] if the outpoint is not found
      # @return [Hash] the updated output hash
      def update_output_state(outpoint, new_state, pending_reference: nil, no_send: nil)
        state_str = new_state.to_s

        # Keep legacy spendable boolean in sync so filter_outputs and other
        # queries that haven't migrated to the state column still work.
        spendable_bool = new_state == :spendable

        if new_state == :pending
          updates = {
            state: state_str,
            spendable: spendable_bool,
            pending_since: Sequel.lit('NOW()'),
            pending_reference: pending_reference,
            no_send: no_send ? true : false,
            data: Sequel.lit(
              "data || jsonb_build_object('state', ?, 'pending_since', NOW()::text, 'pending_reference', ?, 'no_send', ?)",
              state_str, pending_reference, no_send ? true : false
            )
          }
        else
          updates = {
            state: state_str,
            spendable: spendable_bool,
            pending_since: nil,
            pending_reference: nil,
            no_send: false
          }
          # Remove pending keys from JSONB blob, update state
          updates[:data] = Sequel.lit(
            "(data - 'pending_since' - 'pending_reference' - 'no_send') || jsonb_build_object('state', ?)",
            state_str
          )
        end

        ds = @db[:wallet_outputs].where(outpoint: outpoint)
        rows_updated = ds.update(updates)
        raise WalletError, "Output not found: #{outpoint}" if rows_updated.zero?

        row = ds.first
        symbolise_keys(row[:data])
      end

      # Atomically marks a set of outpoints as +:pending+.
      #
      # Uses +UPDATE ... WHERE state = 'spendable' ... RETURNING outpoint+ so that
      # the check-and-set is atomic at the database level. A concurrent caller that
      # wins the race will have already changed the state to 'pending', so the
      # second caller's WHERE clause will not match and will return nothing. No
      # explicit row-level locking is needed — the UPDATE itself takes the lock.
      #
      # Legacy rows with +state = NULL AND spendable = TRUE+ are also eligible.
      #
      # @param outpoints [Array<String>] outpoint identifiers to lock
      # @param reference [String] caller-supplied pending reference
      # @param no_send [Boolean] true if this is a no_send lock
      # @return [Array<String>] outpoints that were actually locked
      def lock_utxos(outpoints, reference:, no_send: false)
        return [] if outpoints.empty?

        rows = @db[:wallet_outputs]
               .where(outpoint: outpoints)
               .where(Sequel.lit('(state = ? OR (state IS NULL AND spendable = TRUE))', 'spendable'))
               .returning(:outpoint)
               .update(
                 state: 'pending',
                 spendable: false,
                 pending_since: Sequel.lit('NOW()'),
                 pending_reference: reference,
                 no_send: no_send ? true : false,
                 data: Sequel.lit(
                   "data || jsonb_build_object('state', 'pending', 'pending_since', NOW()::text, " \
                   "'pending_reference', ?, 'no_send', ?)",
                   reference, no_send ? true : false
                 )
               )

        rows.map { |r| r[:outpoint] }
      end

      # Releases stale pending locks back to +:spendable+.
      #
      # Any output in +:pending+ state whose +pending_since+ is older than
      # +timeout+ seconds is reset to +spendable+ and its pending metadata is
      # cleared. Outputs with +no_send = true+ are exempt and remain pending.
      # Outputs with +pending_since = NULL+ are also skipped — they are treated
      # as freshly locked (NULL means "just acquired but no timestamp yet").
      #
      # @param timeout [Integer] age in seconds before a lock is considered stale (default 300)
      # @return [Integer] number of outputs released
      def release_stale_pending!(timeout: 300)
        # NOTE: PostgreSQL's INTERVAL typed-literal syntax requires a string
        # constant, so `INTERVAL ?` with a bind parameter is a syntax error.
        # Multiplying the integer timeout by INTERVAL '1 second' is the
        # parameter-safe equivalent.
        rows = @db[:wallet_outputs]
               .where(state: 'pending')
               .where(Sequel.lit('no_send IS NOT TRUE'))
               .where(Sequel.lit('pending_since IS NOT NULL'))
               .where(Sequel.lit("pending_since < (NOW() - (? * INTERVAL '1 second'))", timeout))
               .returning(:outpoint)
               .update(
                 state: 'spendable',
                 spendable: true,
                 pending_since: nil,
                 pending_reference: nil,
                 no_send: false,
                 data: Sequel.lit(
                   "(data - 'pending_since' - 'pending_reference' - 'no_send') || jsonb_build_object('state', 'spendable')"
                 )
               )

        rows.length
      end

      # --- Certificates ---

      # Insert or update a certificate, keyed by the composite unique
      # +(type, serial_number, certifier)+.
      #
      # @param cert_data [Hash] the full certificate record hash
      # @return [Hash] +cert_data+, unchanged
      def store_certificate(cert_data)
        row = certificate_row(cert_data)
        @db[:wallet_certificates]
          .insert_conflict(
            target: %i[type serial_number certifier],
            update: { subject: row[:subject], data: row[:data] }
          )
          .insert(row)
        cert_data
      end

      # @param query [Hash] supports +:certifiers+, +:types+, +:subject+,
      #   +:attributes+, +:offset+, +:limit+
      # @return [Array<Hash>] matching certificate records (symbol keys)
      def find_certificates(query)
        ds = filter_certificates(@db[:wallet_certificates], query)
        paginate(ds, query).map { |r| symbolise_keys(r[:data]) }
      end

      # @return [Integer] number of certificates matching +query+ (unpaginated)
      def count_certificates(query)
        filter_certificates(@db[:wallet_certificates], query).count
      end

      # @return [Boolean] true if a row was deleted
      def delete_certificate(type:, serial_number:, certifier:)
        @db[:wallet_certificates]
          .where(type: type, serial_number: serial_number, certifier: certifier)
          .delete
          .positive?
      end

      # --- Proofs ---

      # Upsert a merkle proof (BUMP hex) keyed by txid.
      def store_proof(txid, bump_hex)
        @db[:wallet_proofs]
          .insert_conflict(target: :txid, update: { bump_hex: bump_hex })
          .insert(txid: txid, bump_hex: bump_hex)
      end

      # @return [String, nil] the stored BUMP hex, or nil when absent
      def find_proof(txid)
        @db[:wallet_proofs].where(txid: txid).get(:bump_hex)
      end

      # --- Transactions ---

      # Upsert a raw transaction hex keyed by txid.
      def store_transaction(txid, tx_hex)
        @db[:wallet_transactions]
          .insert_conflict(target: :txid, update: { tx_hex: tx_hex })
          .insert(txid: txid, tx_hex: tx_hex)
      end

      # @return [String, nil] the stored raw tx hex, or nil when absent
      def find_transaction(txid)
        @db[:wallet_transactions].where(txid: txid).get(:tx_hex)
      end

      # --- Settings ---

      # Upsert a key/value setting.
      def store_setting(key, value)
        @db[:wallet_settings]
          .insert_conflict(target: :key, update: { value: value })
          .insert(key: key, value: value)
      end

      # @return [String, nil] the stored value, or nil when absent
      def find_setting(key)
        @db[:wallet_settings].where(key: key).get(:value)
      end

      private

      # --- Row builders ---

      # Build a wallet_actions row: indexed columns + full JSONB blob.
      def action_row(data)
        {
          txid: data[:txid],
          labels: Sequel.pg_array(Array(data[:labels]), :text),
          data: Sequel.pg_jsonb(data.to_h)
        }
      end

      # Build a wallet_outputs row: indexed columns + full JSONB blob.
      def output_row(data)
        spendable = data[:spendable] != false # nil treated as spendable, like MemoryStore
        state = data[:state]&.to_s
        {
          outpoint: data[:outpoint],
          basket: data[:basket],
          tags: Sequel.pg_array(Array(data[:tags]), :text),
          spendable: spendable,
          state: state,
          satoshis: data[:satoshis],
          data: Sequel.pg_jsonb(data.to_h)
        }
      end

      # Build a wallet_certificates row: indexed columns + full JSONB blob.
      def certificate_row(data)
        {
          type: data[:type],
          serial_number: data[:serial_number],
          certifier: data[:certifier],
          subject: data[:subject],
          data: Sequel.pg_jsonb(data.to_h)
        }
      end

      # --- Filters ---

      def filter_actions(ds, query)
        apply_array_filter(ds, :labels, query[:labels], query[:label_query_mode])
      end

      def filter_outputs(ds, query)
        ds = ds.where(outpoint: query[:outpoint]) if query[:outpoint]
        ds = ds.where(basket: query[:basket]) if query[:basket]
        ds = apply_array_filter(ds, :tags, query[:tags], query[:tag_query_mode])
        ds = ds.where(Sequel.lit('(state = ? OR (state IS NULL AND spendable = TRUE))', 'spendable')) unless query[:include_spent]
        ds
      end

      def filter_certificates(ds, query)
        ds = ds.where(certifier: query[:certifiers]) if query[:certifiers]
        ds = ds.where(type: query[:types]) if query[:types]
        ds = ds.where(subject: query[:subject]) if query[:subject]
        ds = apply_attributes_filter(ds, query[:attributes]) if query[:attributes]
        ds
      end

      # Filter +column+ (a text[] column) by +values+. Mode 'all' requires
      # every value to be present (@>); any other mode matches on overlap (&&).
      def apply_array_filter(ds, column, values, mode)
        return ds unless values

        array = Sequel.pg_array(Array(values), :text)
        op = Sequel.pg_array_op(column)
        if mode == 'all'
          ds.where(op.contains(array))
        else
          ds.where(op.overlaps(array))
        end
      end

      def apply_attributes_filter(ds, attrs)
        # Match certificates whose stored fields hash contains every
        # key/value pair in +attrs+. Symbol keys stringify when the
        # record is serialised to JSONB, so symbol/string keys both work.
        fragment = attrs.each_with_object({}) { |(k, v), h| h[k.to_s] = v }
        ds.where(Sequel.lit('data->\'fields\' @> ?::jsonb', fragment.to_json))
      end

      # --- Pagination ---

      # Order by insertion (id ASC) and apply offset/limit; limit defaults
      # to 10 to match MemoryStore.
      def paginate(ds, query)
        ds.order(:id)
          .offset(query[:offset] || 0)
          .limit(query[:limit] || 10)
          .all
      end

      # --- JSONB read helpers ---

      # Recursively convert string-keyed hashes to symbol-keyed hashes so
      # reads round-trip with MemoryStore's contract. pg_json returns
      # string keys by default wrapped in +Sequel::Postgres::JSONBHash+
      # / +JSONBArray+, which are +DelegateClass+-based — not Hash/Array
      # subclasses — so the case statement uses +to_hash+ / +to_ary+
      # coercion to handle them.
      def symbolise_keys(obj)
        case obj
        when Hash
          obj.each_with_object({}) { |(k, v), h| h[k.to_sym] = symbolise_keys(v) }
        when Array
          obj.map { |e| symbolise_keys(e) }
        else
          return symbolise_keys(obj.to_hash) if obj.respond_to?(:to_hash)
          return symbolise_keys(obj.to_ary) if obj.respond_to?(:to_ary)

          obj
        end
      end
    end
  end
end
|