riverqueue-activerecord 0.7.0 → 0.8.0
- checksums.yaml +4 -4
- data/lib/driver.rb +54 -65
- metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4835014e02a761dfc5bf31f57d8e98b2c63d346d5b0b4affb763196f80a40f23
+  data.tar.gz: c6c4fa4c6c11b75c7f4b813106c79687be8739bc5ca700232d066843c31cc47f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 59a2df57045ed0cdba52709074b13a8c7cc5d4edcaf9c47ea036ceb865612e1f45fb0adbcced4b2077fd496c3a0c5ad8f047cde5801a0c288ef32779fb7eef6a
+  data.tar.gz: 85db910e8107132c5e0afc0c8a9dfce6eaa3ac315899fcdaaee4a9f1c3c10000271f2eb4d47c2cc5553dbf9413ee737f9ea2a81ddfccdcdb3e826966fac9355c
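For reference, these digests cover the two archive members inside the `.gem` package rather than the package file itself. A minimal sketch of re-deriving them locally (file names are illustrative): after `gem fetch riverqueue-activerecord --version 0.8.0` and `tar -xf riverqueue-activerecord-0.8.0.gem`, the extracted `metadata.gz` and `data.tar.gz` can be hashed directly:

require "digest"

# Hash the extracted archive members; the values should match the SHA256
# entries recorded in checksums.yaml above.
%w[metadata.gz data.tar.gz].each do |member|
  puts "#{member}: #{Digest::SHA256.file(member).hexdigest}"
end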
data/lib/driver.rb CHANGED
@@ -31,36 +31,18 @@ module River::Driver
       end
     end
 
-    def advisory_lock(key)
-      ::ActiveRecord::Base.connection.execute("SELECT pg_advisory_xact_lock(#{key})")
-      nil
-    end
-
-    def advisory_lock_try(key)
-      ::ActiveRecord::Base.connection.execute("SELECT pg_try_advisory_xact_lock(123)").first["pg_try_advisory_xact_lock"]
-    end
-
     def job_get_by_id(id)
       data_set = RiverJob.where(id: id)
       data_set.first ? to_job_row_from_model(data_set.first) : nil
     end
 
-    def job_get_by_kind_and_unique_properties(get_params)
-      data_set = RiverJob.where(kind: get_params.kind)
-      data_set = data_set.where("tstzrange(?, ?, '[)') @> created_at", get_params.created_at[0], get_params.created_at[1]) if get_params.created_at
-      data_set = data_set.where(args: get_params.encoded_args) if get_params.encoded_args
-      data_set = data_set.where(queue: get_params.queue) if get_params.queue
-      data_set = data_set.where(state: get_params.state) if get_params.state
-      data_set.first ? to_job_row_from_model(data_set.first) : nil
-    end
-
     def job_insert(insert_params)
+      job_insert_many([insert_params]).first
     end
 
-    def
-      res = RiverJob.
-        insert_params_to_hash(
+    def job_insert_many(insert_params_many)
+      res = RiverJob.upsert_all(
+        insert_params_many.map { |param| insert_params_to_hash(param) },
         on_duplicate: Arel.sql("kind = EXCLUDED.kind"),
         returning: Arel.sql("*, (xmax != 0) AS unique_skipped_as_duplicate"),
 
@@ -69,15 +51,9 @@ module River::Driver
         # ActiveRecord tries to look up a unique index instead of letting
         # Postgres handle that, and of course it doesn't support a `WHERE`
         # clause. The workaround is to target the index name instead of columns.
-        unique_by: "
+        unique_by: "river_job_unique_idx"
       )
-
-      [to_job_row_from_raw(res), res.send(:hash_rows)[0]["unique_skipped_as_duplicate"]]
-    end
-
-    def job_insert_many(insert_params_many)
-      RiverJob.insert_all(insert_params_many.map { |p| insert_params_to_hash(p) })
-      insert_params_many.count
+      to_insert_results(res)
     end
 
     def job_list
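Two Postgres details carry the weight in the new batch insert path above. The `(xmax != 0) AS unique_skipped_as_duplicate` expression in `returning:` is a common trick for detecting that `ON CONFLICT ... DO UPDATE` fired instead of a plain insert, which is how callers learn that a job was skipped as a duplicate. And `unique_by:` has to name the index because ActiveRecord resolves columns to a full unique index and cannot express a partial index's `WHERE` clause. A rough sketch of the shape this aims for; the index definition and generated SQL below are illustrative, not the gem's actual migration:

# Illustrative partial unique index; River's real migration defines river_job_unique_idx.
#
#   CREATE UNIQUE INDEX river_job_unique_idx
#     ON river_job (unique_key)
#     WHERE unique_key IS NOT NULL;
#
# Passing the index *name* lets Postgres supply the conflict target, so the
# upsert comes out roughly as:
#
#   INSERT INTO river_job (...) VALUES (...), (...)
#     ON CONFLICT (unique_key) WHERE unique_key IS NOT NULL
#     DO UPDATE SET kind = EXCLUDED.kind          -- no-op update so RETURNING yields the row
#     RETURNING *, (xmax != 0) AS unique_skipped_as_duplicate;
#
res = RiverJob.upsert_all(
  rows_to_insert,                                 # array of attribute hashes (see insert_params_to_hash)
  on_duplicate: Arel.sql("kind = EXCLUDED.kind"),
  returning: Arel.sql("*, (xmax != 0) AS unique_skipped_as_duplicate"),
  unique_by: "river_job_unique_idx"
)

res.columns       # => column names, including the extra unique_skipped_as_duplicate flag
res.rows.first    # => raw values for the first returned row, decoded later via column_types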
@@ -94,8 +70,6 @@ module River::Driver
     end
 
     private def insert_params_to_hash(insert_params)
-      # the call to `#compact` is important so that we remove nils and table
-      # default values get picked up instead
       {
         args: insert_params.encoded_args,
         kind: insert_params.kind,
@@ -104,8 +78,10 @@ module River::Driver
         queue: insert_params.queue,
         state: insert_params.state,
         scheduled_at: insert_params.scheduled_at,
-        tags: insert_params.tags
-      }.compact
+        tags: insert_params.tags || [],
+        unique_key: insert_params.unique_key,
+        unique_states: insert_params.unique_states
+      }
     end
 
     private def to_job_row_from_model(river_job)
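With the switch to `upsert_all`, ActiveRecord expects every row in a batch to provide the same set of keys, so the old per-hash `#compact` (which dropped nils so column defaults could apply) no longer fits; defaults are made explicit instead, e.g. `tags` falls back to `[]` rather than inserting NULL. A minimal sketch, using a hypothetical struct limited to the fields visible in this diff:

# Hypothetical stand-in for the gem's insert params, fields abridged.
InsertParams = Struct.new(
  :encoded_args, :kind, :queue, :state, :scheduled_at,
  :tags, :unique_key, :unique_states,
  keyword_init: true
)

params = InsertParams.new(
  encoded_args: %({"customer_id": 123}),
  kind: "send_receipt",
  queue: "default",
  state: "available"
)

# Same shape for every row: nil where unset, [] for tags instead of NULL.
{
  args: params.encoded_args,
  kind: params.kind,
  queue: params.queue,
  state: params.state,
  scheduled_at: params.scheduled_at,
  tags: params.tags || [],
  unique_key: params.unique_key,
  unique_states: params.unique_states
}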
@@ -139,51 +115,64 @@ module River::Driver
         scheduled_at: river_job.scheduled_at.getutc,
         state: river_job.state,
         tags: river_job.tags,
-        unique_key: river_job.unique_key
+        unique_key: river_job.unique_key,
+        unique_states: river_job.unique_states
       )
     end
 
+    private def to_insert_results(res)
+      res.rows.map do |row|
+        to_job_row_from_raw(row, res.columns, res.column_types)
+      end
+    end
+
     # This is really awful, but some of ActiveRecord's methods (e.g. `.create`)
     # return a model, and others (e.g. `.upsert`) return raw values, and
     # therefore this second version from unmarshaling a job row exists. I
     # searched long and hard for a way to have the former type of method return
     # raw or the latter type of method return a model, but was unable to find
     # anything.
-    private def to_job_row_from_raw(
+    private def to_job_row_from_raw(row, columns, column_types)
       river_job = {}
 
-      river_job[
+      row.each_with_index do |val, i|
+        river_job[columns[i]] = column_types[i].deserialize(val)
       end
 
-        args: JSON.parse(river_job["args"]),
-        attempt: river_job["attempt"],
-        attempted_at: river_job["attempted_at"]&.getutc,
-        attempted_by: river_job["attempted_by"],
-        created_at: river_job["created_at"].getutc,
-        errors: river_job["errors"]&.map { |e|
-          deserialized_error = JSON.parse(e)
+      errors = river_job["errors"]&.map do |e|
+        deserialized_error = JSON.parse(e)
 
+        River::AttemptError.new(
+          at: Time.parse(deserialized_error["at"]),
+          attempt: deserialized_error["attempt"],
+          error: deserialized_error["error"],
+          trace: deserialized_error["trace"]
+        )
+      end
+
+      [
+        River::JobRow.new(
+          id: river_job["id"],
+          args: JSON.parse(river_job["args"]),
+          attempt: river_job["attempt"],
+          attempted_at: river_job["attempted_at"]&.getutc,
+          attempted_by: river_job["attempted_by"],
+          created_at: river_job["created_at"].getutc,
+          errors: errors,
+          finalized_at: river_job["finalized_at"]&.getutc,
+          kind: river_job["kind"],
+          max_attempts: river_job["max_attempts"],
+          metadata: river_job["metadata"],
+          priority: river_job["priority"],
+          queue: river_job["queue"],
+          scheduled_at: river_job["scheduled_at"].getutc,
+          state: river_job["state"],
+          tags: river_job["tags"],
+          unique_key: river_job["unique_key"],
+          unique_states: ::River::UniqueBitmask.to_states(river_job["unique_states"]&.to_i(2))
+        ),
+        river_job["unique_skipped_as_duplicate"]
+      ]
     end
   end
 end
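Since `upsert_all` returns an `ActiveRecord::Result` rather than models, each returned row is a bare array of values that gets zipped with `columns` and decoded through `column_types` before it can become a `River::JobRow`. The `unique_states` column arrives as a Postgres bit string, hence the `&.to_i(2)` binary parse before `UniqueBitmask.to_states` expands it into state names. A small sketch of that parsing step with a made-up value; the actual bit-to-state mapping lives in the riverqueue gem:

# Illustrative only: how the bit string round-trips through Ruby.
raw  = "11110101"            # a bit-string column comes back from Postgres as text
mask = raw&.to_i(2)          # => 245; `&.` preserves nil when the column is NULL

mask.to_s(2).rjust(8, "0")   # => "11110101", back to the stored bit pattern
mask[0]                      # => 1; Integer#[] reads single bits (bit 0 is least significant)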
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: riverqueue-activerecord
 version: !ruby/object:Gem::Version
-  version: 0.7.0
+  version: 0.8.0
 platform: ruby
 authors:
 - Blake Gentry
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-12-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -99,7 +99,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.5.16
 signing_key:
 specification_version: 4
 summary: ActiveRecord driver for the River Ruby gem.