his_emr_api_lab 0.0.14 → 0.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/app/services/lab/lims/api/couchdb_api.rb +50 -0
- data/app/services/lab/lims/api/mysql_api.rb +316 -0
- data/app/services/lab/lims/config.rb +6 -0
- data/app/services/lab/lims/migrator.rb +62 -65
- data/app/services/lab/lims/order_dto.rb +1 -3
- data/app/services/lab/lims/utils.rb +8 -7
- data/app/services/lab/lims/worker.rb +27 -21
- data/app/services/lab/results_service.rb +1 -3
- data/lib/lab/version.rb +1 -1
- metadata +4 -3
- data/app/services/lab/lims/api.rb +0 -48
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2a7b072fd83de5ea6c03054521d68b82ac2c01237097a8ea9748113b5446a285
+  data.tar.gz: 5a67e4211a1d0b61d6f8a51357ee2a9fb898bd1c7973d2fc224039451f0a00d1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ea314a6c8598617ae9f8ef0cb272d3258ecc50d60f2226103625edd2a11ae7a49bbb08915816ffe3650c3bd78339db7373ee96b63e0a7072c3a703c2e62ecca7
+  data.tar.gz: ba1049dbb93d3d842cb31eae9a88e939e0e687e0a2d866432c07713bc670d75c92760dbdd2a22c89f733f00ccc2382b757cd0a4f32563a590fedfe253c6af5a2
data/app/services/lab/lims/api/couchdb_api.rb
ADDED
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'couch_bum/couch_bum'
+
+require_relative '../config'
+
+module Lab
+  module Lims
+    module Api
+      ##
+      # Talk to LIMS like a boss
+      class CouchDbApi
+        attr_reader :bum
+
+        def initialize(config: nil)
+          config ||= Config.couchdb
+
+          @bum = CouchBum.new(protocol: config['protocol'],
+                              host: config['host'],
+                              port: config['port'],
+                              database: "#{config['prefix']}_order_#{config['suffix']}",
+                              username: config['username'],
+                              password: config['password'])
+        end
+
+        ##
+        # Consume orders from the LIMS queue.
+        #
+        # Retrieves orders from the LIMS queue and passes each order to
+        # given block until the queue is empty or connection is terminated
+        # by calling method +choke+.
+        def consume_orders(from: 0, limit: 30)
+          bum.binge_changes(since: from, limit: limit, include_docs: true) do |change|
+            next unless change['doc']['type']&.casecmp?('Order')
+
+            yield OrderDTO.new(change['doc']), self
+          end
+        end
+
+        def create_order(order)
+          bum.couch_rest :post, '/', order
+        end
+
+        def update_order(id, order)
+          bum.couch_rest :put, "/#{id}", order
+        end
+      end
+    end
+  end
+end
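As a rough usage sketch (not part of the diff): the new class keeps the old `Lab::Lims::Api` interface, so a Rails console session in an app that mounts the engine and has the LIMS CouchDB settings in place might drive it as below; the block body is purely illustrative.

    # Hypothetical console usage; assumes Config.couchdb resolves to a reachable LIMS CouchDB.
    api = Lab::Lims::Api::CouchDbApi.new

    # consume_orders yields each OrderDTO together with the api instance itself.
    api.consume_orders(from: 0, limit: 30) do |order_dto, _api|
      puts "Pulled order ##{order_dto[:tracking_number]}"
    end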
data/app/services/lab/lims/api/mysql_api.rb
ADDED
@@ -0,0 +1,316 @@
+# frozen_string_literal: true
+
+module Lab
+  module Lims
+    module Api
+      class MysqlApi
+        def self.start
+          instance = MysqlApi.new
+          orders_processed = 0
+          instance.consume_orders(from: 0, limit: 1000) do |order|
+            puts "Order ##{orders_processed}"
+            pp order
+            orders_processed += 1
+            puts
+          end
+        end
+
+        def initialize(processes: 1, on_merge_processes: nil)
+          @processes = processes
+          @on_merge_processes = on_merge_processes
+          @mysql_connection_pool = {}
+        end
+
+        def multiprocessed?
+          @processes > 1
+        end
+
+        def consume_orders(from: nil, limit: 1000)
+          loop do
+            specimens_to_process = specimens(from, limit)
+            break if specimens_to_process.size.zero?
+
+            processes = multiprocessed? ? @processes : 0
+            on_merge_processes = ->(_item, index, _result) { @on_merge_processes&.call(from + index) }
+
+            Parallel.map(specimens_to_process, in_processes: processes, finish: on_merge_processes) do |specimen|
+              User.current ||= Utils.lab_user
+
+              tests = specimen_tests(specimen['specimen_id'])
+              results = tests.each_with_object({}) do |test, object|
+                object[test['test_name']] = test_results(test['test_id'])
+              end
+
+              dto = make_order_dto(
+                specimen: specimen,
+                patient: specimen_patient(specimen['specimen_id']),
+                test_results: results,
+                specimen_status_trail: specimen_status_trail(specimen['specimen_id']),
+                test_status_trail: tests.each_with_object({}) do |test, trails|
+                  trails[test['test_name']] = test_status_trail(test['test_id'])
+                end
+              )
+
+              yield dto, OpenStruct.new(last_seq: from)
+            end
+
+            from += limit
+          end
+        end
+
+        def parallel_map(items, on_merge: nil, &block); end
+
+        private
+
+        def specimens(start_id, limit)
+          query = <<~SQL
+            SELECT specimen.id AS specimen_id,
+                   specimen.couch_id AS doc_id,
+                   specimen_types.name AS specimen_name,
+                   specimen.tracking_number,
+                   specimen.priority,
+                   specimen.target_lab,
+                   specimen.sending_facility,
+                   specimen.drawn_by_id,
+                   specimen.drawn_by_name,
+                   specimen.drawn_by_phone_number,
+                   specimen.ward_id,
+                   specimen_statuses.name AS specimen_status,
+                   specimen.district,
+                   specimen.date_created AS order_date
+            FROM specimen
+            INNER JOIN specimen_types ON specimen_types.id = specimen.specimen_type_id
+            INNER JOIN specimen_statuses ON specimen_statuses.id = specimen.specimen_status_id
+          SQL
+
+          query = "#{query} WHERE specimen.id > #{sql_escape(start_id)}" if start_id
+          query = "#{query} LIMIT #{limit.to_i}"
+
+          Rails.logger.debug(query)
+          query(query)
+        end
+
+        ##
+        # Pull patient associated with given specimen
+        def specimen_patient(specimen_id)
+          results = query <<~SQL
+            SELECT patients.patient_number AS nhid,
+                   patients.name,
+                   patients.gender,
+                   DATE(patients.dob) AS birthdate
+            FROM patients
+            INNER JOIN tests
+              ON tests.patient_id = patients.id
+              AND tests.specimen_id = #{sql_escape(specimen_id)}
+            LIMIT 1
+          SQL
+
+          results.first
+        end
+
+        def specimen_tests(specimen_id)
+          query <<~SQL
+            SELECT tests.id AS test_id,
+                   test_types.name AS test_name,
+                   tests.created_by AS drawn_by_name
+            FROM tests
+            INNER JOIN test_types ON test_types.id = tests.test_type_id
+            WHERE tests.specimen_id = #{sql_escape(specimen_id)}
+          SQL
+        end
+
+        def specimen_status_trail(specimen_id)
+          query <<~SQL
+            SELECT specimen_statuses.name AS status_name,
+                   specimen_status_trails.who_updated_id AS updated_by_id,
+                   specimen_status_trails.who_updated_name AS updated_by_name,
+                   specimen_status_trails.who_updated_phone_number AS updated_by_phone_number,
+                   specimen_status_trails.time_updated AS date
+            FROM specimen_status_trails
+            INNER JOIN specimen_statuses
+              ON specimen_statuses.id = specimen_status_trails.specimen_status_id
+            WHERE specimen_status_trails.specimen_id = #{sql_escape(specimen_id)}
+          SQL
+        end
+
+        def test_status_trail(test_id)
+          query <<~SQL
+            SELECT test_statuses.name AS status_name,
+                   test_status_trails.who_updated_id AS updated_by_id,
+                   test_status_trails.who_updated_name AS updated_by_name,
+                   test_status_trails.who_updated_phone_number AS updated_by_phone_number,
+                   COALESCE(test_status_trails.time_updated, test_status_trails.created_at) AS date
+            FROM test_status_trails
+            INNER JOIN test_statuses
+              ON test_statuses.id = test_status_trails.test_status_id
+            WHERE test_status_trails.test_id = #{sql_escape(test_id)}
+          SQL
+        end
+
+        def test_results(test_id)
+          query <<~SQL
+            SELECT measures.name AS measure_name,
+                   test_results.result,
+                   test_results.time_entered AS date
+            FROM test_results
+            INNER JOIN measures ON measures.id = test_results.measure_id
+            WHERE test_results.test_id = #{sql_escape(test_id)}
+          SQL
+        end
+
+        def make_order_dto(specimen:, patient:, test_status_trail:, specimen_status_trail:, test_results:)
+          drawn_by_first_name, drawn_by_last_name = specimen['drawn_by_name']&.split
+          patient_first_name, patient_last_name = patient['name'].split
+
+          OrderDTO.new(
+            _id: specimen['doc_id'].blank? ? SecureRandom.uuid : specimen['doc_id'],
+            _rev: '0',
+            tracking_number: specimen['tracking_number'],
+            date_created: specimen['order_date'],
+            sample_type: specimen['specimen_name'],
+            tests: test_status_trail.keys,
+            districy: specimen['district'], # districy [sic] - That's how it's named
+            order_location: specimen['ward_id'],
+            sending_facility: specimen['sending_facility'],
+            receiving_facility: specimen['target_lab'],
+            priority: specimen['priority'],
+            patient: {
+              id: patient['nhid'],
+              first_name: patient_first_name,
+              last_name: patient_last_name,
+              gender: patient['gender'],
+              birthdate: patient['birthdate'],
+              email: nil,
+              phone_number: nil
+            },
+            type: 'Order',
+            who_order_test: {
+              first_name: drawn_by_first_name,
+              last_name: drawn_by_last_name,
+              id: specimen['drawn_by_id'],
+              phone_number: specimen['drawn_by_phone_number']
+            },
+            sample_status: specimen['specimen_status'],
+            sample_statuses: specimen_status_trail.each_with_object({}) do |trail_entry, object|
+              first_name, last_name = trail_entry['updated_by_name'].split
+
+              object[format_date(trail_entry['date'])] = {
+                status: trail_entry['status_name'],
+                updated_by: {
+                  first_name: first_name,
+                  last_name: last_name,
+                  phone_number: trail_entry['updated_by_phone_number'],
+                  id: trail_entry['updated_by_id']
+                }
+              }
+            end,
+            test_statuses: test_status_trail.each_with_object({}) do |trail_entry, formatted_trail|
+              test_name, test_statuses = trail_entry
+
+              formatted_trail[test_name] = test_statuses.each_with_object({}) do |test_status, formatted_statuses|
+                updated_by_first_name, updated_by_last_name = test_status['updated_by_name'].split
+
+                formatted_statuses[format_date(test_status['date'])] = {
+                  status: test_status['status_name'],
+                  updated_by: {
+                    first_name: updated_by_first_name,
+                    last_name: updated_by_last_name,
+                    phone_number: test_status['updated_by_phone_number'],
+                    id: test_status['updated_by_id']
+                  }
+                }
+              end
+            end,
+            test_results: test_results.each_with_object({}) do |results_entry, formatted_results|
+              test_name, results = results_entry
+
+              formatted_results[test_name] = format_test_result_for_dto(test_name, specimen, results, test_status_trail)
+            end
+          )
+        end
+
+        def format_test_result_for_dto(test_name, specimen, results, test_status_trail)
+          return {} if results.size.zero?
+
+          result_create_event = test_status_trail[test_name]&.find do |trail_entry|
+            trail_entry['status_name'].casecmp?('drawn')
+          end
+
+          result_creator_first_name, result_creator_last_name = result_create_event&.fetch('updated_by_name')&.split
+          unless result_creator_first_name
+            result_creator_first_name, result_creator_last_name = specimen['drawn_by_name']&.split
+          end
+
+          {
+            results: results.each_with_object({}) do |result, formatted_measures|
+              formatted_measures[result['measure_name']] = {
+                result_value: result['result']
+              }
+            end,
+            date_result_entered: format_date(result_create_event&.fetch('date') || specimen['order_date'], :iso),
+            result_entered_by: {
+              first_name: result_creator_first_name,
+              last_name: result_creator_last_name,
+              phone_number: result_create_event&.fetch('updated_by_phone_number') || specimen['drawn_by_phone_number'],
+              id: result_create_event&.fetch('updated_by_id') || specimen['updated_by_id']
+            }
+          }
+        end
+
+        def mysql
+          return mysql_connection if mysql_connection
+
+          config = lambda do |key|
+            @config ||= Lab::Lims::Config.database
+            @config['default'][key] || @config['development'][key]
+          end
+
+          connection = Mysql2::Client.new(host: config['host'] || 'localhost',
+                                          username: config['username'] || 'root',
+                                          password: config['password'],
+                                          port: config['port'] || '3306',
+                                          database: config['database'],
+                                          reconnect: true)
+
+          self.mysql_connection = connection
+        end
+
+        def pid
+          return -1 if Parallel.worker_number.nil?
+
+          Parallel.worker_number
+        end
+
+        def mysql_connection=(connection)
+          @mysql_connection_pool[pid] = connection
+        end
+
+        def mysql_connection
+          @mysql_connection_pool[pid]
+        end
+
+        def query(sql)
+          Rails.logger.debug("#{MysqlApi}: #{sql}")
+          mysql.query(sql)
+        end
+
+        def sql_escape(value)
+          mysql.escape(value.to_s)
+        end
+
+        ##
+        # Lims has some weird date formatting standards...
+        def format_date(date, format = nil)
+          date = date&.to_time
+
+          case format
+          when :iso
+            date&.strftime('%Y-%m-%d %H:%M:%S')
+          else
+            date&.strftime('%Y%m%d%H%M%S')
+          end
+        end
+      end
+    end
+  end
+end
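The file ships its own smoke test (`MysqlApi.start` above). A hedged sketch of driving the same API directly; it assumes `Lab::Lims::Config.database` points at a reachable LIMS MySQL database and that the mysql2 and parallel gems are available, since the code above relies on both.

    # Hypothetical usage; only methods defined in the diff above are called.
    api = Lab::Lims::Api::MysqlApi.new(processes: 1)

    api.consume_orders(from: 0, limit: 100) do |order_dto, context|
      # context is the OpenStruct yielded above, carrying last_seq.
      puts "#{order_dto[:tracking_number]} (seq #{context.last_seq})"
    end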
data/app/services/lab/lims/migrator.rb
CHANGED
@@ -29,6 +29,7 @@ require 'lab/lab_test'
 require 'lab/lims_order_mapping'
 require 'lab/lims_failed_import'
 
+require_relative './config'
 require_relative './worker'
 require_relative '../orders_service'
 require_relative '../results_service'
@@ -43,53 +44,73 @@ require_relative 'utils'
 module Lab
   module Lims
     module Migrator
-
-      MAX_THREADS = (ENV.fetch('MIGRATION_WORKERS') { 6 }).to_i
+      MAX_THREADS = ENV.fetch('MIGRATION_WORKERS', 6).to_i
 
-
+      class CouchDbMigratorApi < Api::CouchDbApi
+        def initialize(*args, processes: 1, on_merge_processes: nil, **kwargs)
+          super(*args, **kwargs)
+
+          @processes = processes
+          @on_merge_processes = on_merge_processes
+        end
 
         def consume_orders(from: nil, **_kwargs)
-          limit =
+          limit = 25_000
 
-
-
-
-            next unless row['doc']['type']&.casecmp?('Order')
+          loop do
+            on_merge_processes = ->(_item, index, _result) { @on_merge_processes&.call(from + index) }
+            processes = @processes > 1 ? @processes : 0
 
-
-
-            end
-          end
+            orders = read_orders(from, limit)
+            break if orders.empty?
 
-
-
+            Parallel.each(orders, in_processes: processes, finish: on_merge_processes) do |row|
+              next unless row['doc']['type']&.casecmp?('Order')
 
-
-
-
+              User.current = Utils.lab_user
+              yield OrderDTO.new(row['doc']), OpenStruct.new(last_seq: (from || 0) + limit, current_seq: from)
+            end
+
+            from += orders.size
           end
         end
 
         private
 
-        def
-
+        def read_orders(from, batch_size)
+          start_key_param = from ? "&skip=#{from}" : ''
+          url = "_all_docs?include_docs=true&limit=#{batch_size}#{start_key_param}"
+
+          Rails.logger.debug("#{CouchDbMigratorApi}: Pulling orders from LIMS CouchDB: #{url}")
+          response = bum.couch_rest :get, url
+
+          response['rows']
+        end
+      end
+
+      class MigrationWorker < Worker
+        LOG_FILE_PATH = LIMS_LOG_PATH.join('migration-last-id.dat')
+
+        attr_reader :rejections
+
+        def initialize(api_class)
+          api = api_class.new(processes: MAX_THREADS, on_merge_processes: method(:save_seq))
+          super(api)
         end
 
-        def
-
-          save_last_seq(last_seq + index)
-          status, reason = result
-          next unless status == :rejected
+        def last_seq
+          return 0 unless File.exist?(LOG_FILE_PATH)
 
-
+          File.open(LOG_FILE_PATH, File::RDONLY) do |file|
+            last_seq = file.read&.strip
+            return last_seq.blank? ? nil : last_seq&.to_i
          end
        end
 
-
-          return unless last_seq
+        private
 
-
+        def save_seq(last_seq)
+          File.open(LOG_FILE_PATH, File::WRONLY | File::CREAT, 0o644) do |file|
            Rails.logger.debug("Process ##{Parallel.worker_number}: Saving last seq: #{last_seq}")
            file.flock(File::LOCK_EX)
            file.write(last_seq.to_s)
@@ -97,39 +118,11 @@ module Lab
          end
        end
 
-        def
-
-        def read_orders(from, batch_size)
-          Enumerator.new do |enum|
-            loop do
-              start_key_param = from ? "&skip=#{from}" : ''
-              url = "_all_docs?include_docs=true&limit=#{batch_size}#{start_key_param}"
-
-              Rails.logger.debug("#{MigratorApi}: Pulling orders from LIMS CouchDB: #{url}")
-              response = bum.couch_rest :get, url
-
-              from ||= 0
-
-              break from if response['rows'].empty?
-
-              response['rows'].each do |row|
-                enum.yield(row)
-              end
-
-              from += response['rows'].size
-            end
-          end
-        end
-      end
-
-      class MigrationWorker < Worker
-        protected
+        def order_rejected(order_dto, reason)
+          @rejections ||= []
 
-
-          lims_api.last_seq
+          @rejections << OpenStruct.new(order: order_dto, reason: reason)
        end
-
-        def update_last_seq(_last_seq); end
      end
 
      def self.save_csv(filename, rows:, headers: nil)
@@ -177,8 +170,7 @@ module Lab
      MIGRATION_LOG_PATH = LIMS_LOG_PATH.join('migration.log')
 
      def self.start_migration
-
-        Dir.mkdir(log_dir) unless File.exist?(log_dir)
+        Dir.mkdir(LIMS_LOG_PATH) unless File.exist?(LIMS_LOG_PATH)
 
        logger = LoggerMultiplexor.new(Logger.new($stdout), MIGRATION_LOG_PATH)
        logger.level = :debug
@@ -186,12 +178,17 @@ module Lab
        ActiveRecord::Base.logger = logger
        # CouchBum.logger = logger
 
-
-
+        api_class = case ENV.fetch('MIGRATION_SOURCE', 'couchdb').downcase
+                    when 'couchdb' then CouchDbMigratorApi
+                    when 'mysql' then Api::MysqlApi
+                    else raise "Invalid MIGRATION_SOURCE: #{ENV['MIGRATION_SOURCE']}"
+                    end
+
+        worker = MigrationWorker.new(api_class)
 
-        worker.pull_orders
+        worker.pull_orders(batch_size: 10_000)
      ensure
-
+        worker && export_rejections(worker.rejections)
        export_failures
      end
    end
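Net effect: the migrator now chooses its source through the MIGRATION_SOURCE environment variable ('couchdb' by default, 'mysql' for the new MysqlApi) and fans work out across MIGRATION_WORKERS processes. A hedged invocation sketch, assuming the module nesting shown above and that both variables are exported before the engine loads (MAX_THREADS is read at load time):

    # Hypothetical: run inside the host app, e.g. via `rails runner`, with
    # MIGRATION_SOURCE=mysql (or couchdb) and MIGRATION_WORKERS=6 set in the environment.
    Lab::Lims::Migrator.start_migration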
data/app/services/lab/lims/order_dto.rb
CHANGED
@@ -69,9 +69,7 @@ module Lab
      end
 
      def start_date
-        if self['date_created'].blank?
-          raise LimsException, 'Order missing created date'
-        end
+        raise LimsException, 'Order missing created date' if self['date_created'].blank?
 
        Utils.parse_date(self['date_created'])
      end
data/app/services/lab/lims/utils.rb
CHANGED
@@ -52,21 +52,22 @@ module Lab
    end
 
    def self.parse_date(str_date, fallback_date = nil)
-
-
-
+      str_date = str_date&.to_s
+
+      raise "Can't parse blank date" if str_date.blank? && fallback_date.blank?
 
      return parse_date(fallback_date) if str_date.blank?
 
      str_date = str_date.gsub(/^00/, '20').gsub(/^180/, '20')
 
-
+      case str_date
+      when /\d{4}-\d{2}-\d{2}/
        str_date
-
+      when /\d{2}-\d{2}-\d{2}/
        Date.strptime(str_date, '%d-%m-%Y').strftime('%Y-%m-%d')
-
+      when /(\d{4}\d{2}\d{2})\d+/
        Date.strptime(str_date, '%Y%m%d').strftime('%Y-%m-%d')
-
+      when %r{\d{2}/\d{2}/\d{4}}
        str_date.to_date.to_s
      else
        Rails.logger.warn("Invalid date: #{str_date}")
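For reference, a few illustrative calls showing which branch of the rewritten case statement each input lands in; the fully qualified `Lab::Lims::Utils` name is assumed from the module nesting, and the comments only name the matching branch.

    Lab::Lims::Utils.parse_date('2021-05-21')       # /\d{4}-\d{2}-\d{2}/: returned as-is
    Lab::Lims::Utils.parse_date('21/05/2021')       # %r{\d{2}/\d{2}/\d{4}}: via String#to_date
    Lab::Lims::Utils.parse_date('20210521120000')   # /(\d{4}\d{2}\d{2})\d+/: via strptime('%Y%m%d')
    Lab::Lims::Utils.parse_date(nil, '2021-05-21')  # blank input falls through to fallback_date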
data/app/services/lab/lims/worker.rb
CHANGED
@@ -2,7 +2,7 @@
 
 require 'cgi/util'
 
-require_relative './api'
+require_relative './api/couchdb_api'
 require_relative './exceptions'
 require_relative './order_serializer'
 require_relative './utils'
@@ -26,7 +26,7 @@ module Lab
        User.current = Utils.lab_user
 
        fout.write("Worker ##{Process.pid} started at #{Time.now}")
-        worker = new(
+        worker = new(CouchDbApi.new)
        worker.pull_orders
        # TODO: Verify that names being pushed to LIMS are of the correct format (ie matching
        #       LIMS naming conventions). Enable pushing when that is done
@@ -81,28 +81,34 @@ module Lab
 
    ##
    # Pulls orders from the LIMS queue and writes them to the local database
-    def pull_orders
+    def pull_orders(batch_size: 10_000)
      logger.info("Retrieving LIMS orders starting from #{last_seq}")
 
-      lims_api.consume_orders(from: last_seq, limit:
+      lims_api.consume_orders(from: last_seq, limit: batch_size) do |order_dto, context|
        logger.debug("Retrieved order ##{order_dto[:tracking_number]}: #{order_dto}")
 
        patient = find_patient_by_nhid(order_dto[:patient][:id])
        unless patient
          logger.debug("Discarding order: Non local patient ##{order_dto[:patient][:id]} on order ##{order_dto[:tracking_number]}")
-
+          order_rejected(order_dto, "Patient NPID, '#{order_dto[:patient][:id]}', didn't match any local NPIDs")
+          next
+        end
+
+        if order_dto[:tests].empty?
+          logger.debug("Discarding order: Missing tests on order ##{order_dto[:tracking_number]}")
+          order_rejected(order_dto, 'Order is missing tests')
+          next
        end
 
        diff = match_patient_demographics(patient, order_dto['patient'])
        if diff.empty?
          save_order(patient, order_dto)
+          order_saved(order_dto)
        else
          save_failed_import(order_dto, 'Demographics not matching', diff)
        end
 
        update_last_seq(context.current_seq)
-
-        [:accepted, "Patient NPID, '#{order_dto[:patient][:id]}', matched"]
      rescue DuplicateNHID
        logger.warn("Failed to import order due to duplicate patient NHID: #{order_dto[:patient][:id]}")
        save_failed_import(order_dto, "Duplicate local patient NHID: #{order_dto[:patient][:id]}")
@@ -134,6 +140,10 @@ module Lab
      end
    end
 
+    def order_saved(order_dto); end
+
+    def order_rejected(order_dto, message); end
+
    private
 
    def find_patient_by_nhid(nhid)
@@ -156,9 +166,7 @@ module Lab
                        .distinct(:patient_id)
                        .all
 
-      if patients.size > 1
-        raise DuplicateNHID, "Duplicate National Health ID: #{nhid}"
-      end
+      raise DuplicateNHID, "Duplicate National Health ID: #{nhid}" if patients.size > 1
 
      patients.first
    end
@@ -209,10 +217,11 @@ module Lab
        mapping.update(pulled_at: Time.now)
      else
        order = create_order(patient, order_dto)
-        LimsOrderMapping.create
-
-
-
+        mapping = LimsOrderMapping.create(lims_id: order_dto[:_id],
+                                          order_id: order['id'],
+                                          pulled_at: Time.now,
+                                          revision: order_dto['_rev'])
+        byebug unless mapping.errors.empty?
      end
 
      order
@@ -222,9 +231,7 @@ module Lab
    def create_order(patient, order_dto)
      logger.debug("Creating order ##{order_dto['_id']}")
      order = OrdersService.order_test(order_dto.to_order_service_params(patient_id: patient.patient_id))
-      unless order_dto['test_results'].empty?
-        update_results(order, order_dto['test_results'])
-      end
+      update_results(order, order_dto['test_results']) unless order_dto['test_results'].empty?
 
      order
    end
@@ -233,9 +240,7 @@ module Lab
      logger.debug("Updating order ##{order_dto['_id']}")
      order = OrdersService.update_order(order_id, order_dto.to_order_service_params(patient_id: patient.patient_id)
                                                            .merge(force_update: true))
-      unless order_dto['test_results'].empty?
-        update_results(order, order_dto['test_results'])
-      end
+      update_results(order, order_dto['test_results']) unless order_dto['test_results'].empty?
 
      order
    end
@@ -250,6 +255,8 @@ module Lab
          next
        end
 
+        next unless test_results['results']
+
        measures = test_results['results'].map do |indicator, value|
          measure = find_measure(order, indicator, value)
          next nil unless measure
@@ -258,7 +265,6 @@ module Lab
        end
 
        measures = measures.compact
-
        next if measures.empty?
 
        creator = format_result_entered_by(test_results['result_entered_by'])
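The new `order_saved`/`order_rejected` no-ops give Worker subclasses a hook for reacting to import outcomes (MigrationWorker above overrides `order_rejected` to collect rejections). A minimal hypothetical subclass illustrating that seam; the class name and counters are invented for the example.

    # Hypothetical subclass; only the hook signatures come from the diff above.
    class CountingWorker < Lab::Lims::Worker
      attr_reader :saved, :rejected

      def order_saved(_order_dto)
        @saved = (@saved || 0) + 1
      end

      def order_rejected(order_dto, message)
        @rejected = (@rejected || 0) + 1
        Rails.logger.warn("Rejected ##{order_dto[:tracking_number]}: #{message}")
      end
    end

    CountingWorker.new(Lab::Lims::Api::CouchDbApi.new).pull_orders(batch_size: 1_000)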
data/app/services/lab/results_service.rb
CHANGED
@@ -87,9 +87,7 @@ module Lab
    end
 
    def validate_measure_params(params)
-      if params[:value].blank?
-        raise InvalidParameterError, 'measures.value is required'
-      end
+      raise InvalidParameterError, 'measures.value is required' if params[:value].blank?
 
      if params[:indicator]&.[](:concept_id).blank?
        raise InvalidParameterError, 'measures.indicator.concept_id is required'
data/lib/lab/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: his_emr_api_lab
 version: !ruby/object:Gem::Version
-  version: 0.0.14
+  version: 0.0.15
 platform: ruby
 authors:
 - Elizabeth Glaser Pediatric Foundation Malawi
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-05-
+date: 2021-05-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: couchrest
@@ -249,7 +249,8 @@ files:
 - app/services/lab/accession_number_service.rb
 - app/services/lab/concepts_service.rb
 - app/services/lab/labelling_service/order_label.rb
-- app/services/lab/lims/api.rb
+- app/services/lab/lims/api/couchdb_api.rb
+- app/services/lab/lims/api/mysql_api.rb
 - app/services/lab/lims/config.rb
 - app/services/lab/lims/exceptions.rb
 - app/services/lab/lims/failed_imports.rb
data/app/services/lab/lims/api.rb
DELETED
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'couch_bum/couch_bum'
-
-require_relative './config'
-
-module Lab
-  module Lims
-    ##
-    # Talk to LIMS like a boss
-    class Api
-      attr_reader :bum
-
-      def initialize(config: nil)
-        config ||= Config.couchdb
-
-        @bum = CouchBum.new(protocol: config['protocol'],
-                            host: config['host'],
-                            port: config['port'],
-                            database: "#{config['prefix']}_order_#{config['suffix']}",
-                            username: config['username'],
-                            password: config['password'])
-      end
-
-      ##
-      # Consume orders from the LIMS queue.
-      #
-      # Retrieves orders from the LIMS queue and passes each order to
-      # given block until the queue is empty or connection is terminated
-      # by calling method +choke+.
-      def consume_orders(from: 0, limit: 30)
-        bum.binge_changes(since: from, limit: limit, include_docs: true) do |change|
-          next unless change['doc']['type']&.casecmp?('Order')
-
-          yield OrderDTO.new(change['doc']), self
-        end
-      end
-
-      def create_order(order)
-        bum.couch_rest :post, '/', order
-      end
-
-      def update_order(id, order)
-        bum.couch_rest :put, "/#{id}", order
-      end
-    end
-  end
-end