ruby_astm 1.5.3 → 1.5.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/lib/publisher/pf_download_exception.rb +6 -0
- data/lib/publisher/pf_lab_interface.rb +407 -70
- data/lib/publisher/pf_update_exception.rb +6 -0
- data/lib/publisher/poller.rb +1 -1
- data/lib/publisher/stream_module.rb +68 -0
- data/lib/ruby_astm.rb +6 -0
- data/lib/ruby_astm/custom/esr.rb +3 -0
- data/lib/ruby_astm/custom/siemens_abg_electrolyte_module.rb +1 -1
- data/lib/ruby_astm/custom/siemens_abg_electrolyte_server.rb +0 -0
- data/lib/ruby_astm/custom/siemens_dimension_exl_module.rb +130 -0
- data/lib/ruby_astm/custom/siemens_dimension_exl_server.rb +46 -0
- data/lib/ruby_astm/lab_interface.rb +3 -3
- data/lib/ruby_astm/usb_module.rb +10 -15
- metadata +22 -2
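The headline change in 1.5.4 is the Pathofast LIS interface: Pf_Lab_Interface now takes a server URL, an organization id and a Firebase service-account key hash, streams events through the new StreamModule, and retries failed downloads/updates with the retriable gem (added as a runtime dependency in the metadata below). A minimal usage sketch inferred from the new initializer signature in this diff — the file paths and values here are placeholders, not part of the release:

    require 'json'
    require 'ruby_astm'

    # signature in 1.5.4:
    # initialize(mpg=nil, lis_security_key, server_url_with_port, organization_id, private_key_hash)
    key_hash  = JSON.parse(IO.read("service_account.json"))   # hypothetical path
    interface = Pf_Lab_Interface.new(
      "mappings.json",            # mpg
      "my-lis-security-key",
      "http://localhost:3000/",
      "org-123",
      key_hash
    )
    interface._start   # listens for Firebase events, reattempts failed updates, then processes the update queue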
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-
-  metadata.gz:
-  data.tar.gz:
+SHA256:
+  metadata.gz: 4e23929d7d26dd7c171b839033998571960bf11b5ca4c8668fa3c67a9a08327b
+  data.tar.gz: 603c5c33d330b6bdeb54d7d6d39750cc84f1d428c8dc042c9be278bd72f35644
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5cf677ebb74365e0be42400c7ed573093892343720eba5256a4a2041cf996b2577067e749ee20d4fa7b74dece3682794462273188270319802f1a3523274ab7c
+  data.tar.gz: f4832f9050d30efac849cb7b0bf594716aa468bd0faa66cc53dcbcbdf02090891a0e15e5b18b66a09e9500a43c6fbdd7b3fab6bc2764cf570fae4ab9fc0b672d
data/lib/publisher/pf_lab_interface.rb
CHANGED
@@ -1,9 +1,60 @@
 require 'fileutils'
-
+require_relative 'poller'
+require_relative 'pf_download_exception'
 require 'typhoeus'
+require 'resolv-replace'
+
+RestFirebase.class_eval do
+
+  attr_accessor :private_key_hash
+
+  def query
+    {:access_token => auth}
+  end
+
+  def get_jwt
+    puts Base64.encode64(JSON.generate(self.private_key_hash))
+    # Get your service account's email address and private key from the JSON key file
+    $service_account_email = self.private_key_hash["client_email"]
+    $private_key = OpenSSL::PKey::RSA.new self.private_key_hash["private_key"]
+    now_seconds = Time.now.to_i
+    payload = {:iss => $service_account_email,
+      :sub => $service_account_email,
+      :aud => self.private_key_hash["token_uri"],
+      :iat => now_seconds,
+      :exp => now_seconds + 1, # Maximum expiration time is one hour
+      :scope => 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/firebase.database'
+
+    }
+    JWT.encode payload, $private_key, "RS256"
+
+  end
+
+  def generate_access_token
+    uri = URI.parse(self.private_key_hash["token_uri"])
+    https = Net::HTTP.new(uri.host, uri.port)
+    https.use_ssl = true
+    req = Net::HTTP::Post.new(uri.path)
+    req['Cache-Control'] = "no-store"
+    req.set_form_data({
+      grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer",
+      assertion: get_jwt
+    })
+
+    resp = JSON.parse(https.request(req).body)
+    resp["access_token"]
+  end
+
+  def generate_auth opts={}
+    generate_access_token
+  end
+
+end
 
 class Pf_Lab_Interface < Poller
 
+  include StreamModule
+
   ORDERS = "orders"
   ORDERS_SORTED_SET = "orders_sorted_set"
   BARCODES = "barcodes"
@@ -16,8 +67,8 @@ class Pf_Lab_Interface < Poller
   ## 48 hours, expressed as seconds.
   DEFAULT_STORAGE_TIME_FOR_ORDERS_IN_SECONDS = 48*3600
   ## the last request that was made and what it said.
-
-
+  POLL_ENDPOINT = "interfaces"
+  PUT_ENDPOINT = "lis_update_orders"
   LAST_REQUEST = "last_request"
   FROM_EPOCH = "from_epoch"
   TO_EPOCH = "to_epoch"
@@ -34,9 +85,16 @@ class Pf_Lab_Interface < Poller
   ITEMS = "items"
   CODE = "code"
   ORDERS_TO_UPDATE_PER_CYCLE = 10
+  PREV_REQUEST_COMPLETED = "prev_request_completed"
 
   attr_accessor :lis_security_key
 
+  ## should include the https://www.xyz.com:3000
+  ## defaults to http://localhost:3000
+  attr_accessor :server_url_with_port
+
+  attr_accessor :retry_count
+
   ###################################################################
   ##
   ##
@@ -81,14 +139,20 @@ class Pf_Lab_Interface < Poller
   ###################################################################
   def remove_order(order_id)
     order = get_order(order_id)
-    order
-
-
-
-
+    puts "order id is:#{order_id} is"
+    unless order.blank?
+      puts "order not blank."
+      order[:reports].each do |report|
+        report[:tests].each do |test|
+          remove_barcode(test[:barcode])
+          remove_barcode(test[:code])
+        end
+      end
+      $redis.hdel(ORDERS,order_id)
+      $redis.zrem(ORDERS_SORTED_SET,order_id)
+    else
+      puts "order is blank."
     end
-    $redis.hdel(ORDERS,order[ID])
-    $redis.zrem(ORDERS_SORTED_SET,order[ID])
   end
 
   def remove_barcode(barcode)
@@ -132,7 +196,7 @@ class Pf_Lab_Interface < Poller
 
   ## @param[Hash] order : order object, as a hash.
   def add_order(order)
-
+    at_least_one_item_exists = false
     order[REPORTS].each do |report|
       test_machine_codes = report[TESTS].map{|c|
         $inverted_mappings[c[LIS_CODE]]
@@ -140,6 +204,7 @@ class Pf_Lab_Interface < Poller
       report[REQUIREMENTS].each do |req|
         get_priority_category(req)[ITEMS].each do |item|
           if !item[BARCODE].blank?
+            at_least_one_item_exists = true
             add_barcode(item[BARCODE],JSON.generate(
               {
                 :order_id => order[ID],
@@ -147,6 +212,7 @@ class Pf_Lab_Interface < Poller
               }
             ))
           elsif !item[CODE].blank?
+            at_least_one_item_exists = true
             add_barcode(item[CODE],JSON.generate({
               :order_id => order[ID],
               :machine_codes => test_machine_codes
@@ -155,14 +221,17 @@ class Pf_Lab_Interface < Poller
         end
       end
     end
-
-
+
+    unless at_least_one_item_exists.blank?
+      $redis.hset(ORDERS,order[ID],JSON.generate(order))
+      $redis.zadd(ORDERS_SORTED_SET,Time.now.to_i,order[ID])
+    end
   end
 
   ## start work on simple.
 
   def update_order(order)
-    $redis.hset(ORDERS,order[ID],JSON.generate(order))
+    $redis.hset(ORDERS,order[ID.to_sym],JSON.generate(order))
   end
 
   ## @param[Hash] order : the existing order
@@ -171,17 +240,30 @@ class Pf_Lab_Interface < Poller
   ## @working : updates the results from res, into the order at the relevant tests inside the order.
   ## $MAPPINGS -> [MACHINE_CODE => LIS_CODE]
   ## $INVERTED_MAPPINGS -> [LIS_CODE => MACHINE_CODE]
-  def add_test_result(order,res)
+  def add_test_result(order,res,lis_code)
+    #puts "res is:"
+    #puts res.to_s
+
     order[REPORTS.to_sym].each do |report|
+      #puts "doing report"
       report[TESTS.to_sym].each_with_index{|t,k|
-
+        #puts "doing test"
+        #puts t.to_s
+
+        puts "teh test lis code to sym is:"
+        puts t[LIS_CODE.to_sym]
+        puts "lis code is: #{lis_code.to_s}"
+        if t[LIS_CODE.to_sym] == lis_code.to_s
+          puts "got equality"
          t[RESULT_RAW.to_sym] = res[:value]
+          puts "set value"
        end
      }
    end
  end
 
  def queue_order_for_update(order)
+    update_order(order)
    $redis.lpush(UPDATE_QUEUE,order[ID.to_sym])
  end
 
@@ -199,31 +281,51 @@ class Pf_Lab_Interface < Poller
   end
 =end
   def all_hits_downloaded?(last_request)
-    last_request[
+    last_request[PREV_REQUEST_COMPLETED].to_s == "true"
   end
-
-
+
+  ## @param[Time] from : time object
+  ## @param[Time] to : time object
+  def fresh_request_params(from,to)
+    #puts "came to make fresh request params, with from epoch: #{from_epoch}"
     params = {}
-    params[TO_EPOCH] =
-    params[FROM_EPOCH] =
+    params[TO_EPOCH] = to.to_i
+    params[FROM_EPOCH] = from.to_i
     params[SKIP] = 0
     params
   end
 
-
+  ## so build request should have a from and a to
+  ## what are the defaults ?
+  ## @param[Time] from : default (nil)
+  ## @param[Time] to : default(nil)
+  def build_request(from=nil,to=nil)
+    puts "entering build request with from: #{from} and to:#{to}"
+    to ||= Time.now
+    from ||= to - 1.day
     last_request = get_last_request
     params = nil
     if last_request.blank?
-
+      AstmServer.log("no last request, making fresh request")
+      params = fresh_request_params(from,to)
     else
       if all_hits_downloaded?(last_request)
-
+        AstmServer.log("last request all hits have been downloaded, going for next request.")
+        if last_request[TO_EPOCH].to_i == to.to_i
+          return nil
+        else
+          params = fresh_request_params(last_request[TO_EPOCH],to)
+        end
       else
+        AstmServer.log("last request all hits not downloaded.")
        params = last_request
      end
    end
    params.merge!(lis_security_key: self.lis_security_key)
-
+    AstmServer.log("reuqest params become: #{params}")
+    AstmServer.log("sleeping")
+    #sleep(10000)
+    Typhoeus::Request.new(self.get_poll_url_path,params: params)
  end
 
  ## commits the request params to redis.
@@ -237,13 +339,15 @@ class Pf_Lab_Interface < Poller
    $redis.hset(LAST_REQUEST,SIZE,response_hash[SIZE].to_i)
    $redis.hset(LAST_REQUEST,FROM_EPOCH,response_hash[FROM_EPOCH].to_i)
    $redis.hset(LAST_REQUEST,TO_EPOCH,response_hash[TO_EPOCH].to_i)
+    $redis.hset(LAST_REQUEST,PREV_REQUEST_COMPLETED,request_size_completed?(response_hash).to_s)
  end
 
  # since we request only a certain set of orders per request
  # we need to know if the earlier request has been completed
  # or we still need to rerequest the same time frame again.
  def request_size_completed?(response_hash)
-
+    #puts response_hash.to_s
+    response_hash[SKIP].to_i + response_hash[ORDERS].size >= response_hash[SIZE].to_i
  end
  ###################################################################
  ##
@@ -263,36 +367,152 @@ class Pf_Lab_Interface < Poller
  ###################################################################
  ## @param[String] mpg : path to mappings file. Defaults to nil.
  ## @param[String] lis_security_key : the security key for the LIS organization, to be dowloaded from the organizations/show/id, endpoint in the website.
-  def initialize(mpg=nil,lis_security_key)
+  def initialize(mpg=nil,lis_security_key,server_url_with_port,organization_id,private_key_hash)
    super(mpg)
+    self.private_key_hash = private_key_hash
+    self.event_source = "organizations/" + organization_id
+    self.on_message_handler_function = "evented_poll_LIS_for_requisition"
    self.lis_security_key = lis_security_key
+
+    self.server_url_with_port = (server_url_with_port || BASE_URL)
+    self.retry_count = 0
+    ## called from stream module
+    setup_connection
    AstmServer.log("Initialized Lab Interface")
  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  ## this is triggered by whatever firebase sends
+  ## you put this in the callback, and let me block and see what happens.
+  ## we cannot watch two different endpoints ?
+  ## or we can ?
+  ## on the same endpoint -> will
+  ## so it becomes a merged document.
+  ## and both events will fire.
+  ## and get triggered.
+  def evented_poll_LIS_for_requisition(data)
+    unless data.blank?
+
+      data = data["data"].blank? ? data : data["data"]
+
+      unless data["delete_order"].blank?
+        puts "delete order is not blank"
+        unless data["delete_order"]["order_id"].blank?
+          puts "order id is not blank"
+          puts "going to delete the completed order --------------->"
+          delete_completed_order(data["delete_order"]["order_id"])
+        end
+      end
+      unless data["trigger_lis_poll"].blank?
+        unless data["trigger_lis_poll"]["epoch"].blank?
+          new_poll_LIS_for_requisition(data["trigger_lis_poll"]["epoch"].to_i)
+        end
+      end
+    else
+
+    end
+  end
+
+  def delete_completed_order(order_id)
+    remove_order(order_id)
+  end
+
+  def put_delete_order_event(order_id)
+    puts self.connection.put(self.event_source,:order_id => order_id)
+  end
+
+  def test_trigger_lis_poll(epoch=nil)
+    puts self.connection.put(self.event_source + "/trigger_lis_poll", :epoch => epoch)
+  end
+
+  def test_trigger_delete_order(order_id)
+    puts self.connection.put(self.event_source + "/delete_order", :order_id => order_id)
+  end
+
+  def new_poll_LIS_for_requisition(to_epoch=nil)
+    AstmServer.log(to_epoch.to_s)
+    while true
+      orders = []
+      begin
+        Retriable.retriable(on: PfDownloadException) do
+          self.retry_count+=1
+          AstmServer.log("retrying----->")
+          request = build_request(nil,to_epoch)
+          break if request.blank?
+          request.run
+          response = request.response
+          if response.success?
+            code = response.code
+            time = response.total_time
+            headers = response.headers
+            #AstmServer.log("successfully polled server")
+            response_hash = JSON.parse(response.body)
+            #AstmServer.log("Pathofast LIS poll response --->")
+            #AstmServer.log(response_hash.to_s)
+            orders = response_hash[ORDERS]
+            orders.each do |order|
+              add_order(order)
+            end
+            commit_request_params_to_redis(response_hash)
+            #puts "are the orders blank: #{orders.blank?}"
+            #break if orders.blank?
+          elsif response.timed_out?
+            #AstmServer.log("Error polling server with code: #{code}")
+            raise PfDownloadException.new("timeout")
+          elsif response.code == 0
+            #AstmServer.log("Error polling server with code: #{code}")
+            raise PfDownloadException.new("didnt get any http response")
+          else
+            #AstmServer.log("Error polling server with code: #{code}")
+            raise PfDownloadException.new("non 200 response")
+          end
+        end
+      rescue => e
+        puts e.to_s
+        puts "raised exception-----------> breaking."
+        ## retryable has raised the errors again.
+        break
+      else
+        ## break only if the orders are blank.
+        break if orders.blank?
+      end
+    end
+  end
+
+  ## how it deletes the records is that when all the reports in that order are verified, then that order is cleared from all LIS receptive organizations.
+  ## that's the only way.
+  ## once I'm done with that, its only the barcodes, and then the inventory bits.
+
+  ## how it deletes records
+  ## so this is called in the form of a while loop.
+  ## how it handles the update response.
+  def poll_LIS_for_requisition(to_epoch=nil)
+    AstmServer.log(to_epoch.to_s)
+    while true
+      #puts "came back to true"
+      request = build_request(nil,to_epoch)
+      break if request.blank?
+      request.run
+      response = request.response
+      code = response.code
+      time = response.total_time
+      headers = response.headers
+      if code.to_s != "200"
+        AstmServer.log("Error polling server with code: #{code}")
+        break
+      else
+        AstmServer.log("successfully polled server")
+        response_hash = JSON.parse(response.body)
+        AstmServer.log("Pathofast LIS poll response --->")
+        #AstmServer.log(response_hash.to_s)
+        orders = response_hash[ORDERS]
+        orders.each do |order|
+          add_order(order)
+        end
+        commit_request_params_to_redis(response_hash)
+        puts "are the orders blank: #{orders.blank?}"
+        break if orders.blank?
+      end
    end
-  request.run
  end
 
 =begin
@@ -388,39 +608,59 @@ data = [
  def process_update_queue
    #puts "came to process update queue."
    order_ids = []
+    #puts $redis.lrange UPDATE_QUEUE, 0, -1
+
+    ## first push that to patient.
+    ## first create that order and add that barcode.
+    ## for citrate.
+    ## then let that get downloaded.
+    ## so keep on test going for that.
+    ##
+    ## why complicate this so much.
+    ## just do a brpop?
+    ##
    ORDERS_TO_UPDATE_PER_CYCLE.times do |n|
      order_ids << $redis.rpop(UPDATE_QUEUE)
    end
    #puts "order ids popped"
    #puts order_ids.to_s
+    order_ids.compact!
+    order_ids.uniq!
    orders = order_ids.map{|c|
      get_order(c)
    }.compact
-
+    #puts orders[0].to_s
 
    #puts "orders are:"
-    #puts orders.
+    #puts orders.size
+    #exit(1)
 
-    req = Typhoeus::Request.new(
+    req = Typhoeus::Request.new(self.get_put_url_path, method: :put, body: {orders: orders}.to_json, params: {lis_security_key: self.lis_security_key}, headers: {Accept: 'application/json', "Content-Type".to_sym => 'application/json'})
 
 
    req.on_complete do |response|
      if response.success?
        response_body = response.body
        orders = JSON.parse(response.body)["orders"]
-        orders.
+        #puts orders.to_s
+        orders.values.each do |order|
+          #puts order.to_s
          if order["errors"].blank?
          else
            puts "got an error for the order."
            ## how many total error attempts to manage.
          end
        end
+        ## here we have to raise.
      elsif response.timed_out?
        AstmServer.log("got a time out")
+        raise PfUpdateException.new("update order timed out")
      elsif response.code == 0
        AstmServer.log(response.return_message)
+        raise PfUpdateException.new("update order response code 0")
      else
        AstmServer.log("HTTP request failed: " + response.code.to_s)
+        raise PfUpdateException.new("update order response code non success: #{response.code}")
      end
    end
 
@@ -440,45 +680,142 @@ data = [
    end
  end
 
+  ORDERS_KEY = "@orders"
+  FAILED_UPDATES = "failed_updates"
+  PATIENTS_REDIS_LIST = "patients"
+  PROCESSING_REDIS_LIST = "processing"
+
+  ## this is only done on startup
+  ## okay so what do we
+  def reattempt_failed_updates
+    $redis.scard(FAILED_UPDATES).times do
+      if patient_results = $redis.spop(FAILED_UPDATES)
+        patient_results = JSON.parse(patient_results)
+        begin
+          Retriable.retriable(on: PfUpdateException) do
+            unless update(patient_results)
+              raise PfUpdateException.new("didnt get any http response")
+            end
+          end
+        rescue => e
+          AstmServer.log("reattempted and failed")
+        ensure
+
+        end
+      end
+    end
+  end
+
+  ## we can do this.
+  ## args can be used to modulate exit behaviours
+  ## @param[Hash] args : hash of arguments
+  def update_LIS(args={})
+    prepare_redis
+    exit_requested = false
+    Kernel.trap( "INT" ) { exit_requested = true }
+    while !exit_requested
+      puts "exit not requested."
+      if patient_results = $redis.brpoplpush(PATIENTS_REDIS_LIST,PROCESSING_REDIS_LIST,0)
+        puts "got patient results."
+        patient_results = JSON.parse(patient_results)
+        puts "patient results are:"
+        puts JSON.pretty_generate(patient_results)
+        begin
+          Retriable.retriable(on: PfUpdateException) do
+            unless update(patient_results)
+              raise PfUpdateException.new("didnt get any http response")
+            end
+          end
+          exit_requested = !args[:exit_on_success].blank?
+          #puts "exit requested becomes: #{exit_requested}"
+        rescue => e
+          $redis.sadd(FAILED_UPDATES,JSON.generate(patient_results))
+          exit_requested = !args[:exit_on_failure].blank?
+          puts "came to eventual rescue, exit requested is: #{exit_requested}"
+        ensure
+          $redis.lpop("processing")
+        end
+      else
+        puts "no patient results"
+      end
+    end
+  end
+
+
+
  def update(data)
-    data
-
-
+    puts "data is:"
+    puts JSON.pretty_generate(data)
+    data[ORDERS_KEY].each do |order|
+      puts "order is "
+      puts order
+      barcode = order["id"]
+      results = order["results"]
+      puts "barcode is: #{barcode}, results are : #{results}"
+      results.deep_symbolize_keys!
      if barcode_hash = get_barcode(barcode)
+        puts "barcode hash is: #{barcode_hash}"
        if order = get_order(barcode_hash[:order_id])
+          puts "got order"
          ## update the test results, and add the order to the final update hash.
-          puts "order got from barcode is:"
-          puts order
+          #puts "order got from barcode is:"
+          #puts order
          machine_codes = barcode_hash[:machine_codes]
-
-
+          puts "machine codes: #{machine_codes}"
+
+          results.keys.each do |lis_code|
+            res = results[lis_code]
+            add_test_result(order,res,lis_code)
+          end
+
+=begin
+          results.values.each do |res|
          if machine_codes.include? res[:name]
            ## so we need to update to the requisite test inside the order.
-
+            puts "came to add test result"
+            puts res
+            add_test_result(order,res,nil)
            ## commit to redis
            ## and then
          end
          end
-
+=end
+          #puts "came to queue order for update"
          queue_order_for_update(order)
        end
      else
        AstmServer.log("the barcode:#{barcode}, does not exist in the barcodes hash")
        ## does not exist.
      end
-    end
+    end
 
    process_update_queue
-
+
 
  end
 
+  def get_poll_url_path
+    self.server_url_with_port + POLL_ENDPOINT
+  end
+
+  def get_put_url_path
+    self.server_url_with_port + PUT_ENDPOINT
+  end
+
+
+  def _start
+    evented_poll_LIS_for_requisition({"trigger_lis_poll" => {"epoch" => Time.now.to_i.to_s}})
+    reattempt_failed_updates
+    update_LIS
+  end
+
+  ## the watcher is seperate.
+  ## that deals with other things.
+
+  ## this method is redundant, and no longer used
+  ## the whole thing is now purely evented.
  def poll
-    pre_poll_LIS
-    poll_LIS_for_requisition
-    update_LIS
-    post_poll_LIS
  end
 
-
+
 end
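The download and update paths above both lean on the retriable gem (declared as a runtime dependency in the metadata section of this diff): network calls are wrapped in Retriable.retriable(on: PfDownloadException) or Retriable.retriable(on: PfUpdateException), and timeouts or non-200 responses are raised as those exceptions so they are retried with backoff before the surrounding rescue gives up. The core pattern, reduced to a sketch — the gem ships its own exception class in pf_download_exception.rb, and the fetch_orders / process helpers here are illustrative stand-ins:

    require 'retriable'

    class PfDownloadException < StandardError; end   # stand-in for the gem's class

    begin
      Retriable.retriable(on: PfDownloadException) do
        response = fetch_orders                       # hypothetical HTTP call
        raise PfDownloadException.new("non 200 response") unless response.code == 200
        process(response.body)
      end
    rescue PfDownloadException => e
      # Retriable re-raises once its retries are exhausted
      puts "giving up: #{e.message}"
    end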
data/lib/publisher/poller.rb
CHANGED
@@ -442,7 +442,7 @@ class Poller
   ## name of the sorted set can be defined in the class that inherits from adapter, or will default to "requisitions"
   ## when a query is sent from any laboratory equipment to the local ASTMServer, it will query the redis sorted set, for the test information.
   ## so this poller basically constantly replicates the cloud based test information to the local server.
-  def poll_LIS_for_requisition
+  def poll_LIS_for_requisition(to_epoch=nil)
 
   end
 
data/lib/publisher/stream_module.rb
ADDED
@@ -0,0 +1,68 @@
+module StreamModule
+
+  SECRET = ENV["FIREBASE_SECRET"]
+  SITE_URL = ENV["FIREBASE_SITE"]
+
+  attr_accessor :on_message_handler_function
+  attr_accessor :connection
+  attr_accessor :private_key_hash
+  attr_accessor :event_source
+  ## the event_stream object
+  attr_accessor :es
+
+  def setup_connection
+    raise "please provide the private key hash, from firebase service account -> create private key " if self.private_key_hash.blank?
+    raise "no event source endpoint provided" if self.event_source.blank?
+    self.connection = RestFirebase.new :site => SITE_URL,
+      :secret => SECRET, :private_key_hash => private_key_hash, :auth_ttl => 1800
+    self.on_message_handler_function ||= "on_message_handler"
+
+  end
+
+  def watch
+    @reconnect = true
+    self.es = self.connection.event_source(self.event_source)
+    self.es.onopen { |sock| p sock } # Called when connecte
+    self.es.onmessage{ |event, data, sock|
+      #puts "event: #{event}"
+      send(self.on_message_handler_function,data)
+    }
+    self.es.onerror { |error, sock| p error } # Called 4
+    self.es.onreconnect{ |error, sock| p error; @reconnect }
+    self.es.start
+    rd, wr = IO.pipe
+    %w[INT TERM].each do |sig|
+      Signal.trap(sig) do
+        wr.puts # unblock the main thread
+      end
+    end
+    rd.gets # block main thread until INT or TERM received
+    @reconnect = false
+    self.es.close
+    self.es.wait # shutdown cleanly
+  end
+
+  def watch_limited(seconds)
+
+    @reconnect = true
+    self.es = self.connection.event_source(self.event_source)
+    self.es.onopen { |sock| p sock } # Called when connecte
+    self.es.onmessage{ |event, data, sock|
+      send(self.on_message_handler_function,data)
+    }
+    self.es.onerror { |error, sock| p error } # Called 4
+    self.es.onreconnect{ |error, sock| p error; @reconnect }
+    self.es.start
+    sleep(seconds)
+    @reconnect = false
+    self.es.close
+    self.es.wait # shutdown cleanly
+
+  end
+
+  def on_message_handler(data)
+    #puts "got some data"
+    #puts data
+  end
+
+end
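StreamModule (new in this release) wraps rest-firebase server-sent events: setup_connection builds a RestFirebase client from the service-account hash, and watch blocks on the event source, dispatching every message to the method named in on_message_handler_function. A sketch of how a host class might use it — the class name, event path and handler body are illustrative, and FIREBASE_SECRET / FIREBASE_SITE are assumed to be set in the environment:

    class OrderWatcher
      include StreamModule

      def initialize(private_key_hash)
        self.private_key_hash = private_key_hash
        self.event_source = "organizations/org-123"     # hypothetical endpoint
        self.on_message_handler_function = "handle_event"
        setup_connection
      end

      def handle_event(data)
        puts "firebase event payload: #{data.inspect}"
      end
    end

    # OrderWatcher.new(key_hash).watch   # blocks until INT or TERM is received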
data/lib/ruby_astm.rb
CHANGED
@@ -1,4 +1,5 @@
 require_relative "ruby_astm/usb_module"
+require_relative "ruby_astm/custom/esr"
 require_relative "ruby_astm/query"
 require_relative "ruby_astm/frame"
 require_relative "ruby_astm/header"
@@ -8,6 +9,9 @@ require_relative "ruby_astm/order"
 require_relative "ruby_astm/patient"
 require_relative "ruby_astm/result"
 require_relative "ruby_astm/astm_server"
+require_relative "publisher/pf_download_exception"
+require_relative "publisher/pf_update_exception"
+require_relative "publisher/stream_module"
 require_relative "publisher/adapter"
 require_relative "publisher/google_lab_interface"
 require_relative "publisher/poller"
@@ -19,4 +23,6 @@ require_relative "ruby_astm/HL7/hl7_order"
 require_relative "ruby_astm/HL7/hl7_observation"
 require_relative "ruby_astm/custom/siemens_abg_electrolyte_module"
 require_relative "ruby_astm/custom/siemens_abg_electrolyte_server"
+require_relative "ruby_astm/custom/siemens_dimension_exl_module"
+require_relative "ruby_astm/custom/siemens_dimension_exl_server"
 
data/lib/ruby_astm/custom/siemens_abg_electrolyte_module.rb
CHANGED
@@ -207,7 +207,7 @@ module SiemensAbgElectrolyteModule
       unless o.results.blank?
         p.orders << o
         $redis.hset(SIEMENS_ELEC_ABG_RESULTS_HASH,patient_id,JSON.generate(o.results_values_hash))
-        self.headers[-1].patients
+        self.headers[-1].patients = [p]
       end
 
     end
data/lib/ruby_astm/custom/siemens_abg_electrolyte_server.rb
File without changes
data/lib/ruby_astm/custom/siemens_dimension_exl_module.rb
ADDED
@@ -0,0 +1,130 @@
+module SiemensDimensionExlModule
+
+  include LabInterface
+
+  def self.included base
+    base.extend ClassMethods
+  end
+
+  FS = "\x1C"
+
+  def pre_process_bytes(byte_arr,concat)
+
+    puts "this is the overridden method"
+    puts byte_arr.to_s
+
+    indices_to_delete = is_mid_frame_end?(byte_arr)
+    #puts "indices to delete"
+    #puts indices_to_delete.to_s
+
+    if self.mid_frame_end_detected == true
+      #puts "deletected mid fram is true, so deleting first byte before delete"
+      #puts byte_arr.to_s
+      byte_arr = byte_arr[1..-1]
+      #puts "after deleteing"
+      #puts byte_arr.to_s
+      self.mid_frame_end_detected = false
+    end
+
+    unless indices_to_delete.blank?
+      if byte_arr[(indices_to_delete[-1] + 1)]
+        #puts "before deleting frame number "
+        #puts byte_arr.to_s
+        byte_arr.delete_at((indices_to_delete[-1] + 1))
+        #puts "after deleting"
+        #puts byte_arr.to_s
+      else
+        self.mid_frame_end_detected = true
+      end
+    end
+    #puts "byte arr before reject"
+    byte_arr = byte_arr.reject.with_index{|c,i| indices_to_delete.include? i}
+
+
+    byte_arr.each do |byte|
+      x = [byte].pack('c*').force_encoding('UTF-8')
+      if x == "\r"
+        concat+="\n"
+      elsif x == "\n"
+        #puts "new line found --- "
+        concat+=x
+        #puts "last thing in concat."
+        #puts concat[-1].to_s
+      else
+        concat+=x
+      end
+    end
+
+    concat
+
+  end
+
+  def message_ends?
+    x = self.data_bytes.flatten[-1] == 3
+    if x == true
+      puts "message ends #{self.data_bytes.flatten}"
+      self.data_bytes = []
+    end
+    x
+  end
+
+  def enq?
+    self.data_bytes.flatten[-1] == 5
+  end
+
+  def acknowledge
+    resp = ACK
+    send_data(ACK)
+  end
+
+  def no_request
+    resp = STX + "N" + FS + "6A" + ETX + "\n"
+    send_data(resp.bytes.to_a.pack('c*'))
+  end
+
+
+
+  def receive_data(data)
+
+
+    begin
+
+
+      self.data_buffer ||= ''
+
+      #puts "incoming data bytes."
+
+      concat = ""
+
+
+      byte_arr = data.bytes.to_a
+
+      self.test_data_bytes ||= []
+
+      self.data_bytes ||= []
+
+      self.test_data_bytes.push(byte_arr)
+
+      self.data_bytes.push(byte_arr)
+
+      concat = pre_process_bytes(byte_arr,concat)
+
+      self.data_buffer << concat
+
+
+      if message_ends?
+        acknowledge
+        no_request
+      end
+
+
+    rescue => e
+
+      #self.headers = []
+      AstmServer.log("data was: " + self.data_buffer + "error is:" + e.backtrace.to_s)
+      #send_data(EOT)
+    end
+
+  end
+
+end
data/lib/ruby_astm/custom/siemens_dimension_exl_server.rb
ADDED
@@ -0,0 +1,46 @@
+class SiemensDimensionExlServer
+
+  include SiemensDimensionExlModule
+
+  ## DEFAULT SERIAL PORT : /dev/ttyS0
+  ## DEFAULT USB PORT : /dev/ttyUSB0
+  ## @param[Array] ethernet_connections : each element is expected to be a hash, with keys for :server_ip, :server_port.
+  ## @param[Array] serial_connections : each element is expected to be a hash with port_address, baud_rate, and parity
+  #def initialize(server_ip=nil,server_port=nil,mpg=nil,respond_to_queries=false,serial_port='/dev/ttyS0',usb_port='/dev/ttyUSB0',serial_baud=9600,serial_parity=8,usb_baud=19200,usb_parity=8)
+  def initialize(ethernet_connections,serial_connections,mpg=nil,respond_to_queries=nil)
+    $redis = Redis.new
+    self.class.log("Initializing AstmServer")
+    self.ethernet_connections = ethernet_connections
+    self.serial_connections = serial_connections
+    self.server_ip = server_ip || "127.0.0.1"
+    self.server_port = server_port || 3000
+    self.respond_to_queries = respond_to_queries
+    self.serial_port = serial_port
+    self.serial_baud = serial_baud
+    self.serial_parity = serial_parity
+    self.usb_port = usb_port
+    self.usb_baud = usb_baud
+    self.usb_parity = usb_parity
+    $mappings = JSON.parse(IO.read(mpg || self.class.default_mappings))
+  end
+
+  def start_server
+    EventMachine.run {
+      self.ethernet_connections.each do |econn|
+        raise "please provide a valid ethernet configuration with ip address" unless econn[:server_ip]
+        raise "please provide a valid ethernet configuration with port" unless econn[:server_port]
+        EventMachine::start_server econn[:server_ip], econn[:server_port], SiemensDimensionExlModule
+        self.class.log("Running ETHERNET with configuration #{econn}")
+      end
+      self.serial_connections.each do |sconn|
+        raise "please provide a valid serial configuration with port address" unless sconn[:port_address]
+        raise "please provide a valid serial configuration with baud rate" unless sconn[:baud_rate]
+        raise "please provide a valid serial configuration with parity" unless sconn[:parity]
+        EventMachine.open_serial(sconn[:port_address], sconn[:baud_rate], sconn[:parity],SiemensDimensionExlModule)
+        puts "RUNNING SERIAL port with configuration : #{sconn}"
+      end
+
+    }
+  end
+
+end
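SiemensDimensionExlServer is the new entry point for the Siemens Dimension EXL analyzer: it takes arrays of ethernet and serial connection hashes and registers each with EventMachine in start_server. A usage sketch, assuming the accessors referenced in initialize (server_ip, serial_port, and so on) are provided by the included LabInterface module; the addresses, ports and mapping file below are placeholders:

    server = SiemensDimensionExlServer.new(
      [{server_ip: "127.0.0.1", server_port: 3000}],                   # ethernet listeners
      [{port_address: "/dev/ttyUSB0", baud_rate: 19200, parity: 8}],   # serial ports
      "mappings.json"
    )
    server.start_server   # blocks inside EventMachine.run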
data/lib/ruby_astm/lab_interface.rb
CHANGED
@@ -60,7 +60,7 @@ module LabInterface
   #######################################################
   module ClassMethods
     def log(message)
-      puts
+      puts message
       $redis.zadd("ruby_astm_log",Time.now.to_i,message)
     end
 
@@ -267,8 +267,8 @@ module LabInterface
       puts "GOT EOT --- PROCESSING BUFFER, AND CLEARING."
       process_text(self.data_buffer)
       root_path = File.dirname __dir__
-      puts "root path #{root_path}"
-      IO.write((File.join root_path,'../test','resources','d10_error.txt'),self.test_data_bytes.to_s)
+      #puts "root path #{root_path}"
+      #IO.write((File.join root_path,'../test','resources','d10_error.txt'),self.test_data_bytes.to_s)
       #puts self.test_data_bytes.flatten.to_s
       self.data_buffer = ''
       unless self.headers.blank?
data/lib/ruby_astm/usb_module.rb
CHANGED
@@ -34,16 +34,6 @@ module UsbModule
 
     self.usb_response_bytes[-3] == 13
   end
-
-  def add_result_by_count?(barcode,result)
-    begin
-
-
-    rescue => e
-      puts e.to_s
-      return true
-    end
-  end
   ## total times repeated, we can have that as max 3 times.
   ## so after that it wont add.
   ## so we keep a sorted set with the barcode and the count
@@ -76,14 +66,20 @@ module UsbModule
   end
 
   def parse_usb_response(string_data)
+    #puts "string data is:"
+    #puts string_data.to_s
     string_data.bytes.to_a.each do |byte|
       self.usb_response_bytes.push(byte)
     end
-
-    #puts self.usb_response_bytes.
+    parse_bytes
+    #puts self.usb_response_bytes.pack('c*')
+  end
+
+  def parse_bytes(bytes=nil)
+    self.usb_response_bytes ||= bytes
     if interpret?
       #puts "interpret"
-
+      existing_barcodes = []
       if kk = self.usb_response_bytes[13..-4]
         ## its going fresh -> old
         ## so if the barcode has already come
@@ -108,7 +104,7 @@ module UsbModule
           order.id = bar_code
           order.results = []
          order.results << result
-
+          puts "barcode: #{bar_code}, result : #{result.value}"
          patient.orders << order
          if add_result?(bar_code,result.value,existing_barcodes)
            #puts patient.to_json
@@ -125,7 +121,6 @@ module UsbModule
    else
      #puts "dont interpret"
    end
-
  end
 
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ruby_astm
 version: !ruby/object:Gem::Version
-  version: 1.5.3
+  version: 1.5.4
 platform: ruby
 authors:
 - Bhargav Raut
@@ -164,6 +164,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: retriable
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.1'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.1'
 description: This gem provides a server that can handle communication from medical
   instruments that send/receive information on the ASTM protocol.
 email: bhargav.r.raut@gmail.com
@@ -174,17 +188,23 @@ files:
 - lib/mappings.json
 - lib/publisher/adapter.rb
 - lib/publisher/google_lab_interface.rb
+- lib/publisher/pf_download_exception.rb
 - lib/publisher/pf_lab_interface.rb
+- lib/publisher/pf_update_exception.rb
 - lib/publisher/poller.rb
 - lib/publisher/real_time_db.rb
+- lib/publisher/stream_module.rb
 - lib/ruby_astm.rb
 - lib/ruby_astm/HL7/hl7_header.rb
 - lib/ruby_astm/HL7/hl7_observation.rb
 - lib/ruby_astm/HL7/hl7_order.rb
 - lib/ruby_astm/HL7/hl7_patient.rb
 - lib/ruby_astm/astm_server.rb
+- lib/ruby_astm/custom/esr.rb
 - lib/ruby_astm/custom/siemens_abg_electrolyte_module.rb
 - lib/ruby_astm/custom/siemens_abg_electrolyte_server.rb
+- lib/ruby_astm/custom/siemens_dimension_exl_module.rb
+- lib/ruby_astm/custom/siemens_dimension_exl_server.rb
 - lib/ruby_astm/frame.rb
 - lib/ruby_astm/header.rb
 - lib/ruby_astm/lab_interface.rb
@@ -214,7 +234,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.7.8
 signing_key:
 specification_version: 4
 summary: A Ruby gem to interface with Medical instruments that work on the ASTM protocol