metamri 0.2.25 → 0.2.26

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 92932063e1a8a63e45c8b74c92adfbd4def00b09
- data.tar.gz: 467f2328d0aee4538b0cd03c68e98b8a8599c7b9
+ metadata.gz: 5af9adf7949a97f8a098295e8f05aeac91d61fd2
+ data.tar.gz: 4c9475178d5a518b07ec5b4fce88bb9b7d28eec4
  SHA512:
- metadata.gz: 9de4f0f131ad11edd2c8a9e99c22efeb8c0e243e14fb8a23ec3c373e7fa537cceabc3dc837310dd35704c7d77a674b8f136fc455adf490e4227c2d1ced991419
- data.tar.gz: bf1ade09fe6571e22d4c6f71efb3305e28bb01b528e87e5c9212bf6e8aab3fd8f2d5d4289c3d45323c941a2f77fb394b73b7e42a0fffa7329c24e3651b465542
+ metadata.gz: 161d4cd4c8dd011e8455fdab16591d7a84a83c095999722bbb0cc2d1094ffb24b9c14b6c8671f771047376d6fbdb1f1f4f610feffd2758d1ad3915b6bb442258
+ data.tar.gz: f34510cc2c7ed1280745a3bb6fef604c857786e92ef20d2f229fdff38975e00d2460aade4bf2a42c62ccc21b81a144dd41d9435a4bc45b1e8c825bccac9c3580
data/Manifest CHANGED
Binary file
data/VERSION CHANGED
@@ -1 +1 @@
- 0.2.25
+ 0.2.26
@@ -49,7 +49,7 @@ require 'metamri'
  def import_visit(raw_directory, scan_procedure_codename, database)
  log = Logger.new(File.basename(raw_directory))
  v = VisitRawDataDirectory.new(raw_directory, scan_procedure_codename)
- puts "+++ Importing #{v.visit_directory} as part of #{v.scan_procedure_name} +++"
+ puts "|||+++ Importing #{v.visit_directory} as part of #{v.scan_procedure_name} +++"
  begin
  v.scan
  v.db_insert!(database)
@@ -128,6 +128,29 @@ class Pathname
  end
  end
 
+
+ def each_scanner_archive_summary
+ entries.each do |leaf|
+ next unless leaf.to_s =~ /^ScanArchive.*(.h5.json)$/
+ branch = self + leaf
+ next if branch.symlink?
+ lc = branch.local_copy
+ begin
+ yield lc
+ rescue StandardError => e
+ case $LOG.level
+ when Logger::DEBUG
+ raise e
+ else
+ puts "#{e}"
+ end
+ ensure
+ lc.delete
+ end
+ end
+ end
+
+
 
  def first_dicom
  entries.each do |leaf|
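The hunk above adds Pathname#each_scanner_archive_summary, which yields a temporary local copy of every ScanArchive_*.h5.json entry in a directory and deletes that copy when the block finishes. A minimal usage sketch, assuming metamri's Pathname additions are loaded; the directory path is hypothetical:

  require 'metamri'
  require 'pathname'
  require 'logger'

  $LOG = Logger.new(STDOUT)   # consulted when a yielded block raises
  $LOG.level = Logger::INFO

  # Hypothetical scan_archives directory inside a visit's raw data.
  archive_dir = Pathname.new('/Data/raw/example_visit/raw_data/scan_archives')

  archive_dir.each_scanner_archive_summary do |local_json|
    # local_json is a temporary local copy; it is deleted after the block returns.
    puts "scanner archive summary: #{local_json}"
  end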
@@ -47,6 +47,12 @@ class RawImageDataset
  attr_reader :dicom_taghash
  # Array of Read Error Strings
  attr_reader :read_errors
+ # head coil
+ attr_reader :mri_coil_name
+ # station name
+ attr_reader :mri_station_name
+ # mri model name
+ attr_reader :mri_manufacturer_model_name
 
 
  # * dir: The directory containing the files.
@@ -113,6 +119,12 @@ class RawImageDataset
 
  @image_uid = @raw_image_files.first.image_uid
  validates_metainfo_for :image_uid if pfile?
+
+ @mri_coil_name = @raw_image_files.first.mri_coil_name
+
+ @mri_station_name = @raw_image_files.first.mri_station_name
+
+ @mri_manufacturer_model_name = @raw_image_files.first.mri_manufacturer_model_name
 
  $LOG ||= Logger.new(STDOUT)
  end
@@ -134,7 +146,7 @@ class RawImageDataset
  # transaction at the visit level, or even higher when doing a whole file system
  # scan.
  def db_insert(visit_id)
- "INSERT INTO image_datasets
+ "INSERT INto image_datasets
  (rmr, series_description, path, timestamp, created_at, updated_at, visit_id,
  glob, rep_time, bold_reps, slices_per_volume, scanned_file, 'dicom_study_uid')
  VALUES ('#{@rmr_number}', '#{@series_description}', '#{@directory}', '#{@timestamp.to_s}', '#{DateTime.now}',
@@ -191,7 +203,10 @@ class RawImageDataset
  :scanned_file => @scanned_file,
  :dicom_series_uid => @dicom_series_uid,
  :dicom_taghash => @dicom_taghash,
- :image_uid => @image_uid
+ :image_uid => @image_uid,
+ :mri_coil_name => @mri_coil_name,
+ :mri_station_name => @mri_station_name,
+ :mri_manufacturer_model_name => @mri_manufacturer_model_name
  }.merge attrs
  end
 
@@ -287,7 +302,9 @@ Returns a path to the created dataset as a string if successful.
  @file_count = Dir.open(@directory).reject{ |branch| /(^\.|.yaml$)/.match(branch) }.length
  elsif @raw_image_files.first.pfile?
  @file_count = 1
- else raise "File not recognized as dicom or pfile."
+ elsif @raw_image_files.first.scan_archive_h5_json?
+ @file_count = 1
+ else raise "File not recognized as dicom or pfile or scan_archive_h5_json."
  end
  end
  return @file_count
@@ -325,6 +342,13 @@ Returns a path to the created dataset as a string if successful.
  else
  relative_dataset_path = image_file.filename
  end
+ when 'scan_archive_h5_json'
+ full_dataset_path = Pathname.new(File.join(directory, image_file.filename))
+ if visit_dir
+ relative_dataset_path = full_dataset_path.relative_path_from(visit_dir)
+ else
+ relative_dataset_path = image_file.filename
+ end
  else raise "Cannot identify #{@raw_image_files.first.filename}"
  end
 
@@ -354,6 +378,10 @@ Returns a path to the created dataset as a string if successful.
  def geifile?
  @raw_image_files.first.geifile?
  end
+
+ def scan_archive_h5_json?
+ @raw_image_files.first.scan_archive_h5_json?
+ end
 
  private
 
@@ -14,10 +14,16 @@ class RawImageDatasetResource < ActiveResource::Base
  # need to stop loading bz2 P files - 15 GB nmprage pfiles taking to long to bunzip2
  # load just P*.7 and have routine job to pbzip2 things up later
  filename = Pathname.new(File.join(path, scanned_file))
- flash "wwwwwwwwwwww filename= #{filename}" if $LOG.level <= Logger::INFO
+ flash "wwwwwwwwwwwwZZZZZZ filename= #{filename}" #if $LOG.level <= Logger::INFO
  filename_matches = /P\d{5}.7(.bz2)?/.match(filename)
  filename_matches_non_bz2 = /P\d{5}(.7)?/.match(filename)
  filename_matches_summary = /P\d{5}(.7.summary)?/.match(filename)
+
+ filename_matches_json = /^ScanArchive.*(.h5.json)?/.match(filename)
+ puts "zzzzZZZ filename="+filename
+ if filename_matches_json
+ puts "IIITTTS a json"
+ end
 
  if filename_matches # Pfile
  if filename_matches[1] # '.bz2' if present, nil if otherwise.
@@ -4,6 +4,7 @@ require 'rubygems';
  require 'yaml';
  # require 'sqlite3';
  require 'dicom'
+ require 'json'
 
 
  # Implements a collection of metadata associated with a raw image file. In
@@ -23,8 +24,9 @@ class RawImageFile
  RDGEHDR = "rdgehdr"
  PRINTRAW = "printraw"
  PRINTRAW_SUMMARY = "cat" #"printraw_summary"
+ PRINTRAW_H5_JSON = "cat"
  RUBYDICOM_HDR = "rubydicom"
- VALID_HEADERS = [DICOM_HDR, PRINTRAW, RDGEHDR, RUBYDICOM_HDR,PRINTRAW_SUMMARY]
+ VALID_HEADERS = [DICOM_HDR, PRINTRAW, RDGEHDR, RUBYDICOM_HDR,PRINTRAW_SUMMARY,PRINTRAW_H5_JSON]
  MONTHS = {
  :jan => "01", :feb => "02", :mar => "03", :apr => "04", :may => "05",
  :jun => "06", :jul => "07", :aug => "08", :sep => "09", :oct => "10",
@@ -87,6 +89,12 @@ class RawImageFile
  attr_reader :operator_name
  # Patient "Name", usually StudyID or ENUM
  attr_reader :patient_name
+ # head coil
+ attr_reader :mri_coil_name
+ # station name
+ attr_reader :mri_station_name
+ # mri model name
+ attr_reader :mri_manufacturer_model_name
 
  # Creates a new instance of the class given a path to a valid image file.
  #
@@ -108,6 +116,9 @@ class RawImageFile
  if @filename =~ /^P*\.summary/
  @hdr_reader = PRINTRAW_SUMMARY
  end
+ if @filename =~ /^ScanArchive*\.h5.json/
+ @hdr_reader = nil #PRINTRAW_H5_JSON
+ end
  # try to read the header, raise an IOError if unsuccessful
  begin
  @hdr_data, @hdr_reader = read_header(absfilepath)
@@ -152,6 +163,9 @@ class RawImageFile
  return @file_type == "pfile"
  end
 
+ def scan_archive_h5_json?
+ return @file_type == "scan_archive_h5_json"
+ end
 
  # Predicate simply returns true if "dicom" is stored in the img_type instance variable.
  def dicom?
@@ -284,7 +298,7 @@ private
  # Note: The rdgehdr is a binary file; the correct version for your architecture must be installed in the path.
  def read_header(absfilepath)
  tmp_filename= File.basename(absfilepath)
-
+
  case File.basename(absfilepath)
  when /^P.{5}\.7$|^I\..{3}/
  # check for
@@ -302,6 +316,22 @@ private
  @current_hdr_reader = nil
  return [ header, PRINTRAW ]
  end
+ when /^ScanArchive_.{10,}\.h5\.json$/
+ json_file = File.read(absfilepath)
+ data_hash = JSON.parse(json_file)
+ ####header ="se_desc = "+data_hash["SERIES INFORMATION"]["Series Desc"]+"
+ ####image_uid = "+data_hash["RHUSER and OPUSER INFORMATION"]["imagehead.image_uid"]+"\n"
+ ## if ( header.chomp != "" )#and header.length > MIN_HDR_SUMMARY_LENGTH )
+ if !data_hash.nil?
+ @current_hdr_reader = nil
+ # puts data_hash.to_s
+ # FAILING IN HEADER READ - WHERE IS THE HEADER TRYING TO GET READ? NOT STOPPING AT PRINTRAW_SUMMARY
+ # CAN THE data_hash be passed instead of the header, and then read in the header_json_reader?
+ return [data_hash, nil] ###PRINTRAW_H5_JSON ]
+ ###return [ header, PRINTRAW_H5_JSON ]
+ end
+ # need to read json
+ # create header
  when /^P.{5}\.7\.summary/
  # check for
  @current_hdr_reader = PRINTRAW_SUMMARY
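In the ScanArchive branch added to read_header above, the parsed JSON hash itself is returned in place of a header string and the reader slot is nil, so import_hdr later dispatches on the file type rather than on a header reader. A standalone sketch of that path, using a hypothetical file name:

  require 'json'

  # Hypothetical sidecar file written next to a GE ScanArchive .h5 file.
  absfilepath = '/tmp/ScanArchive_20181024001_1.h5.json'

  case File.basename(absfilepath)
  when /^ScanArchive_.{10,}\.h5\.json$/
    data_hash = JSON.parse(File.read(absfilepath))
    # The hash stands in for the usual header text; no external header tool is run.
    hdr_data, hdr_reader = data_hash, nil
  end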
@@ -360,6 +390,7 @@ private
  return "pfile" if image? and (@filename =~ /^P.....\.7/) != nil
  return "pfile" if (@filename =~ /^P.....\.7\.summary/) != nil
  return "geifile" if image? and (@filename =~ /^I\.\d*/) != nil
+ return "scan_archive_h5_json" if (@filename =~ /^ScanArchive_.{10,}.h5.json/) != nil
  return "dicom" if image? and (@filename =~ /^P.....\.7/) == nil
  return nil
  end
@@ -368,10 +399,10 @@ private
  # Parses the header data and extracts a collection of instance variables. If
  # @hdr_data and @hdr_reader are not already available, this function does nothing.
  def import_hdr
-
  if @hdr_reader == nil
  case @file_type
  when "pfile" then printraw_summary_import
+ when "scan_archive_h5_json" then printraw_scan_archive_h5_json
  end
  else
  raise(IndexError, "No Header Data Available.") if @hdr_data == nil
@@ -713,6 +744,88 @@ puts "printraw_import rrrrrr @image_uid ="+@image_uid .to_s
  @hdr_data = nil
 
  end
+ def printraw_scan_archive_h5_json
+ #puts "hhhhhhh @hdr_data[SERIES INFORMATION][Series Desc]="+@hdr_data["SERIES INFORMATION"]["Series Desc"]
+ source_pat = /hospital [Nn]ame: ([[:graph:]\t ]+)/i
+ num_slices_pat = /Number of slices in this scan group: ([0-9]+)/i
+ slice_thickness_pat = /slice thickness \(mm\): ([[:graph:]]+)/i
+ slice_spacing_pat = /spacing between scans \(mm\??\): ([[:graph:]]+)/i
+ date_pat = /actual image date\/time stamp: (.*)\n/i
+ gender_pat = /Patient Sex: (1|2)/i
+ acquisition_matrix_x_pat = /Image matrix size \- X: ([0-9]+)/i
+ acquisition_matrix_y_pat = /Image matrix size \- Y: ([0-9]+)/i
+ series_description_pat = /Series Description: ([[:graph:] \t]+)/i
+ recon_diam_pat = /Display field of view \- X \(mm\): ([0-9]+)/i
+ rmr_number_pat = /Patient ID for this exam: ([[:graph:]]+)/i
+ bold_reps_pat = /Number of excitations: ([0-9]+)/i
+ rep_time_pat = /Pulse repetition time \(usec\): ([0-9]+)/i
+ study_uid_pat = /Study entity unique ID: ([[:graph:]]+)/i
+ series_uid_pat = /Series entity unique ID: ([[:graph:]]+)/i
+ image_uid_pat = /Image unique ID: ([[:graph:]]+)/i
+
+ @dicom_taghash = @hdr_data
+ @rmr_number = (@hdr_data["PATIENT INFORMATION"]["PID"]).nil? ? "rmr not found" : (@hdr_data["PATIENT INFORMATION"]["PID"]).strip.chomp
+
+ @source = (@hdr_data["EXAM INFORMATION"]["Hospital Name"]).nil? ? "source not found" : (@hdr_data["EXAM INFORMATION"]["Hospital Name"]).strip.chomp
+
+ ####num_slices_pat =~ @hdr_data
+ @num_slices = (@hdr_data["ACQUISITION INFORMATION"]["Nslices"]).strip.chomp
+
+ ####slice_thickness_pat =~ @hdr_data
+ @slice_thickness = (@hdr_data["RECONSTRUCTION INFORMATION"][ "Z thick"]).strip.to_f
+
+
+ ####slice_spacing_pat =~ @hdr_data
+ @slice_spacing = 0 #($1).to_f
+
+ ####date_pat =~ @hdr_data
+
+ v_scan_date_mm_dd_yy = @hdr_data["EXAM INFORMATION"]["Exam Date"] # @hdr_data["ACQUISITION INFORMATION"]["Scan Date"] had 3 digit year???
+ v_scan_time_hh24_min = @hdr_data["ACQUISITION INFORMATION"]["Scan Time"]
+ v_scan_date_mm_dd_yy_array = v_scan_date_mm_dd_yy.split(" ")
+ v_scan_time_hh24_min_array = v_scan_time_hh24_min.split(" ")
+ if v_scan_date_mm_dd_yy_array.count > 2 and v_scan_time_hh24_min_array.count > 1
+ v_datetime = "20"+v_scan_date_mm_dd_yy_array[2]+"-"+v_scan_date_mm_dd_yy_array[0]+"-"+v_scan_date_mm_dd_yy_array[1]+"T"+v_scan_time_hh24_min_array[0]+":"+v_scan_time_hh24_min_array[1]
+ @timestamp = v_datetime.to_datetime
+ else
+ @timestamp = Datetime.new
+
+ end
+ ####@timestamp = DateTime.parse(v_datetime)
+ @gender = @hdr_data["PATIENT INFORMATION"]["Sex"] == 1 ? "M" : "F"
+
+ ####acquisition_matrix_x_pat =~ @hdr_data
+ @acquisition_matrix_x = (@hdr_data["RECONSTRUCTION INFORMATION"]["Xres"]).strip.to_i
+ ####acquisition_matrix_y_pat =~ @hdr_data
+ @acquisition_matrix_y = 0 #####($1).to_i
+
+ @series_description = (@hdr_data["RHUSER and OPUSER INFORMATION"]["serieshead.se_desc"]).strip.chomp
+
+ ####recon_diam_pat =~ @hdr_data
+ @reconstruction_diameter = (@hdr_data["RECONSTRUCTION INFORMATION"]["Yres"]).strip.to_i
+
+ ####bold_reps_pat =~ @hdr_data
+ @bold_reps = 0 ####($1).to_i
+
+ ####rep_time_pat =~ @hdr_data
+ @rep_time = (@hdr_data["RHUSER and OPUSER INFORMATION"]["imagehead.reptime"]).to_f / 1000000
+ @study_uid = (@hdr_data["RHUSER and OPUSER INFORMATION"]["examhead.study_uid"]).strip.chomp unless @hdr_data["RHUSER and OPUSER INFORMATION"]["examhead.study_uid"].nil?
+
+
+ @series_uid = @hdr_data["RHUSER and OPUSER INFORMATION"]["serieshead.series_uid"].chomp unless @hdr_data["RHUSER and OPUSER INFORMATION"]["serieshead.series_uid"].nil?
+
+
+ @image_uid = (@hdr_data["RHUSER and OPUSER INFORMATION"]["imagehead.image_uid"]).strip.chomp unless @hdr_data["RHUSER and OPUSER INFORMATION"]["imagehead.image_uid"].nil?
+ if !@image_uid.nil?
+ @image_uid = "sa"+@image_uid # unique index on uid - same uid may be in dicoms as scan archive
+ end
+
+ @mri_coil_name = @hdr_data["ACQUISITION INFORMATION"]["Coil Name"].chomp unless @hdr_data["ACQUISITION INFORMATION"]["Coil Name"].nil?
+
+ @mri_station_name = @hdr_data["EXAM INFORMATION"]["System ID"].chomp unless @hdr_data["EXAM INFORMATION"]["System ID"].nil?
+ ####@mri_manufacturer_model_name = ???
+ end
+
 
  def printraw_summary_import
  source_pat = /hospital [Nn]ame: ([[:graph:]\t ]+)/i
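To illustrate how printraw_scan_archive_h5_json maps the parsed hash onto instance variables, here is a small sketch; the section and key names follow the method above, but the values are invented:

  require 'date'

  hdr_data = {
    "PATIENT INFORMATION"           => { "PID" => " subj001 " },
    "EXAM INFORMATION"              => { "Exam Date" => "10 24 18", "System ID" => "MR750w\n" },
    "ACQUISITION INFORMATION"       => { "Scan Time" => "13 45", "Coil Name" => "32Ch Head\n", "Nslices" => " 36 " },
    "RHUSER and OPUSER INFORMATION" => { "imagehead.reptime" => "2000000" }
  }

  rmr_number       = hdr_data["PATIENT INFORMATION"]["PID"].strip.chomp          # => "subj001"
  num_slices       = hdr_data["ACQUISITION INFORMATION"]["Nslices"].strip.chomp  # => "36"
  mri_coil_name    = hdr_data["ACQUISITION INFORMATION"]["Coil Name"].chomp      # => "32Ch Head"
  mri_station_name = hdr_data["EXAM INFORMATION"]["System ID"].chomp             # => "MR750w"
  rep_time         = hdr_data["RHUSER and OPUSER INFORMATION"]["imagehead.reptime"].to_f / 1000000  # => 2.0 (seconds)

  # Timestamp built from "Exam Date" plus "Scan Time", as in the method above.
  d = hdr_data["EXAM INFORMATION"]["Exam Date"].split(" ")
  t = hdr_data["ACQUISITION INFORMATION"]["Scan Time"].split(" ")
  timestamp = DateTime.parse("20#{d[2]}-#{d[0]}-#{d[1]}T#{t[0]}:#{t[1]}")        # => 2018-10-24T13:45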
@@ -1,3 +1,3 @@
  module Metamri
- VERSION = "0.2.25"
+ VERSION = "0.2.26"
  end
@@ -108,6 +108,9 @@ class VisitRawDataDirectory
  dd.each_pfile { |pf| # check for p*.7.summary
  @datasets << import_dataset(pf, dd); @datasets.last.print_scan_status if $LOG.level == Logger::INFO }
  dd.first_dicom { |fd| @datasets << import_dataset(fd, dd); @datasets.last.print_scan_status if $LOG.level == Logger::INFO }
+ if (dd.to_s).include?("scan_archives") and (dd.to_s).include?("raw_data")
+ dd.each_scanner_archive_summary { |sa| @datasets << import_dataset(sa, dd); @datasets.last.print_scan_status if $LOG.level == Logger::INFO }
+ end
  rescue StandardError => e
  raise(e, "There was an error scaning dataset #{dd}: #{e}")
  end
@@ -192,8 +195,24 @@ Returns an array of the created nifti files.
 
  @datasets.each do |dataset|
  nifti_output_path = output_directory
- v_basename =File.basename(dataset.directory).gsub(/-/,"").gsub(/_/,"").gsub(/\:/,"").gsub(/\//,"")
- v_series_description = "."+dataset.series_description.gsub(/ /,"").gsub(/-/,"").gsub(/_/,"").gsub(/\:/,"").gsub(/\//,"")
+ #v_basename =File.basename(dataset.directory).gsub(/-/,"").gsub(/_/,"").gsub(/\:/,"").gsub(/\//,"")
+ #v_series_description = "."+dataset.series_description.gsub(/ /,"").gsub(/-/,"").gsub(/_/,"").gsub(/\:/,"").gsub(/\//,"")
+ # 20171120 addition
+ v_basename =File.basename(dataset.directory)
+ v_series_description = "."+dataset.series_description
+ v_series_description_full_replace = v_series_description
+ # need to get the scan series numbers - take the v_basename/folder name -- replace all the series description stuff
+ # end up with scan series number - add to the end of the series_description to get the nii file name
+ if !v_basename.nil?
+ v_basename = v_basename.gsub(/ /,"").gsub(/\-/,"").gsub(/\_/,"").gsub(/\(/,"").gsub(/\)/,"").gsub(/\=/,"").gsub(/\+/,"").gsub(/\'/,"").gsub(/\^/,"").gsub(/\,/,"").gsub(/\:/,"").gsub(/\*/,"star")
+ end
+ if !v_series_description_full_replace.nil?
+ v_series_description_full_replace = v_series_description_full_replace.gsub(/ /,"").gsub(/\-/,"").gsub(/\_/,"").gsub(/\(/,"").gsub(/\)/,"").gsub(/\=/,"").gsub(/\+/,"").gsub(/\'/,"").gsub(/\^/,"").gsub(/\,/,"").gsub(/\:/,"").gsub(/\*/,"star")
+ end
+
+ #puts "ccccc end v_basename="+v_basename
+ #puts "dddd end v_series_description_full_replace="+v_series_description_full_replace
+
  if v_basename.include? v_series_description
  # want the scan series number - e.g. 00001 at the end
  v_tmp_filename = v_basename.gsub(v_series_description,"")
@@ -371,9 +390,13 @@ Returns an array of the created nifti files.
  tmp_filename = File.basename rawfile.to_s
  tmp_filname_summary_s = (tmp_filename).gsub(/\.bz2/,"").gsub(/\.summary/,"")+".summary"
  tmp_branch_summary_s = original_parent_directory.to_s+"/"+tmp_filname_summary_s
+ tmp_branch_scan_archive_json = original_parent_directory.to_s+"/"+tmp_filename
+
  if File.exist?(tmp_branch_summary_s)
  #branch_summary_pn = Pathname.new(branch_summary_s)
  rawimagefile = RawImageFile.new(tmp_branch_summary_s)
+ elsif File.exists?(tmp_branch_scan_archive_json)
+ rawimagefile = RawImageFile.new(tmp_branch_scan_archive_json)
  else
  rawimagefile = RawImageFile.new(rawfile.to_s)
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: metamri
  version: !ruby/object:Gem::Version
- version: 0.2.25
+ version: 0.2.26
  platform: ruby
  authors:
  - Kristopher J. Kosmatka
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-04-06 00:00:00.000000000 Z
+ date: 2018-10-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: dicom
@@ -154,7 +154,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.5.1
+ rubygems_version: 2.6.14
  signing_key:
  specification_version: 4
  summary: MRI metadata